+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![stable(feature = "rust1", since = "1.0.0")]
-
-//! Thread-safe reference-counting pointers.
-//!
-//! See the [`Arc<T>`][arc] documentation for more details.
-//!
-//! [arc]: struct.Arc.html
-
-use core::any::Any;
-use core::sync::atomic;
-use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
-use core::borrow;
-use core::fmt;
-use core::cmp::Ordering;
-use core::intrinsics::abort;
-use core::mem::{self, align_of_val, size_of_val};
-use core::ops::Deref;
-use core::ops::CoerceUnsized;
-use core::ptr::{self, NonNull};
-use core::marker::{Unsize, PhantomData};
-use core::hash::{Hash, Hasher};
-use core::{isize, usize};
-use core::convert::From;
-
-use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
-use boxed::Box;
-use string::String;
-use vec::Vec;
-
-/// A soft limit on the amount of references that may be made to an `Arc`.
-///
-/// Going above this limit will abort your program (although not
-/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
-const MAX_REFCOUNT: usize = (isize::MAX) as usize;
-
-/// A sentinel value that is used for the pointer of `Weak::new()`.
-const WEAK_EMPTY: usize = 1;
-
-/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
-/// Reference Counted'.
-///
-/// The type `Arc<T>` provides shared ownership of a value of type `T`,
-/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
-/// a new pointer to the same value in the heap. When the last `Arc`
-/// pointer to a given value is destroyed, the pointed-to value is
-/// also destroyed.
-///
-/// Shared references in Rust disallow mutation by default, and `Arc` is no
-/// exception: you cannot generally obtain a mutable reference to something
-/// inside an `Arc`. If you need to mutate through an `Arc`, use
-/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
-/// types.
-///
-/// ## Thread Safety
-///
-/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
-/// counting. This means that it is thread-safe. The disadvantage is that
-/// atomic operations are more expensive than ordinary memory accesses. If you
-/// are not sharing reference-counted values between threads, consider using
-/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
-/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
-/// However, a library might choose `Arc<T>` in order to give library consumers
-/// more flexibility.
-///
-/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
-/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
-/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
-/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
-/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
-/// data, but it doesn't add thread safety to its data. Consider
-/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
-/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
-/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
-/// non-atomic operations.
-///
-/// In the end, this means that you may need to pair `Arc<T>` with some sort of
-/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
-///
-/// ## Breaking cycles with `Weak`
-///
-/// The [`downgrade`][downgrade] method can be used to create a non-owning
-/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
-/// to an `Arc`, but this will return [`None`] if the value has already been
-/// dropped.
-///
-/// A cycle between `Arc` pointers will never be deallocated. For this reason,
-/// [`Weak`][weak] is used to break cycles. For example, a tree could have
-/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
-/// pointers from children back to their parents.
-///
-/// # Cloning references
-///
-/// Creating a new reference from an existing reference counted pointer is done using the
-/// `Clone` trait implemented for [`Arc<T>`][arc] and [`Weak<T>`][weak].
-///
-/// ```
-/// use std::sync::Arc;
-/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
-/// // The two syntaxes below are equivalent.
-/// let a = foo.clone();
-/// let b = Arc::clone(&foo);
-/// // a and b both point to the same memory location as foo.
-/// ```
-///
-/// The [`Arc::clone(&from)`] syntax is the most idiomatic because it conveys more explicitly
-/// the meaning of the code. In the example above, this syntax makes it easier to see that
-/// this code is creating a new reference rather than copying the whole content of foo.
-///
-/// ## `Deref` behavior
-///
-/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
-/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
-/// clashes with `T`'s methods, the methods of `Arc<T>` itself are [associated
-/// functions][assoc], called using function-like syntax:
-///
-/// ```
-/// use std::sync::Arc;
-/// let my_arc = Arc::new(());
-///
-/// Arc::downgrade(&my_arc);
-/// ```
-///
-/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the value may have
-/// already been destroyed.
-///
-/// [arc]: struct.Arc.html
-/// [weak]: struct.Weak.html
-/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
-/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
-/// [mutex]: ../../std/sync/struct.Mutex.html
-/// [rwlock]: ../../std/sync/struct.RwLock.html
-/// [atomic]: ../../std/sync/atomic/index.html
-/// [`Send`]: ../../std/marker/trait.Send.html
-/// [`Sync`]: ../../std/marker/trait.Sync.html
-/// [deref]: ../../std/ops/trait.Deref.html
-/// [downgrade]: struct.Arc.html#method.downgrade
-/// [upgrade]: struct.Weak.html#method.upgrade
-/// [`None`]: ../../std/option/enum.Option.html#variant.None
-/// [assoc]: ../../book/first-edition/method-syntax.html#associated-functions
-/// [`RefCell<T>`]: ../../std/cell/struct.RefCell.html
-/// [`std::sync`]: ../../std/sync/index.html
-/// [`Arc::clone(&from)`]: #method.clone
-///
-/// # Examples
-///
-/// Sharing some immutable data between threads:
-///
-// Note that we **do not** run these tests here. The windows builders get super
-// unhappy if a thread outlives the main thread and then exits at the same time
-// (something deadlocks) so we just avoid this entirely by not running these
-// tests.
-/// ```no_run
-/// use std::sync::Arc;
-/// use std::thread;
-///
-/// let five = Arc::new(5);
-///
-/// for _ in 0..10 {
-/// let five = Arc::clone(&five);
-///
-/// thread::spawn(move || {
-/// println!("{:?}", five);
-/// });
-/// }
-/// ```
-///
-/// Sharing a mutable [`AtomicUsize`]:
-///
-/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html
-///
-/// ```no_run
-/// use std::sync::Arc;
-/// use std::sync::atomic::{AtomicUsize, Ordering};
-/// use std::thread;
-///
-/// let val = Arc::new(AtomicUsize::new(5));
-///
-/// for _ in 0..10 {
-/// let val = Arc::clone(&val);
-///
-/// thread::spawn(move || {
-/// let v = val.fetch_add(1, Ordering::SeqCst);
-/// println!("{:?}", v);
-/// });
-/// }
-/// ```
-///
-/// See the [`rc` documentation][rc_examples] for more examples of reference
-/// counting in general.
-///
-/// [rc_examples]: ../../std/rc/index.html#examples
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Arc<T: ?Sized> {
- ptr: NonNull<ArcInner<T>>,
- phantom: PhantomData<T>,
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
-
-#[unstable(feature = "coerce_unsized", issue = "27732")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
-
-/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
-/// managed value. The value is accessed by calling [`upgrade`] on the `Weak`
-/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
-///
-/// Since a `Weak` reference does not count towards ownership, it will not
-/// prevent the inner value from being dropped, and `Weak` itself makes no
-/// guarantees about the value still being present and may return [`None`]
-/// when [`upgrade`]d.
-///
-/// A `Weak` pointer is useful for keeping a temporary reference to the value
-/// within [`Arc`] without extending its lifetime. It is also used to prevent
-/// circular references between [`Arc`] pointers, since mutual owning references
-/// would never allow either [`Arc`] to be dropped. For example, a tree could
-/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
-/// pointers from children back to their parents.
-///
-/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
-///
-/// [`Arc`]: struct.Arc.html
-/// [`Arc::downgrade`]: struct.Arc.html#method.downgrade
-/// [`upgrade`]: struct.Weak.html#method.upgrade
-/// [`Option`]: ../../std/option/enum.Option.html
-/// [`None`]: ../../std/option/enum.Option.html#variant.None
-#[stable(feature = "arc_weak", since = "1.4.0")]
-pub struct Weak<T: ?Sized> {
- // This is a `NonNull` to allow optimizing the size of this type in enums,
- // but it is actually not truly "non-null". A `Weak::new()` will set this
- // to a sentinel value, instead of needing to allocate some space in the
- // heap.
- ptr: NonNull<ArcInner<T>>,
-}
-
-#[stable(feature = "arc_weak", since = "1.4.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
-#[stable(feature = "arc_weak", since = "1.4.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
-
-#[unstable(feature = "coerce_unsized", issue = "27732")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
-
-#[stable(feature = "arc_weak", since = "1.4.0")]
-impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "(Weak)")
- }
-}
-
-struct ArcInner<T: ?Sized> {
- strong: atomic::AtomicUsize,
-
- // the value usize::MAX acts as a sentinel for temporarily "locking" the
- // ability to upgrade weak pointers or downgrade strong ones; this is used
- // to avoid races in `make_mut` and `get_mut`.
- weak: atomic::AtomicUsize,
-
- data: T,
-}
-
-unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
-unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
-
-impl<T> Arc<T> {
- /// Constructs a new `Arc<T>`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn new(data: T) -> Arc<T> {
- // Start the weak pointer count as 1 which is the weak pointer that's
- // held by all the strong pointers (kinda), see std/rc.rs for more info
- let x: Box<_> = box ArcInner {
- strong: atomic::AtomicUsize::new(1),
- weak: atomic::AtomicUsize::new(1),
- data,
- };
- Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData }
- }
-
- /// Returns the contained value, if the `Arc` has exactly one strong reference.
- ///
- /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
- /// passed in.
- ///
- /// This will succeed even if there are outstanding weak references.
- ///
- /// [result]: ../../std/result/enum.Result.html
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let x = Arc::new(3);
- /// assert_eq!(Arc::try_unwrap(x), Ok(3));
- ///
- /// let x = Arc::new(4);
- /// let _y = Arc::clone(&x);
- /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
- /// ```
- #[inline]
- #[stable(feature = "arc_unique", since = "1.4.0")]
- pub fn try_unwrap(this: Self) -> Result<T, Self> {
- // See `drop` for why all these atomics are like this
- if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
- return Err(this);
- }
-
- atomic::fence(Acquire);
-
- unsafe {
- let elem = ptr::read(&this.ptr.as_ref().data);
-
- // Make a weak pointer to clean up the implicit strong-weak reference
- let _weak = Weak { ptr: this.ptr };
- mem::forget(this);
-
- Ok(elem)
- }
- }
-}
-
-impl<T: ?Sized> Arc<T> {
- /// Consumes the `Arc`, returning the wrapped pointer.
- ///
- /// To avoid a memory leak the pointer must be converted back to an `Arc` using
- /// [`Arc::from_raw`][from_raw].
- ///
- /// [from_raw]: struct.Arc.html#method.from_raw
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let x = Arc::new(10);
- /// let x_ptr = Arc::into_raw(x);
- /// assert_eq!(unsafe { *x_ptr }, 10);
- /// ```
- #[stable(feature = "rc_raw", since = "1.17.0")]
- pub fn into_raw(this: Self) -> *const T {
- let ptr: *const T = &*this;
- mem::forget(this);
- ptr
- }
-
- /// Constructs an `Arc` from a raw pointer.
- ///
- /// The raw pointer must have been previously returned by a call to a
- /// [`Arc::into_raw`][into_raw].
- ///
- /// This function is unsafe because improper use may lead to memory problems. For example, a
- /// double-free may occur if the function is called twice on the same raw pointer.
- ///
- /// [into_raw]: struct.Arc.html#method.into_raw
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let x = Arc::new(10);
- /// let x_ptr = Arc::into_raw(x);
- ///
- /// unsafe {
- /// // Convert back to an `Arc` to prevent leak.
- /// let x = Arc::from_raw(x_ptr);
- /// assert_eq!(*x, 10);
- ///
- /// // Further calls to `Arc::from_raw(x_ptr)` would be memory unsafe.
- /// }
- ///
- /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
- /// ```
- #[stable(feature = "rc_raw", since = "1.17.0")]
- pub unsafe fn from_raw(ptr: *const T) -> Self {
- // Align the unsized value to the end of the ArcInner.
- // Because it is ?Sized, it will always be the last field in memory.
- let align = align_of_val(&*ptr);
- let layout = Layout::new::<ArcInner<()>>();
- let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
-
- // Reverse the offset to find the original ArcInner.
- let fake_ptr = ptr as *mut ArcInner<T>;
- let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
-
- Arc {
- ptr: NonNull::new_unchecked(arc_ptr),
- phantom: PhantomData,
- }
- }
-
- /// Creates a new [`Weak`][weak] pointer to this value.
- ///
- /// [weak]: struct.Weak.html
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// let weak_five = Arc::downgrade(&five);
- /// ```
- #[stable(feature = "arc_weak", since = "1.4.0")]
- pub fn downgrade(this: &Self) -> Weak<T> {
- // This Relaxed is OK because we're checking the value in the CAS
- // below.
- let mut cur = this.inner().weak.load(Relaxed);
-
- loop {
- // check if the weak counter is currently "locked"; if so, spin.
- if cur == usize::MAX {
- cur = this.inner().weak.load(Relaxed);
- continue;
- }
-
- // NOTE: this code currently ignores the possibility of overflow
- // into usize::MAX; in general both Rc and Arc need to be adjusted
- // to deal with overflow.
-
- // Unlike with Clone(), we need this to be an Acquire read to
- // synchronize with the write coming from `is_unique`, so that the
- // events prior to that write happen before this read.
- match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
- Ok(_) => return Weak { ptr: this.ptr },
- Err(old) => cur = old,
- }
- }
- }
-
- /// Gets the number of [`Weak`][weak] pointers to this value.
- ///
- /// [weak]: struct.Weak.html
- ///
- /// # Safety
- ///
- /// This method by itself is safe, but using it correctly requires extra care.
- /// Another thread can change the weak count at any time,
- /// including potentially between calling this method and acting on the result.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- /// let _weak_five = Arc::downgrade(&five);
- ///
- /// // This assertion is deterministic because we haven't shared
- /// // the `Arc` or `Weak` between threads.
- /// assert_eq!(1, Arc::weak_count(&five));
- /// ```
- #[inline]
- #[stable(feature = "arc_counts", since = "1.15.0")]
- pub fn weak_count(this: &Self) -> usize {
- let cnt = this.inner().weak.load(SeqCst);
- // If the weak count is currently locked, the value of the
- // count was 0 just before taking the lock.
- if cnt == usize::MAX { 0 } else { cnt - 1 }
- }
-
- /// Gets the number of strong (`Arc`) pointers to this value.
- ///
- /// # Safety
- ///
- /// This method by itself is safe, but using it correctly requires extra care.
- /// Another thread can change the strong count at any time,
- /// including potentially between calling this method and acting on the result.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- /// let _also_five = Arc::clone(&five);
- ///
- /// // This assertion is deterministic because we haven't shared
- /// // the `Arc` between threads.
- /// assert_eq!(2, Arc::strong_count(&five));
- /// ```
- #[inline]
- #[stable(feature = "arc_counts", since = "1.15.0")]
- pub fn strong_count(this: &Self) -> usize {
- this.inner().strong.load(SeqCst)
- }
-
- #[inline]
- fn inner(&self) -> &ArcInner<T> {
- // This unsafety is ok because while this arc is alive we're guaranteed
- // that the inner pointer is valid. Furthermore, we know that the
- // `ArcInner` structure itself is `Sync` because the inner data is
- // `Sync` as well, so we're ok loaning out an immutable pointer to these
- // contents.
- unsafe { self.ptr.as_ref() }
- }
-
- // Non-inlined part of `drop`.
- #[inline(never)]
- unsafe fn drop_slow(&mut self) {
- // Destroy the data at this time, even though we may not free the box
- // allocation itself (there may still be weak pointers lying around).
- ptr::drop_in_place(&mut self.ptr.as_mut().data);
-
- if self.inner().weak.fetch_sub(1, Release) == 1 {
- atomic::fence(Acquire);
- Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
- }
- }
-
- #[inline]
- #[stable(feature = "ptr_eq", since = "1.17.0")]
- /// Returns true if the two `Arc`s point to the same value (not
- /// just values that compare as equal).
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- /// let same_five = Arc::clone(&five);
- /// let other_five = Arc::new(5);
- ///
- /// assert!(Arc::ptr_eq(&five, &same_five));
- /// assert!(!Arc::ptr_eq(&five, &other_five));
- /// ```
- pub fn ptr_eq(this: &Self, other: &Self) -> bool {
- this.ptr.as_ptr() == other.ptr.as_ptr()
- }
-}
-
-impl<T: ?Sized> Arc<T> {
- // Allocates an `ArcInner<T>` with sufficient space for an unsized value
- unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
- // Create a fake ArcInner to find allocation size and alignment
- let fake_ptr = ptr as *mut ArcInner<T>;
-
- let layout = Layout::for_value(&*fake_ptr);
-
- let mem = Global.alloc(layout)
- .unwrap_or_else(|_| handle_alloc_error(layout));
-
- // Initialize the real ArcInner
- let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
-
- ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
- ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
-
- inner
- }
-
- fn from_box(v: Box<T>) -> Arc<T> {
- unsafe {
- let box_unique = Box::into_unique(v);
- let bptr = box_unique.as_ptr();
-
- let value_size = size_of_val(&*bptr);
- let ptr = Self::allocate_for_ptr(bptr);
-
- // Copy value as bytes
- ptr::copy_nonoverlapping(
- bptr as *const T as *const u8,
- &mut (*ptr).data as *mut _ as *mut u8,
- value_size);
-
- // Free the allocation without dropping its contents
- box_free(box_unique);
-
- Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
- }
- }
-}
-
-// Sets the data pointer of a `?Sized` raw pointer.
-//
-// For a slice/trait object, this sets the `data` field and leaves the rest
-// unchanged. For a sized raw pointer, this simply sets the pointer.
-unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
- ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
- ptr
-}
-
-impl<T> Arc<[T]> {
- // Copy elements from slice into newly allocated Arc<[T]>
- //
- // Unsafe because the caller must either take ownership or bind `T: Copy`
- unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
- let v_ptr = v as *const [T];
- let ptr = Self::allocate_for_ptr(v_ptr);
-
- ptr::copy_nonoverlapping(
- v.as_ptr(),
- &mut (*ptr).data as *mut [T] as *mut T,
- v.len());
-
- Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
- }
-}
-
-// Specialization trait used for From<&[T]>
-trait ArcFromSlice<T> {
- fn from_slice(slice: &[T]) -> Self;
-}
-
-impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
- #[inline]
- default fn from_slice(v: &[T]) -> Self {
- // Panic guard while cloning T elements.
- // In the event of a panic, elements that have been written
- // into the new ArcInner will be dropped, then the memory freed.
- struct Guard<T> {
- mem: NonNull<u8>,
- elems: *mut T,
- layout: Layout,
- n_elems: usize,
- }
-
- impl<T> Drop for Guard<T> {
- fn drop(&mut self) {
- use core::slice::from_raw_parts_mut;
-
- unsafe {
- let slice = from_raw_parts_mut(self.elems, self.n_elems);
- ptr::drop_in_place(slice);
-
- Global.dealloc(self.mem.cast(), self.layout.clone());
- }
- }
- }
-
- unsafe {
- let v_ptr = v as *const [T];
- let ptr = Self::allocate_for_ptr(v_ptr);
-
- let mem = ptr as *mut _ as *mut u8;
- let layout = Layout::for_value(&*ptr);
-
- // Pointer to first element
- let elems = &mut (*ptr).data as *mut [T] as *mut T;
-
- let mut guard = Guard{
- mem: NonNull::new_unchecked(mem),
- elems: elems,
- layout: layout,
- n_elems: 0,
- };
-
- for (i, item) in v.iter().enumerate() {
- ptr::write(elems.offset(i as isize), item.clone());
- guard.n_elems += 1;
- }
-
- // All clear. Forget the guard so it doesn't free the new ArcInner.
- mem::forget(guard);
-
- Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
- }
- }
-}
-
-impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
- #[inline]
- fn from_slice(v: &[T]) -> Self {
- unsafe { Arc::copy_from_slice(v) }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Clone for Arc<T> {
- /// Makes a clone of the `Arc` pointer.
- ///
- /// This creates another pointer to the same inner value, increasing the
- /// strong reference count.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// Arc::clone(&five);
- /// ```
- #[inline]
- fn clone(&self) -> Arc<T> {
- // Using a relaxed ordering is alright here, as knowledge of the
- // original reference prevents other threads from erroneously deleting
- // the object.
- //
- // As explained in the [Boost documentation][1], Increasing the
- // reference counter can always be done with memory_order_relaxed: New
- // references to an object can only be formed from an existing
- // reference, and passing an existing reference from one thread to
- // another must already provide any required synchronization.
- //
- // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
- let old_size = self.inner().strong.fetch_add(1, Relaxed);
-
- // However we need to guard against massive refcounts in case someone
- // is `mem::forget`ing Arcs. If we don't do this the count can overflow
- // and users will use-after free. We racily saturate to `isize::MAX` on
- // the assumption that there aren't ~2 billion threads incrementing
- // the reference count at once. This branch will never be taken in
- // any realistic program.
- //
- // We abort because such a program is incredibly degenerate, and we
- // don't care to support it.
- if old_size > MAX_REFCOUNT {
- unsafe {
- abort();
- }
- }
-
- Arc { ptr: self.ptr, phantom: PhantomData }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Deref for Arc<T> {
- type Target = T;
-
- #[inline]
- fn deref(&self) -> &T {
- &self.inner().data
- }
-}
-
-impl<T: Clone> Arc<T> {
- /// Makes a mutable reference into the given `Arc`.
- ///
- /// If there are other `Arc` or [`Weak`][weak] pointers to the same value,
- /// then `make_mut` will invoke [`clone`][clone] on the inner value to
- /// ensure unique ownership. This is also referred to as clone-on-write.
- ///
- /// See also [`get_mut`][get_mut], which will fail rather than cloning.
- ///
- /// [weak]: struct.Weak.html
- /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
- /// [get_mut]: struct.Arc.html#method.get_mut
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let mut data = Arc::new(5);
- ///
- /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
- /// let mut other_data = Arc::clone(&data); // Won't clone inner data
- /// *Arc::make_mut(&mut data) += 1; // Clones inner data
- /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
- /// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything
- ///
- /// // Now `data` and `other_data` point to different values.
- /// assert_eq!(*data, 8);
- /// assert_eq!(*other_data, 12);
- /// ```
- #[inline]
- #[stable(feature = "arc_unique", since = "1.4.0")]
- pub fn make_mut(this: &mut Self) -> &mut T {
- // Note that we hold both a strong reference and a weak reference.
- // Thus, releasing our strong reference only will not, by itself, cause
- // the memory to be deallocated.
- //
- // Use Acquire to ensure that we see any writes to `weak` that happen
- // before release writes (i.e., decrements) to `strong`. Since we hold a
- // weak count, there's no chance the ArcInner itself could be
- // deallocated.
- if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
- // Another strong pointer exists; clone
- *this = Arc::new((**this).clone());
- } else if this.inner().weak.load(Relaxed) != 1 {
- // Relaxed suffices in the above because this is fundamentally an
- // optimization: we are always racing with weak pointers being
- // dropped. Worst case, we end up allocated a new Arc unnecessarily.
-
- // We removed the last strong ref, but there are additional weak
- // refs remaining. We'll move the contents to a new Arc, and
- // invalidate the other weak refs.
-
- // Note that it is not possible for the read of `weak` to yield
- // usize::MAX (i.e., locked), since the weak count can only be
- // locked by a thread with a strong reference.
-
- // Materialize our own implicit weak pointer, so that it can clean
- // up the ArcInner as needed.
- let weak = Weak { ptr: this.ptr };
-
- // mark the data itself as already deallocated
- unsafe {
- // there is no data race in the implicit write caused by `read`
- // here (due to zeroing) because data is no longer accessed by
- // other threads (due to there being no more strong refs at this
- // point).
- let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
- mem::swap(this, &mut swap);
- mem::forget(swap);
- }
- } else {
- // We were the sole reference of either kind; bump back up the
- // strong ref count.
- this.inner().strong.store(1, Release);
- }
-
- // As with `get_mut()`, the unsafety is ok because our reference was
- // either unique to begin with, or became one upon cloning the contents.
- unsafe {
- &mut this.ptr.as_mut().data
- }
- }
-}
-
-impl<T: ?Sized> Arc<T> {
- /// Returns a mutable reference to the inner value, if there are
- /// no other `Arc` or [`Weak`][weak] pointers to the same value.
- ///
- /// Returns [`None`][option] otherwise, because it is not safe to
- /// mutate a shared value.
- ///
- /// See also [`make_mut`][make_mut], which will [`clone`][clone]
- /// the inner value when it's shared.
- ///
- /// [weak]: struct.Weak.html
- /// [option]: ../../std/option/enum.Option.html
- /// [make_mut]: struct.Arc.html#method.make_mut
- /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let mut x = Arc::new(3);
- /// *Arc::get_mut(&mut x).unwrap() = 4;
- /// assert_eq!(*x, 4);
- ///
- /// let _y = Arc::clone(&x);
- /// assert!(Arc::get_mut(&mut x).is_none());
- /// ```
- #[inline]
- #[stable(feature = "arc_unique", since = "1.4.0")]
- pub fn get_mut(this: &mut Self) -> Option<&mut T> {
- if this.is_unique() {
- // This unsafety is ok because we're guaranteed that the pointer
- // returned is the *only* pointer that will ever be returned to T. Our
- // reference count is guaranteed to be 1 at this point, and we required
- // the Arc itself to be `mut`, so we're returning the only possible
- // reference to the inner data.
- unsafe {
- Some(&mut this.ptr.as_mut().data)
- }
- } else {
- None
- }
- }
-
- /// Determine whether this is the unique reference (including weak refs) to
- /// the underlying data.
- ///
- /// Note that this requires locking the weak ref count.
- fn is_unique(&mut self) -> bool {
- // lock the weak pointer count if we appear to be the sole weak pointer
- // holder.
- //
- // The acquire label here ensures a happens-before relationship with any
- // writes to `strong` prior to decrements of the `weak` count (via drop,
- // which uses Release).
- if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
- // Due to the previous acquire read, this will observe any writes to
- // `strong` that were due to upgrading weak pointers; only strong
- // clones remain, which require that the strong count is > 1 anyway.
- let unique = self.inner().strong.load(Relaxed) == 1;
-
- // The release write here synchronizes with a read in `downgrade`,
- // effectively preventing the above read of `strong` from happening
- // after the write.
- self.inner().weak.store(1, Release); // release the lock
- unique
- } else {
- false
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
- /// Drops the `Arc`.
- ///
- /// This will decrement the strong reference count. If the strong reference
- /// count reaches zero then the only other references (if any) are
- /// [`Weak`][weak], so we `drop` the inner value.
- ///
- /// [weak]: struct.Weak.html
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// struct Foo;
- ///
- /// impl Drop for Foo {
- /// fn drop(&mut self) {
- /// println!("dropped!");
- /// }
- /// }
- ///
- /// let foo = Arc::new(Foo);
- /// let foo2 = Arc::clone(&foo);
- ///
- /// drop(foo); // Doesn't print anything
- /// drop(foo2); // Prints "dropped!"
- /// ```
- #[inline]
- fn drop(&mut self) {
- // Because `fetch_sub` is already atomic, we do not need to synchronize
- // with other threads unless we are going to delete the object. This
- // same logic applies to the below `fetch_sub` to the `weak` count.
- if self.inner().strong.fetch_sub(1, Release) != 1 {
- return;
- }
-
- // This fence is needed to prevent reordering of use of the data and
- // deletion of the data. Because it is marked `Release`, the decreasing
- // of the reference count synchronizes with this `Acquire` fence. This
- // means that use of the data happens before decreasing the reference
- // count, which happens before this fence, which happens before the
- // deletion of the data.
- //
- // As explained in the [Boost documentation][1],
- //
- // > It is important to enforce any possible access to the object in one
- // > thread (through an existing reference) to *happen before* deleting
- // > the object in a different thread. This is achieved by a "release"
- // > operation after dropping a reference (any access to the object
- // > through this reference must obviously happened before), and an
- // > "acquire" operation before deleting the object.
- //
- // In particular, while the contents of an Arc are usually immutable, it's
- // possible to have interior writes to something like a Mutex<T>. Since a
- // Mutex is not acquired when it is deleted, we can't rely on its
- // synchronization logic to make writes in thread A visible to a destructor
- // running in thread B.
- //
- // Also note that the Acquire fence here could probably be replaced with an
- // Acquire load, which could improve performance in highly-contended
- // situations. See [2].
- //
- // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
- // [2]: (https://github.com/rust-lang/rust/pull/41714)
- atomic::fence(Acquire);
-
- unsafe {
- self.drop_slow();
- }
- }
-}
-
-impl Arc<Any + Send + Sync> {
- #[inline]
- #[unstable(feature = "rc_downcast", issue = "44608")]
- /// Attempt to downcast the `Arc<Any + Send + Sync>` to a concrete type.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(rc_downcast)]
- /// use std::any::Any;
- /// use std::sync::Arc;
- ///
- /// fn print_if_string(value: Arc<Any + Send + Sync>) {
- /// if let Ok(string) = value.downcast::<String>() {
- /// println!("String ({}): {}", string.len(), string);
- /// }
- /// }
- ///
- /// fn main() {
- /// let my_string = "Hello World".to_string();
- /// print_if_string(Arc::new(my_string));
- /// print_if_string(Arc::new(0i8));
- /// }
- /// ```
- pub fn downcast<T>(self) -> Result<Arc<T>, Self>
- where
- T: Any + Send + Sync + 'static,
- {
- if (*self).is::<T>() {
- let ptr = self.ptr.cast::<ArcInner<T>>();
- mem::forget(self);
- Ok(Arc { ptr, phantom: PhantomData })
- } else {
- Err(self)
- }
- }
-}
-
-impl<T> Weak<T> {
- /// Constructs a new `Weak<T>`, without allocating any memory.
- /// Calling [`upgrade`] on the return value always gives [`None`].
- ///
- /// [`upgrade`]: struct.Weak.html#method.upgrade
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Weak;
- ///
- /// let empty: Weak<i64> = Weak::new();
- /// assert!(empty.upgrade().is_none());
- /// ```
- #[stable(feature = "downgraded_weak", since = "1.10.0")]
- pub fn new() -> Weak<T> {
- unsafe {
- Weak {
- ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
- }
- }
- }
-}
-
-impl<T: ?Sized> Weak<T> {
- /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending
- /// the lifetime of the value if successful.
- ///
- /// Returns [`None`] if the value has since been dropped.
- ///
- /// [`Arc`]: struct.Arc.html
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// let weak_five = Arc::downgrade(&five);
- ///
- /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
- /// assert!(strong_five.is_some());
- ///
- /// // Destroy all strong pointers.
- /// drop(strong_five);
- /// drop(five);
- ///
- /// assert!(weak_five.upgrade().is_none());
- /// ```
- #[stable(feature = "arc_weak", since = "1.4.0")]
- pub fn upgrade(&self) -> Option<Arc<T>> {
- // We use a CAS loop to increment the strong count instead of a
- // fetch_add because once the count hits 0 it must never be above 0.
- let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
- return None;
- } else {
- unsafe { self.ptr.as_ref() }
- };
-
- // Relaxed load because any write of 0 that we can observe
- // leaves the field in a permanently zero state (so a
- // "stale" read of 0 is fine), and any other value is
- // confirmed via the CAS below.
- let mut n = inner.strong.load(Relaxed);
-
- loop {
- if n == 0 {
- return None;
- }
-
- // See comments in `Arc::clone` for why we do this (for `mem::forget`).
- if n > MAX_REFCOUNT {
- unsafe {
- abort();
- }
- }
-
- // Relaxed is valid for the same reason it is on Arc's Clone impl
- match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
- Ok(_) => return Some(Arc {
- // null checked above
- ptr: self.ptr,
- phantom: PhantomData,
- }),
- Err(old) => n = old,
- }
- }
- }
-}
-
-#[stable(feature = "arc_weak", since = "1.4.0")]
-impl<T: ?Sized> Clone for Weak<T> {
- /// Makes a clone of the `Weak` pointer that points to the same value.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::{Arc, Weak};
- ///
- /// let weak_five = Arc::downgrade(&Arc::new(5));
- ///
- /// Weak::clone(&weak_five);
- /// ```
- #[inline]
- fn clone(&self) -> Weak<T> {
- let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
- return Weak { ptr: self.ptr };
- } else {
- unsafe { self.ptr.as_ref() }
- };
- // See comments in Arc::clone() for why this is relaxed. This can use a
- // fetch_add (ignoring the lock) because the weak count is only locked
- // where are *no other* weak pointers in existence. (So we can't be
- // running this code in that case).
- let old_size = inner.weak.fetch_add(1, Relaxed);
-
- // See comments in Arc::clone() for why we do this (for mem::forget).
- if old_size > MAX_REFCOUNT {
- unsafe {
- abort();
- }
- }
-
- return Weak { ptr: self.ptr };
- }
-}
-
-#[stable(feature = "downgraded_weak", since = "1.10.0")]
-impl<T> Default for Weak<T> {
- /// Constructs a new `Weak<T>`, without allocating memory.
- /// Calling [`upgrade`] on the return value always gives [`None`].
- ///
- /// [`upgrade`]: struct.Weak.html#method.upgrade
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Weak;
- ///
- /// let empty: Weak<i64> = Default::default();
- /// assert!(empty.upgrade().is_none());
- /// ```
- fn default() -> Weak<T> {
- Weak::new()
- }
-}
-
-#[stable(feature = "arc_weak", since = "1.4.0")]
-impl<T: ?Sized> Drop for Weak<T> {
- /// Drops the `Weak` pointer.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::{Arc, Weak};
- ///
- /// struct Foo;
- ///
- /// impl Drop for Foo {
- /// fn drop(&mut self) {
- /// println!("dropped!");
- /// }
- /// }
- ///
- /// let foo = Arc::new(Foo);
- /// let weak_foo = Arc::downgrade(&foo);
- /// let other_weak_foo = Weak::clone(&weak_foo);
- ///
- /// drop(weak_foo); // Doesn't print anything
- /// drop(foo); // Prints "dropped!"
- ///
- /// assert!(other_weak_foo.upgrade().is_none());
- /// ```
- fn drop(&mut self) {
- // If we find out that we were the last weak pointer, then its time to
- // deallocate the data entirely. See the discussion in Arc::drop() about
- // the memory orderings
- //
- // It's not necessary to check for the locked state here, because the
- // weak count can only be locked if there was precisely one weak ref,
- // meaning that drop could only subsequently run ON that remaining weak
- // ref, which can only happen after the lock is released.
- let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
- return;
- } else {
- unsafe { self.ptr.as_ref() }
- };
-
- if inner.weak.fetch_sub(1, Release) == 1 {
- atomic::fence(Acquire);
- unsafe {
- Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
- }
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
- /// Equality for two `Arc`s.
- ///
- /// Two `Arc`s are equal if their inner values are equal.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert!(five == Arc::new(5));
- /// ```
- fn eq(&self, other: &Arc<T>) -> bool {
- *(*self) == *(*other)
- }
-
- /// Inequality for two `Arc`s.
- ///
- /// Two `Arc`s are unequal if their inner values are unequal.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert!(five != Arc::new(6));
- /// ```
- fn ne(&self, other: &Arc<T>) -> bool {
- *(*self) != *(*other)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
- /// Partial comparison for two `Arc`s.
- ///
- /// The two are compared by calling `partial_cmp()` on their inner values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- /// use std::cmp::Ordering;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
- /// ```
- fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
- (**self).partial_cmp(&**other)
- }
-
- /// Less-than comparison for two `Arc`s.
- ///
- /// The two are compared by calling `<` on their inner values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert!(five < Arc::new(6));
- /// ```
- fn lt(&self, other: &Arc<T>) -> bool {
- *(*self) < *(*other)
- }
-
- /// 'Less than or equal to' comparison for two `Arc`s.
- ///
- /// The two are compared by calling `<=` on their inner values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert!(five <= Arc::new(5));
- /// ```
- fn le(&self, other: &Arc<T>) -> bool {
- *(*self) <= *(*other)
- }
-
- /// Greater-than comparison for two `Arc`s.
- ///
- /// The two are compared by calling `>` on their inner values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert!(five > Arc::new(4));
- /// ```
- fn gt(&self, other: &Arc<T>) -> bool {
- *(*self) > *(*other)
- }
-
- /// 'Greater than or equal to' comparison for two `Arc`s.
- ///
- /// The two are compared by calling `>=` on their inner values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert!(five >= Arc::new(5));
- /// ```
- fn ge(&self, other: &Arc<T>) -> bool {
- *(*self) >= *(*other)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Ord> Ord for Arc<T> {
- /// Comparison for two `Arc`s.
- ///
- /// The two are compared by calling `cmp()` on their inner values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- /// use std::cmp::Ordering;
- ///
- /// let five = Arc::new(5);
- ///
- /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
- /// ```
- fn cmp(&self, other: &Arc<T>) -> Ordering {
- (**self).cmp(&**other)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> Eq for Arc<T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Display::fmt(&**self, f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Debug::fmt(&**self, f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> fmt::Pointer for Arc<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Pointer::fmt(&(&**self as *const T), f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Default> Default for Arc<T> {
- /// Creates a new `Arc<T>`, with the `Default` value for `T`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::sync::Arc;
- ///
- /// let x: Arc<i32> = Default::default();
- /// assert_eq!(*x, 0);
- /// ```
- fn default() -> Arc<T> {
- Arc::new(Default::default())
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Hash> Hash for Arc<T> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- (**self).hash(state)
- }
-}
-
-#[stable(feature = "from_for_ptrs", since = "1.6.0")]
-impl<T> From<T> for Arc<T> {
- fn from(t: T) -> Self {
- Arc::new(t)
- }
-}
-
-#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<'a, T: Clone> From<&'a [T]> for Arc<[T]> {
- #[inline]
- fn from(v: &[T]) -> Arc<[T]> {
- <Self as ArcFromSlice<T>>::from_slice(v)
- }
-}
-
-#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<'a> From<&'a str> for Arc<str> {
- #[inline]
- fn from(v: &str) -> Arc<str> {
- let arc = Arc::<[u8]>::from(v.as_bytes());
- unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
- }
-}
-
-#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl From<String> for Arc<str> {
- #[inline]
- fn from(v: String) -> Arc<str> {
- Arc::from(&v[..])
- }
-}
-
-#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T: ?Sized> From<Box<T>> for Arc<T> {
- #[inline]
- fn from(v: Box<T>) -> Arc<T> {
- Arc::from_box(v)
- }
-}
-
-#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T> From<Vec<T>> for Arc<[T]> {
- #[inline]
- fn from(mut v: Vec<T>) -> Arc<[T]> {
- unsafe {
- let arc = Arc::copy_from_slice(&v);
-
- // Allow the Vec to free its memory, but not destroy its contents
- v.set_len(0);
-
- arc
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use std::boxed::Box;
- use std::clone::Clone;
- use std::sync::mpsc::channel;
- use std::mem::drop;
- use std::ops::Drop;
- use std::option::Option;
- use std::option::Option::{None, Some};
- use std::sync::atomic;
- use std::sync::atomic::Ordering::{Acquire, SeqCst};
- use std::thread;
- use std::sync::Mutex;
- use std::convert::From;
-
- use super::{Arc, Weak};
- use vec::Vec;
-
- struct Canary(*mut atomic::AtomicUsize);
-
- impl Drop for Canary {
- fn drop(&mut self) {
- unsafe {
- match *self {
- Canary(c) => {
- (*c).fetch_add(1, SeqCst);
- }
- }
- }
- }
- }
-
- #[test]
- #[cfg_attr(target_os = "emscripten", ignore)]
- fn manually_share_arc() {
- let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
- let arc_v = Arc::new(v);
-
- let (tx, rx) = channel();
-
- let _t = thread::spawn(move || {
- let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
- assert_eq!((*arc_v)[3], 4);
- });
-
- tx.send(arc_v.clone()).unwrap();
-
- assert_eq!((*arc_v)[2], 3);
- assert_eq!((*arc_v)[4], 5);
- }
-
- #[test]
- fn test_arc_get_mut() {
- let mut x = Arc::new(3);
- *Arc::get_mut(&mut x).unwrap() = 4;
- assert_eq!(*x, 4);
- let y = x.clone();
- assert!(Arc::get_mut(&mut x).is_none());
- drop(y);
- assert!(Arc::get_mut(&mut x).is_some());
- let _w = Arc::downgrade(&x);
- assert!(Arc::get_mut(&mut x).is_none());
- }
-
- #[test]
- fn try_unwrap() {
- let x = Arc::new(3);
- assert_eq!(Arc::try_unwrap(x), Ok(3));
- let x = Arc::new(4);
- let _y = x.clone();
- assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
- let x = Arc::new(5);
- let _w = Arc::downgrade(&x);
- assert_eq!(Arc::try_unwrap(x), Ok(5));
- }
-
- #[test]
- fn into_from_raw() {
- let x = Arc::new(box "hello");
- let y = x.clone();
-
- let x_ptr = Arc::into_raw(x);
- drop(y);
- unsafe {
- assert_eq!(**x_ptr, "hello");
-
- let x = Arc::from_raw(x_ptr);
- assert_eq!(**x, "hello");
-
- assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello"));
- }
- }
-
- #[test]
- fn test_into_from_raw_unsized() {
- use std::fmt::Display;
- use std::string::ToString;
-
- let arc: Arc<str> = Arc::from("foo");
-
- let ptr = Arc::into_raw(arc.clone());
- let arc2 = unsafe { Arc::from_raw(ptr) };
-
- assert_eq!(unsafe { &*ptr }, "foo");
- assert_eq!(arc, arc2);
-
- let arc: Arc<Display> = Arc::new(123);
-
- let ptr = Arc::into_raw(arc.clone());
- let arc2 = unsafe { Arc::from_raw(ptr) };
-
- assert_eq!(unsafe { &*ptr }.to_string(), "123");
- assert_eq!(arc2.to_string(), "123");
- }
-
- #[test]
- fn test_cowarc_clone_make_mut() {
- let mut cow0 = Arc::new(75);
- let mut cow1 = cow0.clone();
- let mut cow2 = cow1.clone();
-
- assert!(75 == *Arc::make_mut(&mut cow0));
- assert!(75 == *Arc::make_mut(&mut cow1));
- assert!(75 == *Arc::make_mut(&mut cow2));
-
- *Arc::make_mut(&mut cow0) += 1;
- *Arc::make_mut(&mut cow1) += 2;
- *Arc::make_mut(&mut cow2) += 3;
-
- assert!(76 == *cow0);
- assert!(77 == *cow1);
- assert!(78 == *cow2);
-
- // none should point to the same backing memory
- assert!(*cow0 != *cow1);
- assert!(*cow0 != *cow2);
- assert!(*cow1 != *cow2);
- }
-
- #[test]
- fn test_cowarc_clone_unique2() {
- let mut cow0 = Arc::new(75);
- let cow1 = cow0.clone();
- let cow2 = cow1.clone();
-
- assert!(75 == *cow0);
- assert!(75 == *cow1);
- assert!(75 == *cow2);
-
- *Arc::make_mut(&mut cow0) += 1;
- assert!(76 == *cow0);
- assert!(75 == *cow1);
- assert!(75 == *cow2);
-
- // cow1 and cow2 should share the same contents
- // cow0 should have a unique reference
- assert!(*cow0 != *cow1);
- assert!(*cow0 != *cow2);
- assert!(*cow1 == *cow2);
- }
-
- #[test]
- fn test_cowarc_clone_weak() {
- let mut cow0 = Arc::new(75);
- let cow1_weak = Arc::downgrade(&cow0);
-
- assert!(75 == *cow0);
- assert!(75 == *cow1_weak.upgrade().unwrap());
-
- *Arc::make_mut(&mut cow0) += 1;
-
- assert!(76 == *cow0);
- assert!(cow1_weak.upgrade().is_none());
- }
-
- #[test]
- fn test_live() {
- let x = Arc::new(5);
- let y = Arc::downgrade(&x);
- assert!(y.upgrade().is_some());
- }
-
- #[test]
- fn test_dead() {
- let x = Arc::new(5);
- let y = Arc::downgrade(&x);
- drop(x);
- assert!(y.upgrade().is_none());
- }
-
- #[test]
- fn weak_self_cyclic() {
- struct Cycle {
- x: Mutex<Option<Weak<Cycle>>>,
- }
-
- let a = Arc::new(Cycle { x: Mutex::new(None) });
- let b = Arc::downgrade(&a.clone());
- *a.x.lock().unwrap() = Some(b);
-
- // hopefully we don't double-free (or leak)...
- }
-
- #[test]
- fn drop_arc() {
- let mut canary = atomic::AtomicUsize::new(0);
- let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
- drop(x);
- assert!(canary.load(Acquire) == 1);
- }
-
- #[test]
- fn drop_arc_weak() {
- let mut canary = atomic::AtomicUsize::new(0);
- let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
- let arc_weak = Arc::downgrade(&arc);
- assert!(canary.load(Acquire) == 0);
- drop(arc);
- assert!(canary.load(Acquire) == 1);
- drop(arc_weak);
- }
-
- #[test]
- fn test_strong_count() {
- let a = Arc::new(0);
- assert!(Arc::strong_count(&a) == 1);
- let w = Arc::downgrade(&a);
- assert!(Arc::strong_count(&a) == 1);
- let b = w.upgrade().expect("");
- assert!(Arc::strong_count(&b) == 2);
- assert!(Arc::strong_count(&a) == 2);
- drop(w);
- drop(a);
- assert!(Arc::strong_count(&b) == 1);
- let c = b.clone();
- assert!(Arc::strong_count(&b) == 2);
- assert!(Arc::strong_count(&c) == 2);
- }
-
- #[test]
- fn test_weak_count() {
- let a = Arc::new(0);
- assert!(Arc::strong_count(&a) == 1);
- assert!(Arc::weak_count(&a) == 0);
- let w = Arc::downgrade(&a);
- assert!(Arc::strong_count(&a) == 1);
- assert!(Arc::weak_count(&a) == 1);
- let x = w.clone();
- assert!(Arc::weak_count(&a) == 2);
- drop(w);
- drop(x);
- assert!(Arc::strong_count(&a) == 1);
- assert!(Arc::weak_count(&a) == 0);
- let c = a.clone();
- assert!(Arc::strong_count(&a) == 2);
- assert!(Arc::weak_count(&a) == 0);
- let d = Arc::downgrade(&c);
- assert!(Arc::weak_count(&c) == 1);
- assert!(Arc::strong_count(&c) == 2);
-
- drop(a);
- drop(c);
- drop(d);
- }
-
- #[test]
- fn show_arc() {
- let a = Arc::new(5);
- assert_eq!(format!("{:?}", a), "5");
- }
-
- // Make sure deriving works with Arc<T>
- #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
- struct Foo {
- inner: Arc<i32>,
- }
-
- #[test]
- fn test_unsized() {
- let x: Arc<[i32]> = Arc::new([1, 2, 3]);
- assert_eq!(format!("{:?}", x), "[1, 2, 3]");
- let y = Arc::downgrade(&x.clone());
- drop(x);
- assert!(y.upgrade().is_none());
- }
-
- #[test]
- fn test_from_owned() {
- let foo = 123;
- let foo_arc = Arc::from(foo);
- assert!(123 == *foo_arc);
- }
-
- #[test]
- fn test_new_weak() {
- let foo: Weak<usize> = Weak::new();
- assert!(foo.upgrade().is_none());
- }
-
- #[test]
- fn test_ptr_eq() {
- let five = Arc::new(5);
- let same_five = five.clone();
- let other_five = Arc::new(5);
-
- assert!(Arc::ptr_eq(&five, &same_five));
- assert!(!Arc::ptr_eq(&five, &other_five));
- }
-
- #[test]
- #[cfg_attr(target_os = "emscripten", ignore)]
- fn test_weak_count_locked() {
- let mut a = Arc::new(atomic::AtomicBool::new(false));
- let a2 = a.clone();
- let t = thread::spawn(move || {
- for _i in 0..1000000 {
- Arc::get_mut(&mut a);
- }
- a.store(true, SeqCst);
- });
-
- while !a2.load(SeqCst) {
- let n = Arc::weak_count(&a2);
- assert!(n < 2, "bad weak count: {}", n);
- }
- t.join().unwrap();
- }
-
- #[test]
- fn test_from_str() {
- let r: Arc<str> = Arc::from("foo");
-
- assert_eq!(&r[..], "foo");
- }
-
- #[test]
- fn test_copy_from_slice() {
- let s: &[u32] = &[1, 2, 3];
- let r: Arc<[u32]> = Arc::from(s);
-
- assert_eq!(&r[..], [1, 2, 3]);
- }
-
- #[test]
- fn test_clone_from_slice() {
- #[derive(Clone, Debug, Eq, PartialEq)]
- struct X(u32);
-
- let s: &[X] = &[X(1), X(2), X(3)];
- let r: Arc<[X]> = Arc::from(s);
-
- assert_eq!(&r[..], s);
- }
-
- #[test]
- #[should_panic]
- fn test_clone_from_slice_panic() {
- use std::string::{String, ToString};
-
- struct Fail(u32, String);
-
- impl Clone for Fail {
- fn clone(&self) -> Fail {
- if self.0 == 2 {
- panic!();
- }
- Fail(self.0, self.1.clone())
- }
- }
-
- let s: &[Fail] = &[
- Fail(0, "foo".to_string()),
- Fail(1, "bar".to_string()),
- Fail(2, "baz".to_string()),
- ];
-
- // Should panic, but not cause memory corruption
- let _r: Arc<[Fail]> = Arc::from(s);
- }
-
- #[test]
- fn test_from_box() {
- let b: Box<u32> = box 123;
- let r: Arc<u32> = Arc::from(b);
-
- assert_eq!(*r, 123);
- }
-
- #[test]
- fn test_from_box_str() {
- use std::string::String;
-
- let s = String::from("foo").into_boxed_str();
- let r: Arc<str> = Arc::from(s);
-
- assert_eq!(&r[..], "foo");
- }
-
- #[test]
- fn test_from_box_slice() {
- let s = vec![1, 2, 3].into_boxed_slice();
- let r: Arc<[u32]> = Arc::from(s);
-
- assert_eq!(&r[..], [1, 2, 3]);
- }
-
- #[test]
- fn test_from_box_trait() {
- use std::fmt::Display;
- use std::string::ToString;
-
- let b: Box<Display> = box 123;
- let r: Arc<Display> = Arc::from(b);
-
- assert_eq!(r.to_string(), "123");
- }
-
- #[test]
- fn test_from_box_trait_zero_sized() {
- use std::fmt::Debug;
-
- let b: Box<Debug> = box ();
- let r: Arc<Debug> = Arc::from(b);
-
- assert_eq!(format!("{:?}", r), "()");
- }
-
- #[test]
- fn test_from_vec() {
- let v = vec![1, 2, 3];
- let r: Arc<[u32]> = Arc::from(v);
-
- assert_eq!(&r[..], [1, 2, 3]);
- }
-
- #[test]
- fn test_downcast() {
- use std::any::Any;
-
- let r1: Arc<Any + Send + Sync> = Arc::new(i32::max_value());
- let r2: Arc<Any + Send + Sync> = Arc::new("abc");
-
- assert!(r1.clone().downcast::<u32>().is_err());
-
- let r1i32 = r1.downcast::<i32>();
- assert!(r1i32.is_ok());
- assert_eq!(r1i32.unwrap(), Arc::new(i32::max_value()));
-
- assert!(r2.clone().downcast::<i32>().is_err());
-
- let r2str = r2.downcast::<&'static str>();
- assert!(r2str.is_ok());
- assert_eq!(r2str.unwrap(), Arc::new("abc"));
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
- fn borrow(&self) -> &T {
- &**self
- }
-}
-
-#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized> AsRef<T> for Arc<T> {
- fn as_ref(&self) -> &T {
- &**self
- }
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A priority queue implemented with a binary heap.
-//!
-//! Insertion and popping the largest element have `O(log n)` time complexity.
-//! Checking the largest element is `O(1)`. Converting a vector to a binary heap
-//! can be done in-place, and has `O(n)` complexity. A binary heap can also be
-//! converted to a sorted vector in-place, allowing it to be used for an `O(n
-//! log n)` in-place heapsort.
-//!
-//! # Examples
-//!
-//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
-//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
-//! It shows how to use [`BinaryHeap`] with custom types.
-//!
-//! [dijkstra]: http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
-//! [sssp]: http://en.wikipedia.org/wiki/Shortest_path_problem
-//! [dir_graph]: http://en.wikipedia.org/wiki/Directed_graph
-//! [`BinaryHeap`]: struct.BinaryHeap.html
-//!
-//! ```
-//! use std::cmp::Ordering;
-//! use std::collections::BinaryHeap;
-//! use std::usize;
-//!
-//! #[derive(Copy, Clone, Eq, PartialEq)]
-//! struct State {
-//! cost: usize,
-//! position: usize,
-//! }
-//!
-//! // The priority queue depends on `Ord`.
-//! // Explicitly implement the trait so the queue becomes a min-heap
-//! // instead of a max-heap.
-//! impl Ord for State {
-//! fn cmp(&self, other: &State) -> Ordering {
-//! // Notice that the we flip the ordering on costs.
-//! // In case of a tie we compare positions - this step is necessary
-//! // to make implementations of `PartialEq` and `Ord` consistent.
-//! other.cost.cmp(&self.cost)
-//! .then_with(|| self.position.cmp(&other.position))
-//! }
-//! }
-//!
-//! // `PartialOrd` needs to be implemented as well.
-//! impl PartialOrd for State {
-//! fn partial_cmp(&self, other: &State) -> Option<Ordering> {
-//! Some(self.cmp(other))
-//! }
-//! }
-//!
-//! // Each node is represented as an `usize`, for a shorter implementation.
-//! struct Edge {
-//! node: usize,
-//! cost: usize,
-//! }
-//!
-//! // Dijkstra's shortest path algorithm.
-//!
-//! // Start at `start` and use `dist` to track the current shortest distance
-//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
-//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
-//! // for a simpler implementation.
-//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
-//! // dist[node] = current shortest distance from `start` to `node`
-//! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
-//!
-//! let mut heap = BinaryHeap::new();
-//!
-//! // We're at `start`, with a zero cost
-//! dist[start] = 0;
-//! heap.push(State { cost: 0, position: start });
-//!
-//! // Examine the frontier with lower cost nodes first (min-heap)
-//! while let Some(State { cost, position }) = heap.pop() {
-//! // Alternatively we could have continued to find all shortest paths
-//! if position == goal { return Some(cost); }
-//!
-//! // Important as we may have already found a better way
-//! if cost > dist[position] { continue; }
-//!
-//! // For each node we can reach, see if we can find a way with
-//! // a lower cost going through this node
-//! for edge in &adj_list[position] {
-//! let next = State { cost: cost + edge.cost, position: edge.node };
-//!
-//! // If so, add it to the frontier and continue
-//! if next.cost < dist[next.position] {
-//! heap.push(next);
-//! // Relaxation, we have now found a better way
-//! dist[next.position] = next.cost;
-//! }
-//! }
-//! }
-//!
-//! // Goal not reachable
-//! None
-//! }
-//!
-//! fn main() {
-//! // This is the directed graph we're going to use.
-//! // The node numbers correspond to the different states,
-//! // and the edge weights symbolize the cost of moving
-//! // from one node to another.
-//! // Note that the edges are one-way.
-//! //
-//! // 7
-//! // +-----------------+
-//! // | |
-//! // v 1 2 | 2
-//! // 0 -----> 1 -----> 3 ---> 4
-//! // | ^ ^ ^
-//! // | | 1 | |
-//! // | | | 3 | 1
-//! // +------> 2 -------+ |
-//! // 10 | |
-//! // +---------------+
-//! //
-//! // The graph is represented as an adjacency list where each index,
-//! // corresponding to a node value, has a list of outgoing edges.
-//! // Chosen for its efficiency.
-//! let graph = vec![
-//! // Node 0
-//! vec![Edge { node: 2, cost: 10 },
-//! Edge { node: 1, cost: 1 }],
-//! // Node 1
-//! vec![Edge { node: 3, cost: 2 }],
-//! // Node 2
-//! vec![Edge { node: 1, cost: 1 },
-//! Edge { node: 3, cost: 3 },
-//! Edge { node: 4, cost: 1 }],
-//! // Node 3
-//! vec![Edge { node: 0, cost: 7 },
-//! Edge { node: 4, cost: 2 }],
-//! // Node 4
-//! vec![]];
-//!
-//! assert_eq!(shortest_path(&graph, 0, 1), Some(1));
-//! assert_eq!(shortest_path(&graph, 0, 3), Some(3));
-//! assert_eq!(shortest_path(&graph, 3, 0), Some(7));
-//! assert_eq!(shortest_path(&graph, 0, 4), Some(5));
-//! assert_eq!(shortest_path(&graph, 4, 0), None);
-//! }
-//! ```
-
-#![allow(missing_docs)]
-#![stable(feature = "rust1", since = "1.0.0")]
-
-use core::ops::{Deref, DerefMut};
-use core::iter::{FromIterator, FusedIterator};
-use core::mem::{swap, size_of, ManuallyDrop};
-use core::ptr;
-use core::fmt;
-
-use slice;
-use vec::{self, Vec};
-
-use super::SpecExtend;
-
-/// A priority queue implemented with a binary heap.
-///
-/// This will be a max-heap.
-///
-/// It is a logic error for an item to be modified in such a way that the
-/// item's ordering relative to any other item, as determined by the `Ord`
-/// trait, changes while it is in the heap. This is normally only possible
-/// through `Cell`, `RefCell`, global state, I/O, or unsafe code.
-///
-/// # Examples
-///
-/// ```
-/// use std::collections::BinaryHeap;
-///
-/// // Type inference lets us omit an explicit type signature (which
-/// // would be `BinaryHeap<i32>` in this example).
-/// let mut heap = BinaryHeap::new();
-///
-/// // We can use peek to look at the next item in the heap. In this case,
-/// // there's no items in there yet so we get None.
-/// assert_eq!(heap.peek(), None);
-///
-/// // Let's add some scores...
-/// heap.push(1);
-/// heap.push(5);
-/// heap.push(2);
-///
-/// // Now peek shows the most important item in the heap.
-/// assert_eq!(heap.peek(), Some(&5));
-///
-/// // We can check the length of a heap.
-/// assert_eq!(heap.len(), 3);
-///
-/// // We can iterate over the items in the heap, although they are returned in
-/// // a random order.
-/// for x in &heap {
-/// println!("{}", x);
-/// }
-///
-/// // If we instead pop these scores, they should come back in order.
-/// assert_eq!(heap.pop(), Some(5));
-/// assert_eq!(heap.pop(), Some(2));
-/// assert_eq!(heap.pop(), Some(1));
-/// assert_eq!(heap.pop(), None);
-///
-/// // We can clear the heap of any remaining items.
-/// heap.clear();
-///
-/// // The heap should now be empty.
-/// assert!(heap.is_empty())
-/// ```
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct BinaryHeap<T> {
- data: Vec<T>,
-}
-
-/// Structure wrapping a mutable reference to the greatest item on a
-/// `BinaryHeap`.
-///
-/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
-/// its documentation for more.
-///
-/// [`peek_mut`]: struct.BinaryHeap.html#method.peek_mut
-/// [`BinaryHeap`]: struct.BinaryHeap.html
-#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-pub struct PeekMut<'a, T: 'a + Ord> {
- heap: &'a mut BinaryHeap<T>,
- sift: bool,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: Ord + fmt::Debug> fmt::Debug for PeekMut<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("PeekMut")
- .field(&self.heap.data[0])
- .finish()
- }
-}
-
-#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-impl<'a, T: Ord> Drop for PeekMut<'a, T> {
- fn drop(&mut self) {
- if self.sift {
- self.heap.sift_down(0);
- }
- }
-}
-
-#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-impl<'a, T: Ord> Deref for PeekMut<'a, T> {
- type Target = T;
- fn deref(&self) -> &T {
- &self.heap.data[0]
- }
-}
-
-#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-impl<'a, T: Ord> DerefMut for PeekMut<'a, T> {
- fn deref_mut(&mut self) -> &mut T {
- &mut self.heap.data[0]
- }
-}
-
-impl<'a, T: Ord> PeekMut<'a, T> {
- /// Removes the peeked value from the heap and returns it.
- #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
- pub fn pop(mut this: PeekMut<'a, T>) -> T {
- let value = this.heap.pop().unwrap();
- this.sift = false;
- value
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone> Clone for BinaryHeap<T> {
- fn clone(&self) -> Self {
- BinaryHeap { data: self.data.clone() }
- }
-
- fn clone_from(&mut self, source: &Self) {
- self.data.clone_from(&source.data);
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Default for BinaryHeap<T> {
- /// Creates an empty `BinaryHeap<T>`.
- #[inline]
- fn default() -> BinaryHeap<T> {
- BinaryHeap::new()
- }
-}
-
-#[stable(feature = "binaryheap_debug", since = "1.4.0")]
-impl<T: fmt::Debug + Ord> fmt::Debug for BinaryHeap<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_list().entries(self.iter()).finish()
- }
-}
-
-impl<T: Ord> BinaryHeap<T> {
- /// Creates an empty `BinaryHeap` as a max-heap.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::new();
- /// heap.push(4);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn new() -> BinaryHeap<T> {
- BinaryHeap { data: vec![] }
- }
-
- /// Creates an empty `BinaryHeap` with a specific capacity.
- /// This preallocates enough memory for `capacity` elements,
- /// so that the `BinaryHeap` does not have to be reallocated
- /// until it contains at least that many values.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::with_capacity(10);
- /// heap.push(4);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
- BinaryHeap { data: Vec::with_capacity(capacity) }
- }
-
- /// Returns an iterator visiting all values in the underlying vector, in
- /// arbitrary order.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
- ///
- /// // Print 1, 2, 3, 4 in arbitrary order
- /// for x in heap.iter() {
- /// println!("{}", x);
- /// }
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter(&self) -> Iter<T> {
- Iter { iter: self.data.iter() }
- }
-
- /// Returns the greatest item in the binary heap, or `None` if it is empty.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::new();
- /// assert_eq!(heap.peek(), None);
- ///
- /// heap.push(1);
- /// heap.push(5);
- /// heap.push(2);
- /// assert_eq!(heap.peek(), Some(&5));
- ///
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn peek(&self) -> Option<&T> {
- self.data.get(0)
- }
-
- /// Returns a mutable reference to the greatest item in the binary heap, or
- /// `None` if it is empty.
- ///
- /// Note: If the `PeekMut` value is leaked, the heap may be in an
- /// inconsistent state.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::new();
- /// assert!(heap.peek_mut().is_none());
- ///
- /// heap.push(1);
- /// heap.push(5);
- /// heap.push(2);
- /// {
- /// let mut val = heap.peek_mut().unwrap();
- /// *val = 0;
- /// }
- /// assert_eq!(heap.peek(), Some(&2));
- /// ```
- #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
- pub fn peek_mut(&mut self) -> Option<PeekMut<T>> {
- if self.is_empty() {
- None
- } else {
- Some(PeekMut {
- heap: self,
- sift: true,
- })
- }
- }
-
- /// Returns the number of elements the binary heap can hold without reallocating.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::with_capacity(100);
- /// assert!(heap.capacity() >= 100);
- /// heap.push(4);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn capacity(&self) -> usize {
- self.data.capacity()
- }
-
- /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
- /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
- ///
- /// Note that the allocator may give the collection more space than it requests. Therefore
- /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
- /// insertions are expected.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity overflows `usize`.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::new();
- /// heap.reserve_exact(100);
- /// assert!(heap.capacity() >= 100);
- /// heap.push(4);
- /// ```
- ///
- /// [`reserve`]: #method.reserve
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn reserve_exact(&mut self, additional: usize) {
- self.data.reserve_exact(additional);
- }
-
- /// Reserves capacity for at least `additional` more elements to be inserted in the
- /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity overflows `usize`.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::new();
- /// heap.reserve(100);
- /// assert!(heap.capacity() >= 100);
- /// heap.push(4);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn reserve(&mut self, additional: usize) {
- self.data.reserve(additional);
- }
-
- /// Discards as much additional capacity as possible.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
- ///
- /// assert!(heap.capacity() >= 100);
- /// heap.shrink_to_fit();
- /// assert!(heap.capacity() == 0);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn shrink_to_fit(&mut self) {
- self.data.shrink_to_fit();
- }
-
- /// Discards capacity with a lower bound.
- ///
- /// The capacity will remain at least as large as both the length
- /// and the supplied value.
- ///
- /// Panics if the current capacity is smaller than the supplied
- /// minimum capacity.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(shrink_to)]
- /// use std::collections::BinaryHeap;
- /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
- ///
- /// assert!(heap.capacity() >= 100);
- /// heap.shrink_to(10);
- /// assert!(heap.capacity() >= 10);
- /// ```
- #[inline]
- #[unstable(feature = "shrink_to", reason = "new API", issue="0")]
- pub fn shrink_to(&mut self, min_capacity: usize) {
- self.data.shrink_to(min_capacity)
- }
-
- /// Removes the greatest item from the binary heap and returns it, or `None` if it
- /// is empty.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::from(vec![1, 3]);
- ///
- /// assert_eq!(heap.pop(), Some(3));
- /// assert_eq!(heap.pop(), Some(1));
- /// assert_eq!(heap.pop(), None);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn pop(&mut self) -> Option<T> {
- self.data.pop().map(|mut item| {
- if !self.is_empty() {
- swap(&mut item, &mut self.data[0]);
- self.sift_down_to_bottom(0);
- }
- item
- })
- }
-
- /// Pushes an item onto the binary heap.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::new();
- /// heap.push(3);
- /// heap.push(5);
- /// heap.push(1);
- ///
- /// assert_eq!(heap.len(), 3);
- /// assert_eq!(heap.peek(), Some(&5));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn push(&mut self, item: T) {
- let old_len = self.len();
- self.data.push(item);
- self.sift_up(0, old_len);
- }
-
- /// Consumes the `BinaryHeap` and returns the underlying vector
- /// in arbitrary order.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
- /// let vec = heap.into_vec();
- ///
- /// // Will print in some order
- /// for x in vec {
- /// println!("{}", x);
- /// }
- /// ```
- #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
- pub fn into_vec(self) -> Vec<T> {
- self.into()
- }
-
- /// Consumes the `BinaryHeap` and returns a vector in sorted
- /// (ascending) order.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- ///
- /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
- /// heap.push(6);
- /// heap.push(3);
- ///
- /// let vec = heap.into_sorted_vec();
- /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
- /// ```
- #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
- pub fn into_sorted_vec(mut self) -> Vec<T> {
- let mut end = self.len();
- while end > 1 {
- end -= 1;
- self.data.swap(0, end);
- self.sift_down_range(0, end);
- }
- self.into_vec()
- }
-
- // The implementations of sift_up and sift_down use unsafe blocks in
- // order to move an element out of the vector (leaving behind a
- // hole), shift along the others and move the removed element back into the
- // vector at the final location of the hole.
- // The `Hole` type is used to represent this, and make sure
- // the hole is filled back at the end of its scope, even on panic.
- // Using a hole reduces the constant factor compared to using swaps,
- // which involves twice as many moves.
- fn sift_up(&mut self, start: usize, pos: usize) -> usize {
- unsafe {
- // Take out the value at `pos` and create a hole.
- let mut hole = Hole::new(&mut self.data, pos);
-
- while hole.pos() > start {
- let parent = (hole.pos() - 1) / 2;
- if hole.element() <= hole.get(parent) {
- break;
- }
- hole.move_to(parent);
- }
- hole.pos()
- }
- }
-
- /// Take an element at `pos` and move it down the heap,
- /// while its children are larger.
- fn sift_down_range(&mut self, pos: usize, end: usize) {
- unsafe {
- let mut hole = Hole::new(&mut self.data, pos);
- let mut child = 2 * pos + 1;
- while child < end {
- let right = child + 1;
- // compare with the greater of the two children
- if right < end && !(hole.get(child) > hole.get(right)) {
- child = right;
- }
- // if we are already in order, stop.
- if hole.element() >= hole.get(child) {
- break;
- }
- hole.move_to(child);
- child = 2 * hole.pos() + 1;
- }
- }
- }
-
- fn sift_down(&mut self, pos: usize) {
- let len = self.len();
- self.sift_down_range(pos, len);
- }
-
- /// Take an element at `pos` and move it all the way down the heap,
- /// then sift it up to its position.
- ///
- /// Note: This is faster when the element is known to be large / should
- /// be closer to the bottom.
- fn sift_down_to_bottom(&mut self, mut pos: usize) {
- let end = self.len();
- let start = pos;
- unsafe {
- let mut hole = Hole::new(&mut self.data, pos);
- let mut child = 2 * pos + 1;
- while child < end {
- let right = child + 1;
- // compare with the greater of the two children
- if right < end && !(hole.get(child) > hole.get(right)) {
- child = right;
- }
- hole.move_to(child);
- child = 2 * hole.pos() + 1;
- }
- pos = hole.pos;
- }
- self.sift_up(start, pos);
- }
-
- /// Returns the length of the binary heap.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let heap = BinaryHeap::from(vec![1, 3]);
- ///
- /// assert_eq!(heap.len(), 2);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn len(&self) -> usize {
- self.data.len()
- }
-
- /// Checks if the binary heap is empty.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::new();
- ///
- /// assert!(heap.is_empty());
- ///
- /// heap.push(3);
- /// heap.push(5);
- /// heap.push(1);
- ///
- /// assert!(!heap.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_empty(&self) -> bool {
- self.len() == 0
- }
-
- /// Clears the binary heap, returning an iterator over the removed elements.
- ///
- /// The elements are removed in arbitrary order.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::from(vec![1, 3]);
- ///
- /// assert!(!heap.is_empty());
- ///
- /// for x in heap.drain() {
- /// println!("{}", x);
- /// }
- ///
- /// assert!(heap.is_empty());
- /// ```
- #[inline]
- #[stable(feature = "drain", since = "1.6.0")]
- pub fn drain(&mut self) -> Drain<T> {
- Drain { iter: self.data.drain(..) }
- }
-
- /// Drops all items from the binary heap.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let mut heap = BinaryHeap::from(vec![1, 3]);
- ///
- /// assert!(!heap.is_empty());
- ///
- /// heap.clear();
- ///
- /// assert!(heap.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn clear(&mut self) {
- self.drain();
- }
-
- fn rebuild(&mut self) {
- let mut n = self.len() / 2;
- while n > 0 {
- n -= 1;
- self.sift_down(n);
- }
- }
-
- /// Moves all the elements of `other` into `self`, leaving `other` empty.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- ///
- /// let v = vec![-10, 1, 2, 3, 3];
- /// let mut a = BinaryHeap::from(v);
- ///
- /// let v = vec![-20, 5, 43];
- /// let mut b = BinaryHeap::from(v);
- ///
- /// a.append(&mut b);
- ///
- /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
- /// assert!(b.is_empty());
- /// ```
- #[stable(feature = "binary_heap_append", since = "1.11.0")]
- pub fn append(&mut self, other: &mut Self) {
- if self.len() < other.len() {
- swap(self, other);
- }
-
- if other.is_empty() {
- return;
- }
-
- #[inline(always)]
- fn log2_fast(x: usize) -> usize {
- 8 * size_of::<usize>() - (x.leading_zeros() as usize) - 1
- }
-
- // `rebuild` takes O(len1 + len2) operations
- // and about 2 * (len1 + len2) comparisons in the worst case
- // while `extend` takes O(len2 * log_2(len1)) operations
- // and about 1 * len2 * log_2(len1) comparisons in the worst case,
- // assuming len1 >= len2.
- #[inline]
- fn better_to_rebuild(len1: usize, len2: usize) -> bool {
- 2 * (len1 + len2) < len2 * log2_fast(len1)
- }
-
- if better_to_rebuild(self.len(), other.len()) {
- self.data.append(&mut other.data);
- self.rebuild();
- } else {
- self.extend(other.drain());
- }
- }
-}
-
-/// Hole represents a hole in a slice i.e. an index without valid value
-/// (because it was moved from or duplicated).
-/// In drop, `Hole` will restore the slice by filling the hole
-/// position with the value that was originally removed.
-struct Hole<'a, T: 'a> {
- data: &'a mut [T],
- elt: ManuallyDrop<T>,
- pos: usize,
-}
-
-impl<'a, T> Hole<'a, T> {
- /// Create a new Hole at index `pos`.
- ///
- /// Unsafe because pos must be within the data slice.
- #[inline]
- unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
- debug_assert!(pos < data.len());
- let elt = ptr::read(&data[pos]);
- Hole {
- data,
- elt: ManuallyDrop::new(elt),
- pos,
- }
- }
-
- #[inline]
- fn pos(&self) -> usize {
- self.pos
- }
-
- /// Returns a reference to the element removed.
- #[inline]
- fn element(&self) -> &T {
- &self.elt
- }
-
- /// Returns a reference to the element at `index`.
- ///
- /// Unsafe because index must be within the data slice and not equal to pos.
- #[inline]
- unsafe fn get(&self, index: usize) -> &T {
- debug_assert!(index != self.pos);
- debug_assert!(index < self.data.len());
- self.data.get_unchecked(index)
- }
-
- /// Move hole to new location
- ///
- /// Unsafe because index must be within the data slice and not equal to pos.
- #[inline]
- unsafe fn move_to(&mut self, index: usize) {
- debug_assert!(index != self.pos);
- debug_assert!(index < self.data.len());
- let index_ptr: *const _ = self.data.get_unchecked(index);
- let hole_ptr = self.data.get_unchecked_mut(self.pos);
- ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
- self.pos = index;
- }
-}
-
-impl<'a, T> Drop for Hole<'a, T> {
- #[inline]
- fn drop(&mut self) {
- // fill the hole again
- unsafe {
- let pos = self.pos;
- ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
- }
- }
-}
-
-/// An iterator over the elements of a `BinaryHeap`.
-///
-/// This `struct` is created by the [`iter`] method on [`BinaryHeap`]. See its
-/// documentation for more.
-///
-/// [`iter`]: struct.BinaryHeap.html#method.iter
-/// [`BinaryHeap`]: struct.BinaryHeap.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Iter<'a, T: 'a> {
- iter: slice::Iter<'a, T>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Iter")
- .field(&self.iter.as_slice())
- .finish()
- }
-}
-
-// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Iter<'a, T> {
- fn clone(&self) -> Iter<'a, T> {
- Iter { iter: self.iter.clone() }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for Iter<'a, T> {
- type Item = &'a T;
-
- #[inline]
- fn next(&mut self) -> Option<&'a T> {
- self.iter.next()
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
- #[inline]
- fn next_back(&mut self) -> Option<&'a T> {
- self.iter.next_back()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for Iter<'a, T> {
- fn is_empty(&self) -> bool {
- self.iter.is_empty()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for Iter<'a, T> {}
-
-/// An owning iterator over the elements of a `BinaryHeap`.
-///
-/// This `struct` is created by the [`into_iter`] method on [`BinaryHeap`][`BinaryHeap`]
-/// (provided by the `IntoIterator` trait). See its documentation for more.
-///
-/// [`into_iter`]: struct.BinaryHeap.html#method.into_iter
-/// [`BinaryHeap`]: struct.BinaryHeap.html
-#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Clone)]
-pub struct IntoIter<T> {
- iter: vec::IntoIter<T>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("IntoIter")
- .field(&self.iter.as_slice())
- .finish()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Iterator for IntoIter<T> {
- type Item = T;
-
- #[inline]
- fn next(&mut self) -> Option<T> {
- self.iter.next()
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> DoubleEndedIterator for IntoIter<T> {
- #[inline]
- fn next_back(&mut self) -> Option<T> {
- self.iter.next_back()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> ExactSizeIterator for IntoIter<T> {
- fn is_empty(&self) -> bool {
- self.iter.is_empty()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for IntoIter<T> {}
-
-/// A draining iterator over the elements of a `BinaryHeap`.
-///
-/// This `struct` is created by the [`drain`] method on [`BinaryHeap`]. See its
-/// documentation for more.
-///
-/// [`drain`]: struct.BinaryHeap.html#method.drain
-/// [`BinaryHeap`]: struct.BinaryHeap.html
-#[stable(feature = "drain", since = "1.6.0")]
-#[derive(Debug)]
-pub struct Drain<'a, T: 'a> {
- iter: vec::Drain<'a, T>,
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<'a, T: 'a> Iterator for Drain<'a, T> {
- type Item = T;
-
- #[inline]
- fn next(&mut self) -> Option<T> {
- self.iter.next()
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
- #[inline]
- fn next_back(&mut self) -> Option<T> {
- self.iter.next_back()
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {
- fn is_empty(&self) -> bool {
- self.iter.is_empty()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
-
-#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
-impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
- fn from(vec: Vec<T>) -> BinaryHeap<T> {
- let mut heap = BinaryHeap { data: vec };
- heap.rebuild();
- heap
- }
-}
-
-#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
-impl<T> From<BinaryHeap<T>> for Vec<T> {
- fn from(heap: BinaryHeap<T>) -> Vec<T> {
- heap.data
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
- fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
- BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> IntoIterator for BinaryHeap<T> {
- type Item = T;
- type IntoIter = IntoIter<T>;
-
- /// Creates a consuming iterator, that is, one that moves each value out of
- /// the binary heap in arbitrary order. The binary heap cannot be used
- /// after calling this.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BinaryHeap;
- /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
- ///
- /// // Print 1, 2, 3, 4 in arbitrary order
- /// for x in heap.into_iter() {
- /// // x has type i32, not &i32
- /// println!("{}", x);
- /// }
- /// ```
- fn into_iter(self) -> IntoIter<T> {
- IntoIter { iter: self.data.into_iter() }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a BinaryHeap<T>
- where T: Ord
-{
- type Item = &'a T;
- type IntoIter = Iter<'a, T>;
-
- fn into_iter(self) -> Iter<'a, T> {
- self.iter()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Extend<T> for BinaryHeap<T> {
- #[inline]
- fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
- <Self as SpecExtend<I>>::spec_extend(self, iter);
- }
-}
-
-impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
- default fn spec_extend(&mut self, iter: I) {
- self.extend_desugared(iter.into_iter());
- }
-}
-
-impl<T: Ord> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
- fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
- self.append(other);
- }
-}
-
-impl<T: Ord> BinaryHeap<T> {
- fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
- let iterator = iter.into_iter();
- let (lower, _) = iterator.size_hint();
-
- self.reserve(lower);
-
- for elem in iterator {
- self.push(elem);
- }
- }
-}
-
-#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
- fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
- self.extend(iter.into_iter().cloned());
- }
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use core::cmp::Ordering;
-use core::fmt::Debug;
-use core::hash::{Hash, Hasher};
-use core::iter::{FromIterator, Peekable, FusedIterator};
-use core::marker::PhantomData;
-use core::ops::Bound::{Excluded, Included, Unbounded};
-use core::ops::Index;
-use core::ops::RangeBounds;
-use core::{fmt, intrinsics, mem, ptr};
-
-use borrow::Borrow;
-
-use super::node::{self, Handle, NodeRef, marker};
-use super::search;
-
-use super::node::InsertResult::*;
-use super::node::ForceResult::*;
-use super::search::SearchResult::*;
-use self::UnderflowResult::*;
-use self::Entry::*;
-
-/// A map based on a B-Tree.
-///
-/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
-/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal
-/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of
-/// comparisons necessary to find an element (log<sub>2</sub>n). However, in practice the way this
-/// is done is *very* inefficient for modern computer architectures. In particular, every element
-/// is stored in its own individually heap-allocated node. This means that every single insertion
-/// triggers a heap-allocation, and every single comparison should be a cache-miss. Since these
-/// are both notably expensive things to do in practice, we are forced to at very least reconsider
-/// the BST strategy.
-///
-/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
-/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
-/// searches. However, this does mean that searches will have to do *more* comparisons on average.
-/// The precise number of comparisons depends on the node search strategy used. For optimal cache
-/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
-/// the node using binary search. As a compromise, one could also perform a linear search
-/// that initially only checks every i<sup>th</sup> element for some choice of i.
-///
-/// Currently, our implementation simply performs naive linear search. This provides excellent
-/// performance on *small* nodes of elements which are cheap to compare. However in the future we
-/// would like to further explore choosing the optimal search strategy based on the choice of B,
-/// and possibly other factors. Using linear search, searching for a random element is expected
-/// to take O(B log<sub>B</sub>n) comparisons, which is generally worse than a BST. In practice,
-/// however, performance is excellent.
-///
-/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
-/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
-/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
-///
-/// [`Ord`]: ../../std/cmp/trait.Ord.html
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
-///
-/// # Examples
-///
-/// ```
-/// use std::collections::BTreeMap;
-///
-/// // type inference lets us omit an explicit type signature (which
-/// // would be `BTreeMap<&str, &str>` in this example).
-/// let mut movie_reviews = BTreeMap::new();
-///
-/// // review some movies.
-/// movie_reviews.insert("Office Space", "Deals with real issues in the workplace.");
-/// movie_reviews.insert("Pulp Fiction", "Masterpiece.");
-/// movie_reviews.insert("The Godfather", "Very enjoyable.");
-/// movie_reviews.insert("The Blues Brothers", "Eye lyked it alot.");
-///
-/// // check for a specific one.
-/// if !movie_reviews.contains_key("Les Misérables") {
-/// println!("We've got {} reviews, but Les Misérables ain't one.",
-/// movie_reviews.len());
-/// }
-///
-/// // oops, this review has a lot of spelling mistakes, let's delete it.
-/// movie_reviews.remove("The Blues Brothers");
-///
-/// // look up the values associated with some keys.
-/// let to_find = ["Up!", "Office Space"];
-/// for book in &to_find {
-/// match movie_reviews.get(book) {
-/// Some(review) => println!("{}: {}", book, review),
-/// None => println!("{} is unreviewed.", book)
-/// }
-/// }
-///
-/// // iterate over everything.
-/// for (movie, review) in &movie_reviews {
-/// println!("{}: \"{}\"", movie, review);
-/// }
-/// ```
-///
-/// `BTreeMap` also implements an [`Entry API`](#method.entry), which allows
-/// for more complex methods of getting, setting, updating and removing keys and
-/// their values:
-///
-/// ```
-/// use std::collections::BTreeMap;
-///
-/// // type inference lets us omit an explicit type signature (which
-/// // would be `BTreeMap<&str, u8>` in this example).
-/// let mut player_stats = BTreeMap::new();
-///
-/// fn random_stat_buff() -> u8 {
-/// // could actually return some random value here - let's just return
-/// // some fixed value for now
-/// 42
-/// }
-///
-/// // insert a key only if it doesn't already exist
-/// player_stats.entry("health").or_insert(100);
-///
-/// // insert a key using a function that provides a new value only if it
-/// // doesn't already exist
-/// player_stats.entry("defence").or_insert_with(random_stat_buff);
-///
-/// // update a key, guarding against the key possibly not being set
-/// let stat = player_stats.entry("attack").or_insert(100);
-/// *stat += random_stat_buff();
-/// ```
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct BTreeMap<K, V> {
- root: node::Root<K, V>,
- length: usize,
-}
-
-#[stable(feature = "btree_drop", since = "1.7.0")]
-unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
- fn drop(&mut self) {
- unsafe {
- drop(ptr::read(self).into_iter());
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
- fn clone(&self) -> BTreeMap<K, V> {
- fn clone_subtree<K: Clone, V: Clone>(node: node::NodeRef<marker::Immut,
- K,
- V,
- marker::LeafOrInternal>)
- -> BTreeMap<K, V> {
-
- match node.force() {
- Leaf(leaf) => {
- let mut out_tree = BTreeMap {
- root: node::Root::new_leaf(),
- length: 0,
- };
-
- {
- let mut out_node = match out_tree.root.as_mut().force() {
- Leaf(leaf) => leaf,
- Internal(_) => unreachable!(),
- };
-
- let mut in_edge = leaf.first_edge();
- while let Ok(kv) = in_edge.right_kv() {
- let (k, v) = kv.into_kv();
- in_edge = kv.right_edge();
-
- out_node.push(k.clone(), v.clone());
- out_tree.length += 1;
- }
- }
-
- out_tree
- }
- Internal(internal) => {
- let mut out_tree = clone_subtree(internal.first_edge().descend());
-
- {
- let mut out_node = out_tree.root.push_level();
- let mut in_edge = internal.first_edge();
- while let Ok(kv) = in_edge.right_kv() {
- let (k, v) = kv.into_kv();
- in_edge = kv.right_edge();
-
- let k = (*k).clone();
- let v = (*v).clone();
- let subtree = clone_subtree(in_edge.descend());
-
- // We can't destructure subtree directly
- // because BTreeMap implements Drop
- let (subroot, sublength) = unsafe {
- let root = ptr::read(&subtree.root);
- let length = subtree.length;
- mem::forget(subtree);
- (root, length)
- };
-
- out_node.push(k, v, subroot);
- out_tree.length += 1 + sublength;
- }
- }
-
- out_tree
- }
- }
- }
-
- clone_subtree(self.root.as_ref())
- }
-}
-
-impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
- where K: Borrow<Q> + Ord,
- Q: Ord
-{
- type Key = K;
-
- fn get(&self, key: &Q) -> Option<&K> {
- match search::search_tree(self.root.as_ref(), key) {
- Found(handle) => Some(handle.into_kv().0),
- GoDown(_) => None,
- }
- }
-
- fn take(&mut self, key: &Q) -> Option<K> {
- match search::search_tree(self.root.as_mut(), key) {
- Found(handle) => {
- Some(OccupiedEntry {
- handle,
- length: &mut self.length,
- _marker: PhantomData,
- }
- .remove_kv()
- .0)
- }
- GoDown(_) => None,
- }
- }
-
- fn replace(&mut self, key: K) -> Option<K> {
- self.ensure_root_is_owned();
- match search::search_tree::<marker::Mut, K, (), K>(self.root.as_mut(), &key) {
- Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
- GoDown(handle) => {
- VacantEntry {
- key,
- handle,
- length: &mut self.length,
- _marker: PhantomData,
- }
- .insert(());
- None
- }
- }
- }
-}
-
-/// An iterator over the entries of a `BTreeMap`.
-///
-/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
-/// documentation for more.
-///
-/// [`iter`]: struct.BTreeMap.html#method.iter
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Iter<'a, K: 'a, V: 'a> {
- range: Range<'a, K, V>,
- length: usize,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Iter<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_list().entries(self.clone()).finish()
- }
-}
-
-/// A mutable iterator over the entries of a `BTreeMap`.
-///
-/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
-/// documentation for more.
-///
-/// [`iter_mut`]: struct.BTreeMap.html#method.iter_mut
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Debug)]
-pub struct IterMut<'a, K: 'a, V: 'a> {
- range: RangeMut<'a, K, V>,
- length: usize,
-}
-
-/// An owning iterator over the entries of a `BTreeMap`.
-///
-/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`][`BTreeMap`]
-/// (provided by the `IntoIterator` trait). See its documentation for more.
-///
-/// [`into_iter`]: struct.BTreeMap.html#method.into_iter
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct IntoIter<K, V> {
- front: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
- back: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
- length: usize,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let range = Range {
- front: self.front.reborrow(),
- back: self.back.reborrow(),
- };
- f.debug_list().entries(range).finish()
- }
-}
-
-/// An iterator over the keys of a `BTreeMap`.
-///
-/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
-/// documentation for more.
-///
-/// [`keys`]: struct.BTreeMap.html#method.keys
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Keys<'a, K: 'a, V: 'a> {
- inner: Iter<'a, K, V>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, K: 'a + fmt::Debug, V: 'a> fmt::Debug for Keys<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_list().entries(self.clone()).finish()
- }
-}
-
-/// An iterator over the values of a `BTreeMap`.
-///
-/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
-/// documentation for more.
-///
-/// [`values`]: struct.BTreeMap.html#method.values
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Values<'a, K: 'a, V: 'a> {
- inner: Iter<'a, K, V>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, K: 'a, V: 'a + fmt::Debug> fmt::Debug for Values<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_list().entries(self.clone()).finish()
- }
-}
-
-/// A mutable iterator over the values of a `BTreeMap`.
-///
-/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
-/// documentation for more.
-///
-/// [`values_mut`]: struct.BTreeMap.html#method.values_mut
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "map_values_mut", since = "1.10.0")]
-#[derive(Debug)]
-pub struct ValuesMut<'a, K: 'a, V: 'a> {
- inner: IterMut<'a, K, V>,
-}
-
-/// An iterator over a sub-range of entries in a `BTreeMap`.
-///
-/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
-/// documentation for more.
-///
-/// [`range`]: struct.BTreeMap.html#method.range
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "btree_range", since = "1.17.0")]
-pub struct Range<'a, K: 'a, V: 'a> {
- front: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
- back: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Range<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_list().entries(self.clone()).finish()
- }
-}
-
-/// A mutable iterator over a sub-range of entries in a `BTreeMap`.
-///
-/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
-/// documentation for more.
-///
-/// [`range_mut`]: struct.BTreeMap.html#method.range_mut
-/// [`BTreeMap`]: struct.BTreeMap.html
-#[stable(feature = "btree_range", since = "1.17.0")]
-pub struct RangeMut<'a, K: 'a, V: 'a> {
- front: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
- back: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
-
- // Be invariant in `K` and `V`
- _marker: PhantomData<&'a mut (K, V)>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for RangeMut<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let range = Range {
- front: self.front.reborrow(),
- back: self.back.reborrow(),
- };
- f.debug_list().entries(range).finish()
- }
-}
-
-/// A view into a single entry in a map, which may either be vacant or occupied.
-///
-/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
-///
-/// [`BTreeMap`]: struct.BTreeMap.html
-/// [`entry`]: struct.BTreeMap.html#method.entry
-#[stable(feature = "rust1", since = "1.0.0")]
-pub enum Entry<'a, K: 'a, V: 'a> {
- /// A vacant entry.
- #[stable(feature = "rust1", since = "1.0.0")]
- Vacant(#[stable(feature = "rust1", since = "1.0.0")]
- VacantEntry<'a, K, V>),
-
- /// An occupied entry.
- #[stable(feature = "rust1", since = "1.0.0")]
- Occupied(#[stable(feature = "rust1", since = "1.0.0")]
- OccupiedEntry<'a, K, V>),
-}
-
-#[stable(feature= "debug_btree_map", since = "1.12.0")]
-impl<'a, K: 'a + Debug + Ord, V: 'a + Debug> Debug for Entry<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match *self {
- Vacant(ref v) => f.debug_tuple("Entry")
- .field(v)
- .finish(),
- Occupied(ref o) => f.debug_tuple("Entry")
- .field(o)
- .finish(),
- }
- }
-}
-
-/// A view into a vacant entry in a `BTreeMap`.
-/// It is part of the [`Entry`] enum.
-///
-/// [`Entry`]: enum.Entry.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct VacantEntry<'a, K: 'a, V: 'a> {
- key: K,
- handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
- length: &'a mut usize,
-
- // Be invariant in `K` and `V`
- _marker: PhantomData<&'a mut (K, V)>,
-}
-
-#[stable(feature= "debug_btree_map", since = "1.12.0")]
-impl<'a, K: 'a + Debug + Ord, V: 'a> Debug for VacantEntry<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("VacantEntry")
- .field(self.key())
- .finish()
- }
-}
-
-/// A view into an occupied entry in a `BTreeMap`.
-/// It is part of the [`Entry`] enum.
-///
-/// [`Entry`]: enum.Entry.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
- handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
-
- length: &'a mut usize,
-
- // Be invariant in `K` and `V`
- _marker: PhantomData<&'a mut (K, V)>,
-}
-
-#[stable(feature= "debug_btree_map", since = "1.12.0")]
-impl<'a, K: 'a + Debug + Ord, V: 'a + Debug> Debug for OccupiedEntry<'a, K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("OccupiedEntry")
- .field("key", self.key())
- .field("value", self.get())
- .finish()
- }
-}
-
-// An iterator for merging two sorted sequences into one
-struct MergeIter<K, V, I: Iterator<Item = (K, V)>> {
- left: Peekable<I>,
- right: Peekable<I>,
-}
-
-impl<K: Ord, V> BTreeMap<K, V> {
- /// Makes a new empty BTreeMap with a reasonable choice for B.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- ///
- /// // entries can now be inserted into the empty map
- /// map.insert(1, "a");
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn new() -> BTreeMap<K, V> {
- BTreeMap {
- root: node::Root::shared_empty_root(),
- length: 0,
- }
- }
-
- /// Clears the map, removing all values.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// a.insert(1, "a");
- /// a.clear();
- /// assert!(a.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn clear(&mut self) {
- *self = BTreeMap::new();
- }
-
- /// Returns a reference to the value corresponding to the key.
- ///
- /// The key may be any borrowed form of the map's key type, but the ordering
- /// on the borrowed form *must* match the ordering on the key type.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(1, "a");
- /// assert_eq!(map.get(&1), Some(&"a"));
- /// assert_eq!(map.get(&2), None);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
- where K: Borrow<Q>,
- Q: Ord
- {
- match search::search_tree(self.root.as_ref(), key) {
- Found(handle) => Some(handle.into_kv().1),
- GoDown(_) => None,
- }
- }
-
- /// Returns the key-value pair corresponding to the supplied key.
- ///
- /// The supplied key may be any borrowed form of the map's key type, but the ordering
- /// on the borrowed form *must* match the ordering on the key type.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(map_get_key_value)]
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(1, "a");
- /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
- /// assert_eq!(map.get_key_value(&2), None);
- /// ```
- #[unstable(feature = "map_get_key_value", issue = "49347")]
- pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
- where K: Borrow<Q>,
- Q: Ord
- {
- match search::search_tree(self.root.as_ref(), k) {
- Found(handle) => Some(handle.into_kv()),
- GoDown(_) => None,
- }
- }
-
- /// Returns `true` if the map contains a value for the specified key.
- ///
- /// The key may be any borrowed form of the map's key type, but the ordering
- /// on the borrowed form *must* match the ordering on the key type.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(1, "a");
- /// assert_eq!(map.contains_key(&1), true);
- /// assert_eq!(map.contains_key(&2), false);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
- where K: Borrow<Q>,
- Q: Ord
- {
- self.get(key).is_some()
- }
-
- /// Returns a mutable reference to the value corresponding to the key.
- ///
- /// The key may be any borrowed form of the map's key type, but the ordering
- /// on the borrowed form *must* match the ordering on the key type.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(1, "a");
- /// if let Some(x) = map.get_mut(&1) {
- /// *x = "b";
- /// }
- /// assert_eq!(map[&1], "b");
- /// ```
- // See `get` for implementation notes, this is basically a copy-paste with mut's added
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
- where K: Borrow<Q>,
- Q: Ord
- {
- match search::search_tree(self.root.as_mut(), key) {
- Found(handle) => Some(handle.into_kv_mut().1),
- GoDown(_) => None,
- }
- }
-
- /// Inserts a key-value pair into the map.
- ///
- /// If the map did not have this key present, `None` is returned.
- ///
- /// If the map did have this key present, the value is updated, and the old
- /// value is returned. The key is not updated, though; this matters for
- /// types that can be `==` without being identical. See the [module-level
- /// documentation] for more.
- ///
- /// [module-level documentation]: index.html#insert-and-complex-keys
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// assert_eq!(map.insert(37, "a"), None);
- /// assert_eq!(map.is_empty(), false);
- ///
- /// map.insert(37, "b");
- /// assert_eq!(map.insert(37, "c"), Some("b"));
- /// assert_eq!(map[&37], "c");
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(&mut self, key: K, value: V) -> Option<V> {
- match self.entry(key) {
- Occupied(mut entry) => Some(entry.insert(value)),
- Vacant(entry) => {
- entry.insert(value);
- None
- }
- }
- }
-
- /// Removes a key from the map, returning the value at the key if the key
- /// was previously in the map.
- ///
- /// The key may be any borrowed form of the map's key type, but the ordering
- /// on the borrowed form *must* match the ordering on the key type.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(1, "a");
- /// assert_eq!(map.remove(&1), Some("a"));
- /// assert_eq!(map.remove(&1), None);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
- where K: Borrow<Q>,
- Q: Ord
- {
- match search::search_tree(self.root.as_mut(), key) {
- Found(handle) => {
- Some(OccupiedEntry {
- handle,
- length: &mut self.length,
- _marker: PhantomData,
- }
- .remove())
- }
- GoDown(_) => None,
- }
- }
-
- /// Moves all elements from `other` into `Self`, leaving `other` empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// a.insert(1, "a");
- /// a.insert(2, "b");
- /// a.insert(3, "c");
- ///
- /// let mut b = BTreeMap::new();
- /// b.insert(3, "d");
- /// b.insert(4, "e");
- /// b.insert(5, "f");
- ///
- /// a.append(&mut b);
- ///
- /// assert_eq!(a.len(), 5);
- /// assert_eq!(b.len(), 0);
- ///
- /// assert_eq!(a[&1], "a");
- /// assert_eq!(a[&2], "b");
- /// assert_eq!(a[&3], "d");
- /// assert_eq!(a[&4], "e");
- /// assert_eq!(a[&5], "f");
- /// ```
- #[stable(feature = "btree_append", since = "1.11.0")]
- pub fn append(&mut self, other: &mut Self) {
- // Do we have to append anything at all?
- if other.len() == 0 {
- return;
- }
-
- // We can just swap `self` and `other` if `self` is empty.
- if self.len() == 0 {
- mem::swap(self, other);
- return;
- }
-
- // First, we merge `self` and `other` into a sorted sequence in linear time.
- let self_iter = mem::replace(self, BTreeMap::new()).into_iter();
- let other_iter = mem::replace(other, BTreeMap::new()).into_iter();
- let iter = MergeIter {
- left: self_iter.peekable(),
- right: other_iter.peekable(),
- };
-
- // Second, we build a tree from the sorted sequence in linear time.
- self.from_sorted_iter(iter);
- self.fix_right_edge();
- }
-
- /// Constructs a double-ended iterator over a sub-range of elements in the map.
- /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
- /// yield elements from min (inclusive) to max (exclusive).
- /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
- /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
- /// range from 4 to 10.
- ///
- /// # Panics
- ///
- /// Panics if range `start > end`.
- /// Panics if range `start == end` and both bounds are `Excluded`.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::ops::Bound::Included;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(3, "a");
- /// map.insert(5, "b");
- /// map.insert(8, "c");
- /// for (&key, &value) in map.range((Included(&4), Included(&8))) {
- /// println!("{}: {}", key, value);
- /// }
- /// assert_eq!(Some((&5, &"b")), map.range(4..).next());
- /// ```
- #[stable(feature = "btree_range", since = "1.17.0")]
- pub fn range<T: ?Sized, R>(&self, range: R) -> Range<K, V>
- where T: Ord, K: Borrow<T>, R: RangeBounds<T>
- {
- let root1 = self.root.as_ref();
- let root2 = self.root.as_ref();
- let (f, b) = range_search(root1, root2, range);
-
- Range { front: f, back: b}
- }
-
- /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
- /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
- /// yield elements from min (inclusive) to max (exclusive).
- /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
- /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
- /// range from 4 to 10.
- ///
- /// # Panics
- ///
- /// Panics if range `start > end`.
- /// Panics if range `start == end` and both bounds are `Excluded`.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"].iter()
- /// .map(|&s| (s, 0))
- /// .collect();
- /// for (_, balance) in map.range_mut("B".."Cheryl") {
- /// *balance += 100;
- /// }
- /// for (name, balance) in &map {
- /// println!("{} => {}", name, balance);
- /// }
- /// ```
- #[stable(feature = "btree_range", since = "1.17.0")]
- pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<K, V>
- where T: Ord, K: Borrow<T>, R: RangeBounds<T>
- {
- let root1 = self.root.as_mut();
- let root2 = unsafe { ptr::read(&root1) };
- let (f, b) = range_search(root1, root2, range);
-
- RangeMut {
- front: f,
- back: b,
- _marker: PhantomData,
- }
- }
-
- /// Gets the given key's corresponding entry in the map for in-place manipulation.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
- ///
- /// // count the number of occurrences of letters in the vec
- /// for x in vec!["a","b","a","c","a","b"] {
- /// *count.entry(x).or_insert(0) += 1;
- /// }
- ///
- /// assert_eq!(count["a"], 3);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn entry(&mut self, key: K) -> Entry<K, V> {
- // FIXME(@porglezomp) Avoid allocating if we don't insert
- self.ensure_root_is_owned();
- match search::search_tree(self.root.as_mut(), &key) {
- Found(handle) => {
- Occupied(OccupiedEntry {
- handle,
- length: &mut self.length,
- _marker: PhantomData,
- })
- }
- GoDown(handle) => {
- Vacant(VacantEntry {
- key,
- handle,
- length: &mut self.length,
- _marker: PhantomData,
- })
- }
- }
- }
-
- fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
- self.ensure_root_is_owned();
- let mut cur_node = last_leaf_edge(self.root.as_mut()).into_node();
- // Iterate through all key-value pairs, pushing them into nodes at the right level.
- for (key, value) in iter {
- // Try to push key-value pair into the current leaf node.
- if cur_node.len() < node::CAPACITY {
- cur_node.push(key, value);
- } else {
- // No space left, go up and push there.
- let mut open_node;
- let mut test_node = cur_node.forget_type();
- loop {
- match test_node.ascend() {
- Ok(parent) => {
- let parent = parent.into_node();
- if parent.len() < node::CAPACITY {
- // Found a node with space left, push here.
- open_node = parent;
- break;
- } else {
- // Go up again.
- test_node = parent.forget_type();
- }
- }
- Err(node) => {
- // We are at the top, create a new root node and push there.
- open_node = node.into_root_mut().push_level();
- break;
- }
- }
- }
-
- // Push key-value pair and new right subtree.
- let tree_height = open_node.height() - 1;
- let mut right_tree = node::Root::new_leaf();
- for _ in 0..tree_height {
- right_tree.push_level();
- }
- open_node.push(key, value, right_tree);
-
- // Go down to the right-most leaf again.
- cur_node = last_leaf_edge(open_node.forget_type()).into_node();
- }
-
- self.length += 1;
- }
- }
-
- fn fix_right_edge(&mut self) {
- // Handle underfull nodes, start from the top.
- let mut cur_node = self.root.as_mut();
- while let Internal(internal) = cur_node.force() {
- // Check if right-most child is underfull.
- let mut last_edge = internal.last_edge();
- let right_child_len = last_edge.reborrow().descend().len();
- if right_child_len < node::MIN_LEN {
- // We need to steal.
- let mut last_kv = match last_edge.left_kv() {
- Ok(left) => left,
- Err(_) => unreachable!(),
- };
- last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
- last_edge = last_kv.right_edge();
- }
-
- // Go further down.
- cur_node = last_edge.descend();
- }
- }
-
- /// Splits the collection into two at the given key. Returns everything after the given key,
- /// including the key.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// a.insert(1, "a");
- /// a.insert(2, "b");
- /// a.insert(3, "c");
- /// a.insert(17, "d");
- /// a.insert(41, "e");
- ///
- /// let b = a.split_off(&3);
- ///
- /// assert_eq!(a.len(), 2);
- /// assert_eq!(b.len(), 3);
- ///
- /// assert_eq!(a[&1], "a");
- /// assert_eq!(a[&2], "b");
- ///
- /// assert_eq!(b[&3], "c");
- /// assert_eq!(b[&17], "d");
- /// assert_eq!(b[&41], "e");
- /// ```
- #[stable(feature = "btree_split_off", since = "1.11.0")]
- pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
- where K: Borrow<Q>
- {
- if self.is_empty() {
- return Self::new();
- }
-
- let total_num = self.len();
-
- let mut right = Self::new();
- right.root = node::Root::new_leaf();
- for _ in 0..(self.root.as_ref().height()) {
- right.root.push_level();
- }
-
- {
- let mut left_node = self.root.as_mut();
- let mut right_node = right.root.as_mut();
-
- loop {
- let mut split_edge = match search::search_node(left_node, key) {
- // key is going to the right tree
- Found(handle) => handle.left_edge(),
- GoDown(handle) => handle,
- };
-
- split_edge.move_suffix(&mut right_node);
-
- match (split_edge.force(), right_node.force()) {
- (Internal(edge), Internal(node)) => {
- left_node = edge.descend();
- right_node = node.first_edge().descend();
- }
- (Leaf(_), Leaf(_)) => {
- break;
- }
- _ => {
- unreachable!();
- }
- }
- }
- }
-
- self.fix_right_border();
- right.fix_left_border();
-
- if self.root.as_ref().height() < right.root.as_ref().height() {
- self.recalc_length();
- right.length = total_num - self.len();
- } else {
- right.recalc_length();
- self.length = total_num - right.len();
- }
-
- right
- }
-
- /// Calculates the number of elements if it is incorrect.
- fn recalc_length(&mut self) {
- fn dfs<K, V>(node: NodeRef<marker::Immut, K, V, marker::LeafOrInternal>) -> usize {
- let mut res = node.len();
-
- if let Internal(node) = node.force() {
- let mut edge = node.first_edge();
- loop {
- res += dfs(edge.reborrow().descend());
- match edge.right_kv() {
- Ok(right_kv) => {
- edge = right_kv.right_edge();
- }
- Err(_) => {
- break;
- }
- }
- }
- }
-
- res
- }
-
- self.length = dfs(self.root.as_ref());
- }
-
- /// Removes empty levels on the top.
- fn fix_top(&mut self) {
- loop {
- {
- let node = self.root.as_ref();
- if node.height() == 0 || node.len() > 0 {
- break;
- }
- }
- self.root.pop_level();
- }
- }
-
- fn fix_right_border(&mut self) {
- self.fix_top();
-
- {
- let mut cur_node = self.root.as_mut();
-
- while let Internal(node) = cur_node.force() {
- let mut last_kv = node.last_kv();
-
- if last_kv.can_merge() {
- cur_node = last_kv.merge().descend();
- } else {
- let right_len = last_kv.reborrow().right_edge().descend().len();
- // `MINLEN + 1` to avoid readjust if merge happens on the next level.
- if right_len < node::MIN_LEN + 1 {
- last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
- }
- cur_node = last_kv.right_edge().descend();
- }
- }
- }
-
- self.fix_top();
- }
-
- /// The symmetric clone of `fix_right_border`.
- fn fix_left_border(&mut self) {
- self.fix_top();
-
- {
- let mut cur_node = self.root.as_mut();
-
- while let Internal(node) = cur_node.force() {
- let mut first_kv = node.first_kv();
-
- if first_kv.can_merge() {
- cur_node = first_kv.merge().descend();
- } else {
- let left_len = first_kv.reborrow().left_edge().descend().len();
- if left_len < node::MIN_LEN + 1 {
- first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
- }
- cur_node = first_kv.left_edge().descend();
- }
- }
- }
-
- self.fix_top();
- }
-
- /// If the root node is the shared root node, allocate our own node.
- fn ensure_root_is_owned(&mut self) {
- if self.root.is_shared_root() {
- self.root = node::Root::new_leaf();
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap<K, V> {
- type Item = (&'a K, &'a V);
- type IntoIter = Iter<'a, K, V>;
-
- fn into_iter(self) -> Iter<'a, K, V> {
- self.iter()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
- type Item = (&'a K, &'a V);
-
- fn next(&mut self) -> Option<(&'a K, &'a V)> {
- if self.length == 0 {
- None
- } else {
- self.length -= 1;
- unsafe { Some(self.range.next_unchecked()) }
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (self.length, Some(self.length))
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
- if self.length == 0 {
- None
- } else {
- self.length -= 1;
- unsafe { Some(self.range.next_back_unchecked()) }
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> ExactSizeIterator for Iter<'a, K, V> {
- fn len(&self) -> usize {
- self.length
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Clone for Iter<'a, K, V> {
- fn clone(&self) -> Iter<'a, K, V> {
- Iter {
- range: self.range.clone(),
- length: self.length,
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap<K, V> {
- type Item = (&'a K, &'a mut V);
- type IntoIter = IterMut<'a, K, V>;
-
- fn into_iter(self) -> IterMut<'a, K, V> {
- self.iter_mut()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
- type Item = (&'a K, &'a mut V);
-
- fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
- if self.length == 0 {
- None
- } else {
- self.length -= 1;
- unsafe { Some(self.range.next_unchecked()) }
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (self.length, Some(self.length))
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
- if self.length == 0 {
- None
- } else {
- self.length -= 1;
- unsafe { Some(self.range.next_back_unchecked()) }
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: 'a, V: 'a> ExactSizeIterator for IterMut<'a, K, V> {
- fn len(&self) -> usize {
- self.length
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> IntoIterator for BTreeMap<K, V> {
- type Item = (K, V);
- type IntoIter = IntoIter<K, V>;
-
- fn into_iter(self) -> IntoIter<K, V> {
- let root1 = unsafe { ptr::read(&self.root).into_ref() };
- let root2 = unsafe { ptr::read(&self.root).into_ref() };
- let len = self.length;
- mem::forget(self);
-
- IntoIter {
- front: first_leaf_edge(root1),
- back: last_leaf_edge(root2),
- length: len,
- }
- }
-}
-
-#[stable(feature = "btree_drop", since = "1.7.0")]
-impl<K, V> Drop for IntoIter<K, V> {
- fn drop(&mut self) {
- self.for_each(drop);
- unsafe {
- let leaf_node = ptr::read(&self.front).into_node();
- if leaf_node.is_shared_root() {
- return;
- }
-
- if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
- let mut cur_node = first_parent.into_node();
- while let Some(parent) = cur_node.deallocate_and_ascend() {
- cur_node = parent.into_node()
- }
- }
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> Iterator for IntoIter<K, V> {
- type Item = (K, V);
-
- fn next(&mut self) -> Option<(K, V)> {
- if self.length == 0 {
- return None;
- } else {
- self.length -= 1;
- }
-
- let handle = unsafe { ptr::read(&self.front) };
-
- let mut cur_handle = match handle.right_kv() {
- Ok(kv) => {
- let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
- let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
- self.front = kv.right_edge();
- return Some((k, v));
- }
- Err(last_edge) => unsafe {
- unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
- },
- };
-
- loop {
- match cur_handle.right_kv() {
- Ok(kv) => {
- let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
- let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
- self.front = first_leaf_edge(kv.right_edge().descend());
- return Some((k, v));
- }
- Err(last_edge) => unsafe {
- cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
- },
- }
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (self.length, Some(self.length))
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
- fn next_back(&mut self) -> Option<(K, V)> {
- if self.length == 0 {
- return None;
- } else {
- self.length -= 1;
- }
-
- let handle = unsafe { ptr::read(&self.back) };
-
- let mut cur_handle = match handle.left_kv() {
- Ok(kv) => {
- let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
- let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
- self.back = kv.left_edge();
- return Some((k, v));
- }
- Err(last_edge) => unsafe {
- unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
- },
- };
-
- loop {
- match cur_handle.left_kv() {
- Ok(kv) => {
- let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
- let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
- self.back = last_leaf_edge(kv.left_edge().descend());
- return Some((k, v));
- }
- Err(last_edge) => unsafe {
- cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
- },
- }
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> ExactSizeIterator for IntoIter<K, V> {
- fn len(&self) -> usize {
- self.length
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<K, V> FusedIterator for IntoIter<K, V> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Iterator for Keys<'a, K, V> {
- type Item = &'a K;
-
- fn next(&mut self) -> Option<&'a K> {
- self.inner.next().map(|(k, _)| k)
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
- fn next_back(&mut self) -> Option<&'a K> {
- self.inner.next_back().map(|(k, _)| k)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {
- fn len(&self) -> usize {
- self.inner.len()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, K, V> FusedIterator for Keys<'a, K, V> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Clone for Keys<'a, K, V> {
- fn clone(&self) -> Keys<'a, K, V> {
- Keys { inner: self.inner.clone() }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Iterator for Values<'a, K, V> {
- type Item = &'a V;
-
- fn next(&mut self) -> Option<&'a V> {
- self.inner.next().map(|(_, v)| v)
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
- fn next_back(&mut self) -> Option<&'a V> {
- self.inner.next_back().map(|(_, v)| v)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {
- fn len(&self) -> usize {
- self.inner.len()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, K, V> FusedIterator for Values<'a, K, V> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Clone for Values<'a, K, V> {
- fn clone(&self) -> Values<'a, K, V> {
- Values { inner: self.inner.clone() }
- }
-}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, K, V> Iterator for Range<'a, K, V> {
- type Item = (&'a K, &'a V);
-
- fn next(&mut self) -> Option<(&'a K, &'a V)> {
- if self.front == self.back {
- None
- } else {
- unsafe { Some(self.next_unchecked()) }
- }
- }
-}
-
-#[stable(feature = "map_values_mut", since = "1.10.0")]
-impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
- type Item = &'a mut V;
-
- fn next(&mut self) -> Option<&'a mut V> {
- self.inner.next().map(|(_, v)| v)
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-
-#[stable(feature = "map_values_mut", since = "1.10.0")]
-impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
- fn next_back(&mut self) -> Option<&'a mut V> {
- self.inner.next_back().map(|(_, v)| v)
- }
-}
-
-#[stable(feature = "map_values_mut", since = "1.10.0")]
-impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> {
- fn len(&self) -> usize {
- self.inner.len()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, K, V> FusedIterator for ValuesMut<'a, K, V> {}
-
-
-impl<'a, K, V> Range<'a, K, V> {
- unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
- let handle = self.front;
-
- let mut cur_handle = match handle.right_kv() {
- Ok(kv) => {
- let ret = kv.into_kv();
- self.front = kv.right_edge();
- return ret;
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- unwrap_unchecked(next_level)
- }
- };
-
- loop {
- match cur_handle.right_kv() {
- Ok(kv) => {
- let ret = kv.into_kv();
- self.front = first_leaf_edge(kv.right_edge().descend());
- return ret;
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- cur_handle = unwrap_unchecked(next_level);
- }
- }
- }
- }
-}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
- if self.front == self.back {
- None
- } else {
- unsafe { Some(self.next_back_unchecked()) }
- }
- }
-}
-
-impl<'a, K, V> Range<'a, K, V> {
- unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
- let handle = self.back;
-
- let mut cur_handle = match handle.left_kv() {
- Ok(kv) => {
- let ret = kv.into_kv();
- self.back = kv.left_edge();
- return ret;
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- unwrap_unchecked(next_level)
- }
- };
-
- loop {
- match cur_handle.left_kv() {
- Ok(kv) => {
- let ret = kv.into_kv();
- self.back = last_leaf_edge(kv.left_edge().descend());
- return ret;
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- cur_handle = unwrap_unchecked(next_level);
- }
- }
- }
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, K, V> FusedIterator for Range<'a, K, V> {}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, K, V> Clone for Range<'a, K, V> {
- fn clone(&self) -> Range<'a, K, V> {
- Range {
- front: self.front,
- back: self.back,
- }
- }
-}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
- type Item = (&'a K, &'a mut V);
-
- fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
- if self.front == self.back {
- None
- } else {
- unsafe { Some(self.next_unchecked()) }
- }
- }
-}
-
-impl<'a, K, V> RangeMut<'a, K, V> {
- unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
- let handle = ptr::read(&self.front);
-
- let mut cur_handle = match handle.right_kv() {
- Ok(kv) => {
- let (k, v) = ptr::read(&kv).into_kv_mut();
- self.front = kv.right_edge();
- return (k, v);
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- unwrap_unchecked(next_level)
- }
- };
-
- loop {
- match cur_handle.right_kv() {
- Ok(kv) => {
- let (k, v) = ptr::read(&kv).into_kv_mut();
- self.front = first_leaf_edge(kv.right_edge().descend());
- return (k, v);
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- cur_handle = unwrap_unchecked(next_level);
- }
- }
- }
- }
-}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
- if self.front == self.back {
- None
- } else {
- unsafe { Some(self.next_back_unchecked()) }
- }
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, K, V> FusedIterator for RangeMut<'a, K, V> {}
-
-impl<'a, K, V> RangeMut<'a, K, V> {
- unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
- let handle = ptr::read(&self.back);
-
- let mut cur_handle = match handle.left_kv() {
- Ok(kv) => {
- let (k, v) = ptr::read(&kv).into_kv_mut();
- self.back = kv.left_edge();
- return (k, v);
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- unwrap_unchecked(next_level)
- }
- };
-
- loop {
- match cur_handle.left_kv() {
- Ok(kv) => {
- let (k, v) = ptr::read(&kv).into_kv_mut();
- self.back = last_leaf_edge(kv.left_edge().descend());
- return (k, v);
- }
- Err(last_edge) => {
- let next_level = last_edge.into_node().ascend().ok();
- cur_handle = unwrap_unchecked(next_level);
- }
- }
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
- fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
- let mut map = BTreeMap::new();
- map.extend(iter);
- map
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
- #[inline]
- fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
- for (k, v) in iter {
- self.insert(k, v);
- }
- }
-}
-
-#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
- fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
- self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- for elt in self {
- elt.hash(state);
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V> Default for BTreeMap<K, V> {
- /// Creates an empty `BTreeMap<K, V>`.
- fn default() -> BTreeMap<K, V> {
- BTreeMap::new()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
- fn eq(&self, other: &BTreeMap<K, V>) -> bool {
- self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
- #[inline]
- fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
- self.iter().partial_cmp(other.iter())
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
- #[inline]
- fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
- self.iter().cmp(other.iter())
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_map().entries(self.iter()).finish()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: Ord, Q: ?Sized, V> Index<&'a Q> for BTreeMap<K, V>
- where K: Borrow<Q>,
- Q: Ord
-{
- type Output = V;
-
- /// Returns a reference to the value corresponding to the supplied key.
- ///
- /// # Panics
- ///
- /// Panics if the key is not present in the `BTreeMap`.
- #[inline]
- fn index(&self, key: &Q) -> &V {
- self.get(key).expect("no entry found for key")
- }
-}
-
-fn first_leaf_edge<BorrowType, K, V>
- (mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>)
- -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
- loop {
- match node.force() {
- Leaf(leaf) => return leaf.first_edge(),
- Internal(internal) => {
- node = internal.first_edge().descend();
- }
- }
- }
-}
-
-fn last_leaf_edge<BorrowType, K, V>
- (mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>)
- -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
- loop {
- match node.force() {
- Leaf(leaf) => return leaf.last_edge(),
- Internal(internal) => {
- node = internal.last_edge().descend();
- }
- }
- }
-}
-
-fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
- root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
- root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
- range: R
-)-> (Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
- Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>)
- where Q: Ord, K: Borrow<Q>
-{
- match (range.start_bound(), range.end_bound()) {
- (Excluded(s), Excluded(e)) if s==e =>
- panic!("range start and end are equal and excluded in BTreeMap"),
- (Included(s), Included(e)) |
- (Included(s), Excluded(e)) |
- (Excluded(s), Included(e)) |
- (Excluded(s), Excluded(e)) if s>e =>
- panic!("range start is greater than range end in BTreeMap"),
- _ => {},
- };
-
- let mut min_node = root1;
- let mut max_node = root2;
- let mut min_found = false;
- let mut max_found = false;
- let mut diverged = false;
-
- loop {
- let min_edge = match (min_found, range.start_bound()) {
- (false, Included(key)) => match search::search_linear(&min_node, key) {
- (i, true) => { min_found = true; i },
- (i, false) => i,
- },
- (false, Excluded(key)) => match search::search_linear(&min_node, key) {
- (i, true) => { min_found = true; i+1 },
- (i, false) => i,
- },
- (_, Unbounded) => 0,
- (true, Included(_)) => min_node.keys().len(),
- (true, Excluded(_)) => 0,
- };
-
- let max_edge = match (max_found, range.end_bound()) {
- (false, Included(key)) => match search::search_linear(&max_node, key) {
- (i, true) => { max_found = true; i+1 },
- (i, false) => i,
- },
- (false, Excluded(key)) => match search::search_linear(&max_node, key) {
- (i, true) => { max_found = true; i },
- (i, false) => i,
- },
- (_, Unbounded) => max_node.keys().len(),
- (true, Included(_)) => 0,
- (true, Excluded(_)) => max_node.keys().len(),
- };
-
- if !diverged {
- if max_edge < min_edge { panic!("Ord is ill-defined in BTreeMap range") }
- if min_edge != max_edge { diverged = true; }
- }
-
- let front = Handle::new_edge(min_node, min_edge);
- let back = Handle::new_edge(max_node, max_edge);
- match (front.force(), back.force()) {
- (Leaf(f), Leaf(b)) => {
- return (f, b);
- },
- (Internal(min_int), Internal(max_int)) => {
- min_node = min_int.descend();
- max_node = max_int.descend();
- },
- _ => unreachable!("BTreeMap has different depths"),
- };
- }
-}
-
-#[inline(always)]
-unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
- val.unwrap_or_else(|| {
- if cfg!(debug_assertions) {
- panic!("'unchecked' unwrap on None in BTreeMap");
- } else {
- intrinsics::unreachable();
- }
- })
-}
-
-impl<K, V> BTreeMap<K, V> {
- /// Gets an iterator over the entries of the map, sorted by key.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(3, "c");
- /// map.insert(2, "b");
- /// map.insert(1, "a");
- ///
- /// for (key, value) in map.iter() {
- /// println!("{}: {}", key, value);
- /// }
- ///
- /// let (first_key, first_value) = map.iter().next().unwrap();
- /// assert_eq!((*first_key, *first_value), (1, "a"));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter(&self) -> Iter<K, V> {
- Iter {
- range: Range {
- front: first_leaf_edge(self.root.as_ref()),
- back: last_leaf_edge(self.root.as_ref()),
- },
- length: self.length,
- }
- }
-
- /// Gets a mutable iterator over the entries of the map, sorted by key.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert("a", 1);
- /// map.insert("b", 2);
- /// map.insert("c", 3);
- ///
- /// // add 10 to the value if the key isn't "a"
- /// for (key, value) in map.iter_mut() {
- /// if key != &"a" {
- /// *value += 10;
- /// }
- /// }
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter_mut(&mut self) -> IterMut<K, V> {
- let root1 = self.root.as_mut();
- let root2 = unsafe { ptr::read(&root1) };
- IterMut {
- range: RangeMut {
- front: first_leaf_edge(root1),
- back: last_leaf_edge(root2),
- _marker: PhantomData,
- },
- length: self.length,
- }
- }
-
- /// Gets an iterator over the keys of the map, in sorted order.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// a.insert(2, "b");
- /// a.insert(1, "a");
- ///
- /// let keys: Vec<_> = a.keys().cloned().collect();
- /// assert_eq!(keys, [1, 2]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
- Keys { inner: self.iter() }
- }
-
- /// Gets an iterator over the values of the map, in order by key.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// a.insert(1, "hello");
- /// a.insert(2, "goodbye");
- ///
- /// let values: Vec<&str> = a.values().cloned().collect();
- /// assert_eq!(values, ["hello", "goodbye"]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn values<'a>(&'a self) -> Values<'a, K, V> {
- Values { inner: self.iter() }
- }
-
- /// Gets a mutable iterator over the values of the map, in order by key.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// a.insert(1, String::from("hello"));
- /// a.insert(2, String::from("goodbye"));
- ///
- /// for value in a.values_mut() {
- /// value.push_str("!");
- /// }
- ///
- /// let values: Vec<String> = a.values().cloned().collect();
- /// assert_eq!(values, [String::from("hello!"),
- /// String::from("goodbye!")]);
- /// ```
- #[stable(feature = "map_values_mut", since = "1.10.0")]
- pub fn values_mut(&mut self) -> ValuesMut<K, V> {
- ValuesMut { inner: self.iter_mut() }
- }
-
- /// Returns the number of elements in the map.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// assert_eq!(a.len(), 0);
- /// a.insert(1, "a");
- /// assert_eq!(a.len(), 1);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn len(&self) -> usize {
- self.length
- }
-
- /// Returns `true` if the map contains no elements.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut a = BTreeMap::new();
- /// assert!(a.is_empty());
- /// a.insert(1, "a");
- /// assert!(!a.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_empty(&self) -> bool {
- self.len() == 0
- }
-}
-
-impl<'a, K: Ord, V> Entry<'a, K, V> {
- /// Ensures a value is in the entry by inserting the default if empty, and returns
- /// a mutable reference to the value in the entry.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- ///
- /// assert_eq!(map["poneyland"], 12);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn or_insert(self, default: V) -> &'a mut V {
- match self {
- Occupied(entry) => entry.into_mut(),
- Vacant(entry) => entry.insert(default),
- }
- }
-
- /// Ensures a value is in the entry by inserting the result of the default function if empty,
- /// and returns a mutable reference to the value in the entry.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
- /// let s = "hoho".to_string();
- ///
- /// map.entry("poneyland").or_insert_with(|| s);
- ///
- /// assert_eq!(map["poneyland"], "hoho".to_string());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
- match self {
- Occupied(entry) => entry.into_mut(),
- Vacant(entry) => entry.insert(default()),
- }
- }
-
- /// Returns a reference to this entry's key.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
- /// ```
- #[stable(feature = "map_entry_keys", since = "1.10.0")]
- pub fn key(&self) -> &K {
- match *self {
- Occupied(ref entry) => entry.key(),
- Vacant(ref entry) => entry.key(),
- }
- }
-
- /// Provides in-place mutable access to an occupied entry before any
- /// potential inserts into the map.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- ///
- /// map.entry("poneyland")
- /// .and_modify(|e| { *e += 1 })
- /// .or_insert(42);
- /// assert_eq!(map["poneyland"], 42);
- ///
- /// map.entry("poneyland")
- /// .and_modify(|e| { *e += 1 })
- /// .or_insert(42);
- /// assert_eq!(map["poneyland"], 43);
- /// ```
- #[stable(feature = "entry_and_modify", since = "1.26.0")]
- pub fn and_modify<F>(self, f: F) -> Self
- where F: FnOnce(&mut V)
- {
- match self {
- Occupied(mut entry) => {
- f(entry.get_mut());
- Occupied(entry)
- },
- Vacant(entry) => Vacant(entry),
- }
- }
-}
-
-impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
- #[stable(feature = "entry_or_default", since = "1.28.0")]
- /// Ensures a value is in the entry by inserting the default value if empty,
- /// and returns a mutable reference to the value in the entry.
- ///
- /// # Examples
- ///
- /// ```
- /// # fn main() {
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
- /// map.entry("poneyland").or_default();
- ///
- /// assert_eq!(map["poneyland"], None);
- /// # }
- /// ```
- pub fn or_default(self) -> &'a mut V {
- match self {
- Occupied(entry) => entry.into_mut(),
- Vacant(entry) => entry.insert(Default::default()),
- }
- }
-
-}
-
-impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
- /// Gets a reference to the key that would be used when inserting a value
- /// through the VacantEntry.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
- /// ```
- #[stable(feature = "map_entry_keys", since = "1.10.0")]
- pub fn key(&self) -> &K {
- &self.key
- }
-
- /// Take ownership of the key.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::collections::btree_map::Entry;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- ///
- /// if let Entry::Vacant(v) = map.entry("poneyland") {
- /// v.into_key();
- /// }
- /// ```
- #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
- pub fn into_key(self) -> K {
- self.key
- }
-
- /// Sets the value of the entry with the `VacantEntry`'s key,
- /// and returns a mutable reference to it.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
- ///
- /// // count the number of occurrences of letters in the vec
- /// for x in vec!["a","b","a","c","a","b"] {
- /// *count.entry(x).or_insert(0) += 1;
- /// }
- ///
- /// assert_eq!(count["a"], 3);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(self, value: V) -> &'a mut V {
- *self.length += 1;
-
- let out_ptr;
-
- let mut ins_k;
- let mut ins_v;
- let mut ins_edge;
-
- let mut cur_parent = match self.handle.insert(self.key, value) {
- (Fit(handle), _) => return handle.into_kv_mut().1,
- (Split(left, k, v, right), ptr) => {
- ins_k = k;
- ins_v = v;
- ins_edge = right;
- out_ptr = ptr;
- left.ascend().map_err(|n| n.into_root_mut())
- }
- };
-
- loop {
- match cur_parent {
- Ok(parent) => {
- match parent.insert(ins_k, ins_v, ins_edge) {
- Fit(_) => return unsafe { &mut *out_ptr },
- Split(left, k, v, right) => {
- ins_k = k;
- ins_v = v;
- ins_edge = right;
- cur_parent = left.ascend().map_err(|n| n.into_root_mut());
- }
- }
- }
- Err(root) => {
- root.push_level().push(ins_k, ins_v, ins_edge);
- return unsafe { &mut *out_ptr };
- }
- }
- }
- }
-}
-
-impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
- /// Gets a reference to the key in the entry.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
- /// ```
- #[stable(feature = "map_entry_keys", since = "1.10.0")]
- pub fn key(&self) -> &K {
- self.handle.reborrow().into_kv().0
- }
-
- /// Take ownership of the key and value from the map.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::collections::btree_map::Entry;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- ///
- /// if let Entry::Occupied(o) = map.entry("poneyland") {
- /// // We delete the entry from the map.
- /// o.remove_entry();
- /// }
- ///
- /// // If now try to get the value, it will panic:
- /// // println!("{}", map["poneyland"]);
- /// ```
- #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
- pub fn remove_entry(self) -> (K, V) {
- self.remove_kv()
- }
-
- /// Gets a reference to the value in the entry.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::collections::btree_map::Entry;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- ///
- /// if let Entry::Occupied(o) = map.entry("poneyland") {
- /// assert_eq!(o.get(), &12);
- /// }
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get(&self) -> &V {
- self.handle.reborrow().into_kv().1
- }
-
- /// Gets a mutable reference to the value in the entry.
- ///
- /// If you need a reference to the `OccupiedEntry` which may outlive the
- /// destruction of the `Entry` value, see [`into_mut`].
- ///
- /// [`into_mut`]: #method.into_mut
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::collections::btree_map::Entry;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- ///
- /// assert_eq!(map["poneyland"], 12);
- /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
- /// *o.get_mut() += 10;
- /// assert_eq!(*o.get(), 22);
- ///
- /// // We can use the same Entry multiple times.
- /// *o.get_mut() += 2;
- /// }
- /// assert_eq!(map["poneyland"], 24);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get_mut(&mut self) -> &mut V {
- self.handle.kv_mut().1
- }
-
- /// Converts the entry into a mutable reference to its value.
- ///
- /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
- ///
- /// [`get_mut`]: #method.get_mut
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::collections::btree_map::Entry;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- ///
- /// assert_eq!(map["poneyland"], 12);
- /// if let Entry::Occupied(o) = map.entry("poneyland") {
- /// *o.into_mut() += 10;
- /// }
- /// assert_eq!(map["poneyland"], 22);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn into_mut(self) -> &'a mut V {
- self.handle.into_kv_mut().1
- }
-
- /// Sets the value of the entry with the `OccupiedEntry`'s key,
- /// and returns the entry's old value.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::collections::btree_map::Entry;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- ///
- /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
- /// assert_eq!(o.insert(15), 12);
- /// }
- /// assert_eq!(map["poneyland"], 15);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(&mut self, value: V) -> V {
- mem::replace(self.get_mut(), value)
- }
-
- /// Takes the value of the entry out of the map, and returns it.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- /// use std::collections::btree_map::Entry;
- ///
- /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
- /// map.entry("poneyland").or_insert(12);
- ///
- /// if let Entry::Occupied(o) = map.entry("poneyland") {
- /// assert_eq!(o.remove(), 12);
- /// }
- /// // If we try to get "poneyland"'s value, it'll panic:
- /// // println!("{}", map["poneyland"]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn remove(self) -> V {
- self.remove_kv().1
- }
-
- fn remove_kv(self) -> (K, V) {
- *self.length -= 1;
-
- let (small_leaf, old_key, old_val) = match self.handle.force() {
- Leaf(leaf) => {
- let (hole, old_key, old_val) = leaf.remove();
- (hole.into_node(), old_key, old_val)
- }
- Internal(mut internal) => {
- let key_loc = internal.kv_mut().0 as *mut K;
- let val_loc = internal.kv_mut().1 as *mut V;
-
- let to_remove = first_leaf_edge(internal.right_edge().descend()).right_kv().ok();
- let to_remove = unsafe { unwrap_unchecked(to_remove) };
-
- let (hole, key, val) = to_remove.remove();
-
- let old_key = unsafe { mem::replace(&mut *key_loc, key) };
- let old_val = unsafe { mem::replace(&mut *val_loc, val) };
-
- (hole.into_node(), old_key, old_val)
- }
- };
-
- // Handle underflow
- let mut cur_node = small_leaf.forget_type();
- while cur_node.len() < node::CAPACITY / 2 {
- match handle_underfull_node(cur_node) {
- AtRoot => break,
- EmptyParent(_) => unreachable!(),
- Merged(parent) => {
- if parent.len() == 0 {
- // We must be at the root
- parent.into_root_mut().pop_level();
- break;
- } else {
- cur_node = parent.forget_type();
- }
- }
- Stole(_) => break,
- }
- }
-
- (old_key, old_val)
- }
-}
-
-enum UnderflowResult<'a, K, V> {
- AtRoot,
- EmptyParent(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
- Merged(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
- Stole(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
-}
-
-fn handle_underfull_node<'a, K, V>(node: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>)
- -> UnderflowResult<'a, K, V> {
- let parent = if let Ok(parent) = node.ascend() {
- parent
- } else {
- return AtRoot;
- };
-
- let (is_left, mut handle) = match parent.left_kv() {
- Ok(left) => (true, left),
- Err(parent) => {
- match parent.right_kv() {
- Ok(right) => (false, right),
- Err(parent) => {
- return EmptyParent(parent.into_node());
- }
- }
- }
- };
-
- if handle.can_merge() {
- Merged(handle.merge().into_node())
- } else {
- if is_left {
- handle.steal_left();
- } else {
- handle.steal_right();
- }
- Stole(handle.into_node())
- }
-}
-
-impl<K: Ord, V, I: Iterator<Item = (K, V)>> Iterator for MergeIter<K, V, I> {
- type Item = (K, V);
-
- fn next(&mut self) -> Option<(K, V)> {
- let res = match (self.left.peek(), self.right.peek()) {
- (Some(&(ref left_key, _)), Some(&(ref right_key, _))) => left_key.cmp(right_key),
- (Some(_), None) => Ordering::Less,
- (None, Some(_)) => Ordering::Greater,
- (None, None) => return None,
- };
-
- // Check which elements comes first and only advance the corresponding iterator.
- // If two keys are equal, take the value from `right`.
- match res {
- Ordering::Less => self.left.next(),
- Ordering::Greater => self.right.next(),
- Ordering::Equal => {
- self.left.next();
- self.right.next()
- }
- }
- }
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-mod node;
-mod search;
-pub mod map;
-pub mod set;
-
-#[doc(hidden)]
-trait Recover<Q: ?Sized> {
- type Key;
-
- fn get(&self, key: &Q) -> Option<&Self::Key>;
- fn take(&mut self, key: &Q) -> Option<Self::Key>;
- fn replace(&mut self, key: Self::Key) -> Option<Self::Key>;
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// This is an attempt at an implementation following the ideal
-//
-// ```
-// struct BTreeMap<K, V> {
-// height: usize,
-// root: Option<Box<Node<K, V, height>>>
-// }
-//
-// struct Node<K, V, height: usize> {
-// keys: [K; 2 * B - 1],
-// vals: [V; 2 * B - 1],
-// edges: if height > 0 {
-// [Box<Node<K, V, height - 1>>; 2 * B]
-// } else { () },
-// parent: *const Node<K, V, height + 1>,
-// parent_idx: u16,
-// len: u16,
-// }
-// ```
-//
-// Since Rust doesn't actually have dependent types and polymorphic recursion,
-// we make do with lots of unsafety.
-
-// A major goal of this module is to avoid complexity by treating the tree as a generic (if
-// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
-// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
-// even what underfull means. However, we do rely on a few invariants:
-//
-// - Trees must have uniform depth/height. This means that every path down to a leaf from a
-// given node has exactly the same length.
-// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
-// This implies that even an empty internal node has at least one edge.
-
-use core::marker::PhantomData;
-use core::mem;
-use core::ptr::{self, Unique, NonNull};
-use core::slice;
-
-use alloc::{Global, Alloc, Layout};
-use boxed::Box;
-
-const B: usize = 6;
-pub const MIN_LEN: usize = B - 1;
-pub const CAPACITY: usize = 2 * B - 1;
-
-/// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
-/// these, since only the first `len` keys and values are assumed to be initialized. As such,
-/// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
-/// case.
-///
-/// See also rust-lang/rfcs#197, which would make this structure significantly more safe by
-/// avoiding accidentally dropping unused and uninitialized keys and values.
-///
-/// We put the metadata first so that its position is the same for every `K` and `V`, in order
-/// to statically allocate a single dummy node to avoid allocations. This struct is `repr(C)` to
-/// prevent them from being reordered.
-#[repr(C)]
-struct LeafNode<K, V> {
- /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
- /// This either points to an actual node or is null.
- parent: *const InternalNode<K, V>,
-
- /// This node's index into the parent node's `edges` array.
- /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
- /// This is only guaranteed to be initialized when `parent` is nonnull.
- parent_idx: u16,
-
- /// The number of keys and values this node stores.
- ///
- /// This is next to `parent_idx` to encourage the compiler to join `len` and
- /// `parent_idx` into the same 32-bit word, reducing space overhead.
- len: u16,
-
- /// The arrays storing the actual data of the node. Only the first `len` elements of each
- /// array are initialized and valid.
- keys: [K; CAPACITY],
- vals: [V; CAPACITY],
-}
-
-impl<K, V> LeafNode<K, V> {
- /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
- /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
- unsafe fn new() -> Self {
- LeafNode {
- // As a general policy, we leave fields uninitialized if they can be, as this should
- // be both slightly faster and easier to track in Valgrind.
- keys: mem::uninitialized(),
- vals: mem::uninitialized(),
- // A null parent marks this node as a root; `parent_idx` is then meaningless
- // and may stay uninitialized (see the field docs on `LeafNode`).
- parent: ptr::null(),
- parent_idx: mem::uninitialized(),
- len: 0
- }
- }
-
- /// Returns `true` if this node is the statically allocated `EMPTY_ROOT_NODE`,
- /// detected purely by address comparison. The shared root must never be
- /// mutated or deallocated.
- fn is_shared_root(&self) -> bool {
- self as *const _ == &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V>
- }
-}
-
-// We need to implement Sync here in order to make a static instance.
-// Only the `((), ())` instantiation needs it, since that is the static's type.
-unsafe impl Sync for LeafNode<(), ()> {}
-
-// An empty node used as a placeholder for the root node, to avoid allocations.
-// We use () in order to save space, since no operation on an empty tree will
-// ever take a pointer past the first key.
-static EMPTY_ROOT_NODE: LeafNode<(), ()> = LeafNode {
- parent: ptr::null(),
- parent_idx: 0,
- len: 0,
- keys: [(); CAPACITY],
- vals: [(); CAPACITY],
-};
-
-/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
-/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
-/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
-/// node, allowing code to act on leaf and internal nodes generically without having to even check
-/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
-#[repr(C)]
-struct InternalNode<K, V> {
- // Must be the first field for the leaf-prefix cast described above to be valid.
- data: LeafNode<K, V>,
-
- /// The pointers to the children of this node. `len + 1` of these are considered
- /// initialized and valid.
- edges: [BoxedNode<K, V>; 2 * B],
-}
-
-impl<K, V> InternalNode<K, V> {
- /// Creates a new `InternalNode`.
- ///
- /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
- /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
- /// edges are initialized and valid, meaning that even when the node is empty (having a
- /// `len` of 0), there must be one initialized and valid edge. This function does not set up
- /// such an edge.
- unsafe fn new() -> Self {
- InternalNode {
- data: LeafNode::new(),
- // The caller must initialize `edges[0]` before the node is usable.
- edges: mem::uninitialized()
- }
- }
-}
-
-/// An owned pointer to a node. This basically is either `Box<LeafNode<K, V>>` or
-/// `Box<InternalNode<K, V>>`. However, it contains no information as to which of the two types
-/// of nodes is actually behind the box, and, partially due to this lack of information, has no
-/// destructor.
-struct BoxedNode<K, V> {
- // `Unique` keeps `BoxedNode` covariant in `K` and `V`, like `Box` would.
- ptr: Unique<LeafNode<K, V>>
-}
-
-impl<K, V> BoxedNode<K, V> {
- /// Takes ownership of a heap-allocated leaf node.
- fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
- BoxedNode { ptr: Box::into_unique(node) }
- }
-
- /// Takes ownership of a heap-allocated internal node. The cast to `LeafNode`
- /// is valid because `InternalNode` is `repr(C)` with a leading `LeafNode`.
- fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
- unsafe {
- BoxedNode { ptr: Unique::new_unchecked(Box::into_raw(node) as *mut LeafNode<K, V>) }
- }
- }
-
- /// Wraps a raw node pointer. Unsafe: the caller asserts ownership of the node.
- unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
- BoxedNode { ptr: Unique::from(ptr) }
- }
-
- /// Returns the node pointer without giving up ownership.
- fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
- NonNull::from(self.ptr)
- }
-}
-
-/// An owned tree. Note that despite being owned, this does not have a destructor,
-/// and must be cleaned up manually.
-pub struct Root<K, V> {
- node: BoxedNode<K, V>,
- // Number of levels below the root node; 0 means the root is a leaf.
- height: usize
-}
-
-// Safe to share/send when the contents are: the raw pointers inside only ever
-// reach nodes owned by this tree.
-unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> { }
-unsafe impl<K: Send, V: Send> Send for Root<K, V> { }
-
-impl<K, V> Root<K, V> {
- /// Whether the root is the statically allocated empty placeholder node.
- pub fn is_shared_root(&self) -> bool {
- self.as_ref().is_shared_root()
- }
-
- /// Returns a tree backed by the static `EMPTY_ROOT_NODE`, avoiding any allocation.
- /// Such a tree must never be mutated through this root without replacing the node.
- pub fn shared_empty_root() -> Self {
- Root {
- node: unsafe {
- BoxedNode::from_ptr(NonNull::new_unchecked(
- &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V> as *mut _
- ))
- },
- height: 0,
- }
- }
-
- /// Allocates a fresh, empty leaf node and makes it the root of a new tree.
- pub fn new_leaf() -> Self {
- Root {
- node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })),
- height: 0
- }
- }
-
- /// Borrows the root node immutably.
- pub fn as_ref(&self)
- -> NodeRef<marker::Immut, K, V, marker::LeafOrInternal> {
- NodeRef {
- height: self.height,
- node: self.node.as_ptr(),
- root: self as *const _ as *mut _,
- _marker: PhantomData,
- }
- }
-
- /// Borrows the root node mutably.
- pub fn as_mut(&mut self)
- -> NodeRef<marker::Mut, K, V, marker::LeafOrInternal> {
- NodeRef {
- height: self.height,
- node: self.node.as_ptr(),
- root: self as *mut _,
- _marker: PhantomData,
- }
- }
-
- /// Consumes the root, yielding an owned reference to the root node.
- pub fn into_ref(self)
- -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
- NodeRef {
- height: self.height,
- node: self.node.as_ptr(),
- root: ptr::null_mut(), // FIXME: Is there anything better to do here?
- _marker: PhantomData,
- }
- }
-
- /// Adds a new internal node with a single edge, pointing to the previous root, and make that
- /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
- pub fn push_level(&mut self)
- -> NodeRef<marker::Mut, K, V, marker::Internal> {
- debug_assert!(!self.is_shared_root());
- let mut new_node = Box::new(unsafe { InternalNode::new() });
- // Satisfy the internal-node invariant: even at `len == 0` one edge is valid.
- new_node.edges[0] = unsafe { BoxedNode::from_ptr(self.node.as_ptr()) };
-
- self.node = BoxedNode::from_internal(new_node);
- self.height += 1;
-
- let mut ret = NodeRef {
- height: self.height,
- node: self.node.as_ptr(),
- root: self as *mut _,
- _marker: PhantomData
- };
-
- unsafe {
- // The old root's parent pointer must now point at the new root.
- ret.reborrow_mut().first_edge().correct_parent_link();
- }
-
- ret
- }
-
- /// Removes the root node, using its first child as the new root. This cannot be called when
- /// the tree consists only of a leaf node. As it is intended only to be called when the root
- /// has only one edge, no cleanup is done on any of the other children or elements of the root.
- /// This decreases the height by 1 and is the opposite of `push_level`.
- pub fn pop_level(&mut self) {
- debug_assert!(self.height > 0);
-
- let top = self.node.ptr;
-
- self.node = unsafe {
- BoxedNode::from_ptr(self.as_mut()
- .cast_unchecked::<marker::Internal>()
- .first_edge()
- .descend()
- .node)
- };
- self.height -= 1;
- // The promoted child is the new root; clear its (now dangling) parent link.
- self.as_mut().as_leaf_mut().parent = ptr::null();
-
- unsafe {
- // Free the old root. Keys/values/other edges are not dropped here;
- // per the doc above, the caller guarantees there were none to clean up.
- Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
- }
- }
-}
-
-// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
-// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
-// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
-// However, whenever a public type wraps `NodeRef`, make sure that it has the
-// correct variance.
-/// A reference to a node.
-///
-/// This type has a number of parameters that controls how it acts:
-/// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
-/// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
-/// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
-/// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
-/// - `K` and `V`: These control what types of things are stored in the nodes.
-/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
-/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
-/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
-/// `NodeRef` could be pointing to either type of node.
-pub struct NodeRef<BorrowType, K, V, Type> {
- height: usize,
- node: NonNull<LeafNode<K, V>>,
- // Pointer back to the owning `Root`. Set by `Root::as_ref`/`as_mut`, but null
- // for `Owned` refs made by `into_ref`; it is only dereferenced when the borrow
- // type is `Mut` (see `into_root_mut`).
- root: *const Root<K, V>,
- _marker: PhantomData<(BorrowType, Type)>
-}
-
-// Immutable node references are freely copyable, like `&T`.
-impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> { }
-impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
- fn clone(&self) -> Self {
- *self
- }
-}
-
-// Thread-safety mirrors that of `&T` / `&mut T` / `Box<T>` for the
-// corresponding borrow types.
-unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync
- for NodeRef<BorrowType, K, V, Type> { }
-
-unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send
- for NodeRef<marker::Immut<'a>, K, V, Type> { }
-unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send
- for NodeRef<marker::Mut<'a>, K, V, Type> { }
-unsafe impl<K: Send, V: Send, Type> Send
- for NodeRef<marker::Owned, K, V, Type> { }
-
-impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
- /// Exposes the full `InternalNode`. The cast is sound because the `Internal`
- /// type parameter guarantees the pointed-to node is an internal node, whose
- /// `repr(C)` layout starts with a `LeafNode`.
- fn as_internal(&self) -> &InternalNode<K, V> {
- unsafe {
- &*(self.node.as_ptr() as *mut InternalNode<K, V>)
- }
- }
-}
-
-impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
- /// Mutable counterpart of `as_internal`; same layout-based cast, available
- /// only for `Mut` borrows.
- fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
- unsafe {
- &mut *(self.node.as_ptr() as *mut InternalNode<K, V>)
- }
- }
-}
-
-
-impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
- /// Finds the length of the node. This is the number of keys or values. In an
- /// internal node, the number of edges is `len() + 1`.
- pub fn len(&self) -> usize {
- self.as_leaf().len as usize
- }
-
- /// Returns the height of this node in the whole tree. Zero height denotes the
- /// leaf level.
- pub fn height(&self) -> usize {
- self.height
- }
-
- /// Removes any static information about whether this node is a `Leaf` or an
- /// `Internal` node.
- pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
- NodeRef {
- height: self.height,
- node: self.node,
- root: self.root,
- _marker: PhantomData
- }
- }
-
- /// Temporarily takes out another, immutable reference to the same node.
- fn reborrow<'a>(&'a self) -> NodeRef<marker::Immut<'a>, K, V, Type> {
- NodeRef {
- height: self.height,
- node: self.node,
- root: self.root,
- _marker: PhantomData
- }
- }
-
- /// Views the node as its leaf prefix, valid for both leaf and internal nodes
- /// thanks to the `repr(C)` layout of `InternalNode`.
- fn as_leaf(&self) -> &LeafNode<K, V> {
- unsafe {
- self.node.as_ref()
- }
- }
-
- /// Whether this node is the statically allocated empty root placeholder.
- pub fn is_shared_root(&self) -> bool {
- self.as_leaf().is_shared_root()
- }
-
- /// The initialized keys of this node, as a slice.
- pub fn keys(&self) -> &[K] {
- self.reborrow().into_key_slice()
- }
-
- /// The initialized values of this node, as a slice.
- fn vals(&self) -> &[V] {
- self.reborrow().into_val_slice()
- }
-
- /// Finds the parent of the current node. Returns `Ok(handle)` if the current
- /// node actually has a parent, where `handle` points to the edge of the parent
- /// that points to the current node. Returns `Err(self)` if the current node has
- /// no parent, giving back the original `NodeRef`.
- ///
- /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
- /// both, upon success, do nothing.
- pub fn ascend(self) -> Result<
- Handle<
- NodeRef<
- BorrowType,
- K, V,
- marker::Internal
- >,
- marker::Edge
- >,
- Self
- > {
- // A null parent pointer means this node is the root (see `LeafNode::parent`).
- let parent_as_leaf = self.as_leaf().parent as *const LeafNode<K, V>;
- if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
- Ok(Handle {
- node: NodeRef {
- height: self.height + 1,
- node: non_zero,
- root: self.root,
- _marker: PhantomData
- },
- idx: self.as_leaf().parent_idx as usize,
- _marker: PhantomData
- })
- } else {
- Err(self)
- }
- }
-
- pub fn first_edge(self) -> Handle<Self, marker::Edge> {
- Handle::new_edge(self, 0)
- }
-
- pub fn last_edge(self) -> Handle<Self, marker::Edge> {
- let len = self.len();
- Handle::new_edge(self, len)
- }
-
- /// Note that `self` must be nonempty.
- pub fn first_kv(self) -> Handle<Self, marker::KV> {
- debug_assert!(self.len() > 0);
- Handle::new_kv(self, 0)
- }
-
- /// Note that `self` must be nonempty.
- pub fn last_kv(self) -> Handle<Self, marker::KV> {
- let len = self.len();
- debug_assert!(len > 0);
- Handle::new_kv(self, len - 1)
- }
-}
-
-impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
- /// Similar to `ascend`, gets a reference to a node's parent node, but also
- /// deallocate the current node in the process. This is unsafe because the
- /// current node will still be accessible despite being deallocated.
- pub unsafe fn deallocate_and_ascend(self) -> Option<
- Handle<
- NodeRef<
- marker::Owned,
- K, V,
- marker::Internal
- >,
- marker::Edge
- >
- > {
- // The shared static root was never heap-allocated and must not be freed.
- debug_assert!(!self.is_shared_root());
- let node = self.node;
- let ret = self.ascend().ok();
- // Leaf layout: frees only the `LeafNode` portion; keys/values are not dropped.
- Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
- ret
- }
-}
-
-impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
- /// Similar to `ascend`, gets a reference to a node's parent node, but also
- /// deallocate the current node in the process. This is unsafe because the
- /// current node will still be accessible despite being deallocated.
- pub unsafe fn deallocate_and_ascend(self) -> Option<
- Handle<
- NodeRef<
- marker::Owned,
- K, V,
- marker::Internal
- >,
- marker::Edge
- >
- > {
- let node = self.node;
- let ret = self.ascend().ok();
- // Frees the full `InternalNode` layout (leaf prefix plus edge array);
- // contents are not dropped here.
- Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
- ret
- }
-}
-
-impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
- /// Unsafely asserts to the compiler some static information about whether this
- /// node is a `Leaf`.
- unsafe fn cast_unchecked<NewType>(&mut self)
- -> NodeRef<marker::Mut, K, V, NewType> {
-
- NodeRef {
- height: self.height,
- node: self.node,
- root: self.root,
- _marker: PhantomData
- }
- }
-
- /// Temporarily takes out another, mutable reference to the same node. Beware, as
- /// this method is very dangerous, doubly so since it may not immediately appear
- /// dangerous.
- ///
- /// Because mutable pointers can roam anywhere around the tree and can even (through
- /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
- /// can easily be used to make the original mutable pointer dangling, or, in the case
- /// of a reborrowed handle, out of bounds.
- // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
- // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
- unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut, K, V, Type> {
- NodeRef {
- height: self.height,
- node: self.node,
- root: self.root,
- _marker: PhantomData
- }
- }
-
- /// Mutable view of the leaf prefix, valid for both node kinds via `repr(C)`.
- fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
- unsafe {
- self.node.as_mut()
- }
- }
-
- /// The initialized keys of this node, as a mutable slice.
- fn keys_mut(&mut self) -> &mut [K] {
- unsafe { self.reborrow_mut().into_key_slice_mut() }
- }
-
- /// The initialized values of this node, as a mutable slice.
- fn vals_mut(&mut self) -> &mut [V] {
- unsafe { self.reborrow_mut().into_val_slice_mut() }
- }
-}
-
-impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
- fn into_key_slice(self) -> &'a [K] {
- // When taking a pointer to the keys, if our key has a stricter
- // alignment requirement than the shared root does, then the pointer
- // would be out of bounds, which LLVM assumes will not happen. If the
- // alignment is more strict, we need to make an empty slice that doesn't
- // use an out of bounds pointer.
- if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
- &[]
- } else {
- // Here either it's not the root, or the alignment is less strict,
- // in which case the keys pointer will point "one-past-the-end" of
- // the node, which is allowed by LLVM.
- unsafe {
- slice::from_raw_parts(
- self.as_leaf().keys.as_ptr(),
- self.len()
- )
- }
- }
- }
-
- /// Values never live in the shared root (it is empty), so no alignment
- /// workaround is needed here — just assert we are not the shared root.
- fn into_val_slice(self) -> &'a [V] {
- debug_assert!(!self.is_shared_root());
- unsafe {
- slice::from_raw_parts(
- self.as_leaf().vals.as_ptr(),
- self.len()
- )
- }
- }
-
- /// Returns both slices at once. The `ptr::read` duplicates the (Copy-like,
- /// immutable) NodeRef so each slice method can consume one by value.
- fn into_slices(self) -> (&'a [K], &'a [V]) {
- let k = unsafe { ptr::read(&self) };
- (k.into_key_slice(), self.into_val_slice())
- }
-}
-
-impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
- /// Gets a mutable reference to the root itself. This is useful primarily when the
- /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
- pub fn into_root_mut(self) -> &'a mut Root<K, V> {
- unsafe {
- &mut *(self.root as *mut Root<K, V>)
- }
- }
-
- fn into_key_slice_mut(mut self) -> &'a mut [K] {
- if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
- &mut []
- } else {
- unsafe {
- slice::from_raw_parts_mut(
- &mut self.as_leaf_mut().keys as *mut [K] as *mut K,
- self.len()
- )
- }
- }
- }
-
- fn into_val_slice_mut(mut self) -> &'a mut [V] {
- debug_assert!(!self.is_shared_root());
- unsafe {
- slice::from_raw_parts_mut(
- &mut self.as_leaf_mut().vals as *mut [V] as *mut V,
- self.len()
- )
- }
- }
-
- fn into_slices_mut(self) -> (&'a mut [K], &'a mut [V]) {
- let k = unsafe { ptr::read(&self) };
- (k.into_key_slice_mut(), self.into_val_slice_mut())
- }
-}
-
-impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
- /// Adds a key/value pair the end of the node.
- pub fn push(&mut self, key: K, val: V) {
- // Necessary for correctness, but this is an internal module
- debug_assert!(self.len() < CAPACITY);
- debug_assert!(!self.is_shared_root());
-
- let idx = self.len();
-
- unsafe {
- // `ptr::write` avoids dropping the uninitialized slot being overwritten.
- ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
- ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
- }
-
- self.as_leaf_mut().len += 1;
- }
-
- /// Adds a key/value pair to the beginning of the node.
- pub fn push_front(&mut self, key: K, val: V) {
- // Necessary for correctness, but this is an internal module
- debug_assert!(self.len() < CAPACITY);
- debug_assert!(!self.is_shared_root());
-
- unsafe {
- // Shifts existing pairs right by one, then writes at index 0.
- slice_insert(self.keys_mut(), 0, key);
- slice_insert(self.vals_mut(), 0, val);
- }
-
- self.as_leaf_mut().len += 1;
- }
-}
-
-impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
- /// Adds a key/value pair and an edge to go to the right of that pair to
- /// the end of the node.
- pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
- // Necessary for correctness, but this is an internal module
- debug_assert!(edge.height == self.height - 1);
- debug_assert!(self.len() < CAPACITY);
-
- let idx = self.len();
-
- unsafe {
- ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
- ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
- // A node of length n has n + 1 edges, so the new edge goes at idx + 1.
- ptr::write(self.as_internal_mut().edges.get_unchecked_mut(idx + 1), edge.node);
-
- self.as_leaf_mut().len += 1;
-
- // The adopted child must now point back at this node.
- Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
- }
- }
-
- /// Re-points the parent link of the children at indices `first..after_last`.
- fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
- for i in first..after_last {
- Handle::new_edge(unsafe { self.reborrow_mut() }, i).correct_parent_link();
- }
- }
-
- fn correct_all_childrens_parent_links(&mut self) {
- let len = self.len();
- self.correct_childrens_parent_links(0, len + 1);
- }
-
- /// Adds a key/value pair and an edge to go to the left of that pair to
- /// the beginning of the node.
- pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
- // Necessary for correctness, but this is an internal module
- debug_assert!(edge.height == self.height - 1);
- debug_assert!(self.len() < CAPACITY);
-
- unsafe {
- slice_insert(self.keys_mut(), 0, key);
- slice_insert(self.vals_mut(), 0, val);
- // Manually build the edges slice: `len() + 1` edges are initialized.
- slice_insert(
- slice::from_raw_parts_mut(
- self.as_internal_mut().edges.as_mut_ptr(),
- self.len()+1
- ),
- 0,
- edge.node
- );
-
- self.as_leaf_mut().len += 1;
-
- // Every child shifted, so every parent_idx must be rewritten.
- self.correct_all_childrens_parent_links();
- }
- }
-}
-
-impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
- /// Removes a key/value pair from the end of this node. If this is an internal node,
- /// also removes the edge that was to the right of that pair.
- pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
- // Necessary for correctness, but this is an internal module
- debug_assert!(self.len() > 0);
-
- let idx = self.len() - 1;
-
- unsafe {
- // `ptr::read` moves the pair out; the slot is logically dead once
- // `len` is decremented below.
- let key = ptr::read(self.keys().get_unchecked(idx));
- let val = ptr::read(self.vals().get_unchecked(idx));
- let edge = match self.reborrow_mut().force() {
- ForceResult::Leaf(_) => None,
- ForceResult::Internal(internal) => {
- let edge = ptr::read(internal.as_internal().edges.get_unchecked(idx + 1));
- let mut new_root = Root { node: edge, height: internal.height - 1 };
- // The detached subtree is now a root of its own.
- new_root.as_mut().as_leaf_mut().parent = ptr::null();
- Some(new_root)
- }
- };
-
- self.as_leaf_mut().len -= 1;
- (key, val, edge)
- }
- }
-
- /// Removes a key/value pair from the beginning of this node. If this is an internal node,
- /// also removes the edge that was to the left of that pair.
- pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
- // Necessary for correctness, but this is an internal module
- debug_assert!(self.len() > 0);
-
- let old_len = self.len();
-
- unsafe {
- let key = slice_remove(self.keys_mut(), 0);
- let val = slice_remove(self.vals_mut(), 0);
- let edge = match self.reborrow_mut().force() {
- ForceResult::Leaf(_) => None,
- ForceResult::Internal(mut internal) => {
- // Remove edge 0 from the `old_len + 1` initialized edges.
- let edge = slice_remove(
- slice::from_raw_parts_mut(
- internal.as_internal_mut().edges.as_mut_ptr(),
- old_len+1
- ),
- 0
- );
-
- let mut new_root = Root { node: edge, height: internal.height - 1 };
- new_root.as_mut().as_leaf_mut().parent = ptr::null();
-
- // Remaining children all shifted left by one; fix their links.
- for i in 0..old_len {
- Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
- }
-
- Some(new_root)
- }
- };
-
- self.as_leaf_mut().len -= 1;
-
- (key, val, edge)
- }
- }
-
- /// Raw pointers to the start of the key and value arrays, for bulk moves.
- fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
- (
- self.keys_mut().as_mut_ptr(),
- self.vals_mut().as_mut_ptr()
- )
- }
-}
-
-impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
- /// Checks whether a node is an `Internal` node or a `Leaf` node.
- /// Height is the sole discriminator: leaves live exactly at height 0.
- pub fn force(self) -> ForceResult<
- NodeRef<BorrowType, K, V, marker::Leaf>,
- NodeRef<BorrowType, K, V, marker::Internal>
- > {
- if self.height == 0 {
- ForceResult::Leaf(NodeRef {
- height: self.height,
- node: self.node,
- root: self.root,
- _marker: PhantomData
- })
- } else {
- ForceResult::Internal(NodeRef {
- height: self.height,
- node: self.node,
- root: self.root,
- _marker: PhantomData
- })
- }
- }
-}
-
-/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
-/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
-/// pair) or `Edge` (signifying a handle on an edge).
-///
-/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
-/// a child node, these represent the spaces where child pointers would go between the key/value
-/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
-/// to the left of the node, one between the two pairs, and one at the right of the node.
-pub struct Handle<Node, Type> {
- node: Node,
- // For `KV`: index into keys/vals (0..len). For `Edge`: edge position (0..=len).
- idx: usize,
- _marker: PhantomData<Type>
-}
-
-impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
-// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
-// `Clone`able is when it is an immutable reference and therefore `Copy`.
-impl<Node: Copy, Type> Clone for Handle<Node, Type> {
- fn clone(&self) -> Self {
- *self
- }
-}
-
-impl<Node, Type> Handle<Node, Type> {
- /// Retrieves the node that contains the edge of key/value pair this handle points to.
- pub fn into_node(self) -> Node {
- self.node
- }
-}
-
-impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
- /// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`.
- pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
- // Necessary for correctness, but in a private module
- debug_assert!(idx < node.len());
-
- Handle {
- node,
- idx,
- _marker: PhantomData
- }
- }
-
- /// The edge immediately to the left of this pair (same index).
- pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
- Handle::new_edge(self.node, self.idx)
- }
-
- /// The edge immediately to the right of this pair (index + 1).
- pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
- Handle::new_edge(self.node, self.idx + 1)
- }
-}
-
-impl<BorrowType, K, V, NodeType, HandleType> PartialEq
- for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
-
- // Two handles are equal when they denote the same slot of the same node
- // (compared by node address, not by contents).
- fn eq(&self, other: &Self) -> bool {
- self.node.node == other.node.node && self.idx == other.idx
- }
-}
-
-impl<BorrowType, K, V, NodeType, HandleType>
- Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
-
- /// Temporarily takes out another, immutable handle on the same location.
- pub fn reborrow(&self)
- -> Handle<NodeRef<marker::Immut, K, V, NodeType>, HandleType> {
-
- // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
- Handle {
- node: self.node.reborrow(),
- idx: self.idx,
- _marker: PhantomData
- }
- }
-}
-
-impl<'a, K, V, NodeType, HandleType>
- Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
-
- /// Temporarily takes out another, mutable handle on the same location. Beware, as
- /// this method is very dangerous, doubly so since it may not immediately appear
- /// dangerous.
- ///
- /// Because mutable pointers can roam anywhere around the tree and can even (through
- /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
- /// can easily be used to make the original mutable pointer dangling, or, in the case
- /// of a reborrowed handle, out of bounds.
- // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
- // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
- pub unsafe fn reborrow_mut(&mut self)
- -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
-
- // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
- Handle {
- node: self.node.reborrow_mut(),
- idx: self.idx,
- _marker: PhantomData
- }
- }
-}
-
-impl<BorrowType, K, V, NodeType>
- Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
-
- /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
- /// `node.len()`.
- pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
- // Necessary for correctness, but in a private module
- debug_assert!(idx <= node.len());
-
- Handle {
- node,
- idx,
- _marker: PhantomData
- }
- }
-
- /// The key/value pair to the left of this edge, or `Err(self)` if this is
- /// the leftmost edge of the node.
- pub fn left_kv(self)
- -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
-
- if self.idx > 0 {
- Ok(Handle::new_kv(self.node, self.idx - 1))
- } else {
- Err(self)
- }
- }
-
- /// The key/value pair to the right of this edge, or `Err(self)` if this is
- /// the rightmost edge of the node.
- pub fn right_kv(self)
- -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
-
- if self.idx < self.node.len() {
- Ok(Handle::new_kv(self.node, self.idx))
- } else {
- Err(self)
- }
- }
-}
-
-impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
- /// Inserts a new key/value pair between the key/value pairs to the right and left of
- /// this edge. This method assumes that there is enough space in the node for the new
- /// pair to fit.
- ///
- /// The returned pointer points to the inserted value.
- fn insert_fit(&mut self, key: K, val: V) -> *mut V {
- // Necessary for correctness, but in a private module
- debug_assert!(self.node.len() < CAPACITY);
- debug_assert!(!self.node.is_shared_root());
-
- unsafe {
- slice_insert(self.node.keys_mut(), self.idx, key);
- slice_insert(self.node.vals_mut(), self.idx, val);
-
- self.node.as_leaf_mut().len += 1;
-
- self.node.vals_mut().get_unchecked_mut(self.idx)
- }
- }
-
- /// Inserts a new key/value pair between the key/value pairs to the right and left of
- /// this edge. This method splits the node if there isn't enough room.
- ///
- /// The returned pointer points to the inserted value.
- pub fn insert(mut self, key: K, val: V)
- -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
-
- if self.node.len() < CAPACITY {
- let ptr = self.insert_fit(key, val);
- (InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr)
- } else {
- // Full node: split around the middle pair (index B), then insert the
- // new pair into whichever half it belongs to.
- let middle = Handle::new_kv(self.node, B);
- let (mut left, k, v, mut right) = middle.split();
- let ptr = if self.idx <= B {
- unsafe {
- Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val)
- }
- } else {
- unsafe {
- // Indices in the right half are shifted down by B + 1
- // (the left half plus the promoted middle pair).
- Handle::new_edge(
- right.as_mut().cast_unchecked::<marker::Leaf>(),
- self.idx - (B + 1)
- ).insert_fit(key, val)
- }
- };
- (InsertResult::Split(left, k, v, right), ptr)
- }
- }
-}
-
-impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
- /// Fixes the parent pointer and index in the child node below this edge. This is useful
- /// when the ordering of edges has been changed, such as in the various `insert` methods.
- fn correct_parent_link(mut self) {
- let idx = self.idx as u16;
- let ptr = self.node.as_internal_mut() as *mut _;
- let mut child = self.descend();
- child.as_leaf_mut().parent = ptr;
- child.as_leaf_mut().parent_idx = idx;
- }
-
- /// Unsafely asserts to the compiler some static information about whether the underlying
- /// node of this handle is a `Leaf`.
- unsafe fn cast_unchecked<NewType>(&mut self)
- -> Handle<NodeRef<marker::Mut, K, V, NewType>, marker::Edge> {
-
- Handle::new_edge(self.node.cast_unchecked(), self.idx)
- }
-
- /// Inserts a new key/value pair and an edge that will go to the right of that new pair
- /// between this edge and the key/value pair to the right of this edge. This method assumes
- /// that there is enough space in the node for the new pair to fit.
- fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
- // Necessary for correctness, but in an internal module
- debug_assert!(self.node.len() < CAPACITY);
- debug_assert!(edge.height == self.node.height - 1);
-
- unsafe {
- // This cast is a lie, but it allows us to reuse the key/value insertion logic.
- self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
-
- // `len()` already reflects the inserted pair, so the edges slice
- // (one longer than the pair count) is exactly `len()` entries here.
- slice_insert(
- slice::from_raw_parts_mut(
- self.node.as_internal_mut().edges.as_mut_ptr(),
- self.node.len()
- ),
- self.idx + 1,
- edge.node
- );
-
- // Children at and after the new edge shifted right; relink them.
- for i in (self.idx+1)..(self.node.len()+1) {
- Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
- }
- }
- }
-
- /// Inserts a new key/value pair and an edge that will go to the right of that new pair
- /// between this edge and the key/value pair to the right of this edge. This method splits
- /// the node if there isn't enough room.
- pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
- -> InsertResult<'a, K, V, marker::Internal> {
-
- // Necessary for correctness, but this is an internal module
- debug_assert!(edge.height == self.node.height - 1);
-
- if self.node.len() < CAPACITY {
- self.insert_fit(key, val, edge);
- InsertResult::Fit(Handle::new_kv(self.node, self.idx))
- } else {
- // Full node: split around the middle pair (index B), then insert into
- // the appropriate half (right-half indices shift down by B + 1).
- let middle = Handle::new_kv(self.node, B);
- let (mut left, k, v, mut right) = middle.split();
- if self.idx <= B {
- unsafe {
- Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
- }
- } else {
- unsafe {
- Handle::new_edge(
- right.as_mut().cast_unchecked::<marker::Internal>(),
- self.idx - (B + 1)
- ).insert_fit(key, val, edge);
- }
- }
- InsertResult::Split(left, k, v, right)
- }
- }
-}
-
-impl<BorrowType, K, V>
- Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
-
- /// Finds the node pointed to by this edge.
- ///
- /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
- /// both, upon success, do nothing.
- pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
- NodeRef {
- // Children sit one level closer to the leaves.
- height: self.node.height - 1,
- node: unsafe { self.node.as_internal().edges.get_unchecked(self.idx).as_ptr() },
- root: self.node.root,
- _marker: PhantomData
- }
- }
-}
-
-impl<'a, K: 'a, V: 'a, NodeType>
- Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
-
- /// Borrows the key and value at this handle for the borrow's lifetime.
- pub fn into_kv(self) -> (&'a K, &'a V) {
- let (keys, vals) = self.node.into_slices();
- unsafe {
- // In bounds: `new_kv` asserted `idx < len`.
- (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
- }
- }
-}
-
-impl<'a, K: 'a, V: 'a, NodeType>
- Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
-
- /// Mutably borrows the key and value at this handle for the borrow's lifetime,
- /// consuming the handle.
- pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
- let (keys, vals) = self.node.into_slices_mut();
- unsafe {
- // In bounds: `new_kv` asserted `idx < len`.
- (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
- }
- }
-}
-
-impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
- /// Like `into_kv_mut`, but borrows from the handle instead of consuming it,
- /// so the returned references live only as long as `&mut self`.
- pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
- unsafe {
- let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
- (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
- }
- }
-}
-
-impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
- /// Splits the underlying node into three parts:
- ///
- /// - The node is truncated to only contain the key/value pairs to the right of
- /// this handle.
- /// - The key and value pointed to by this handle and extracted.
- /// - All the key/value pairs to the right of this handle are put into a newly
- /// allocated node.
- pub fn split(mut self)
- -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
- debug_assert!(!self.node.is_shared_root());
- unsafe {
- let mut new_node = Box::new(LeafNode::new());
-
- let k = ptr::read(self.node.keys().get_unchecked(self.idx));
- let v = ptr::read(self.node.vals().get_unchecked(self.idx));
-
- let new_len = self.node.len() - self.idx - 1;
-
- ptr::copy_nonoverlapping(
- self.node.keys().as_ptr().offset(self.idx as isize + 1),
- new_node.keys.as_mut_ptr(),
- new_len
- );
- ptr::copy_nonoverlapping(
- self.node.vals().as_ptr().offset(self.idx as isize + 1),
- new_node.vals.as_mut_ptr(),
- new_len
- );
-
- self.node.as_leaf_mut().len = self.idx as u16;
- new_node.len = new_len as u16;
-
- (
- self.node,
- k, v,
- Root {
- node: BoxedNode::from_leaf(new_node),
- height: 0
- }
- )
- }
- }
-
- /// Removes the key/value pair pointed to by this handle, returning the edge between the
- /// now adjacent key/value pairs to the left and right of this handle.
- pub fn remove(mut self)
- -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
- debug_assert!(!self.node.is_shared_root());
- unsafe {
- let k = slice_remove(self.node.keys_mut(), self.idx);
- let v = slice_remove(self.node.vals_mut(), self.idx);
- self.node.as_leaf_mut().len -= 1;
- (self.left_edge(), k, v)
- }
- }
-}
-
-impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
- /// Splits the underlying node into three parts:
- ///
- /// - The node is truncated to only contain the edges and key/value pairs to the
- /// right of this handle.
- /// - The key and value pointed to by this handle and extracted.
- /// - All the edges and key/value pairs to the right of this handle are put into
- /// a newly allocated node.
- pub fn split(mut self)
- -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
- unsafe {
- let mut new_node = Box::new(InternalNode::new());
-
- let k = ptr::read(self.node.keys().get_unchecked(self.idx));
- let v = ptr::read(self.node.vals().get_unchecked(self.idx));
-
- let height = self.node.height;
- let new_len = self.node.len() - self.idx - 1;
-
- ptr::copy_nonoverlapping(
- self.node.keys().as_ptr().offset(self.idx as isize + 1),
- new_node.data.keys.as_mut_ptr(),
- new_len
- );
- ptr::copy_nonoverlapping(
- self.node.vals().as_ptr().offset(self.idx as isize + 1),
- new_node.data.vals.as_mut_ptr(),
- new_len
- );
- ptr::copy_nonoverlapping(
- self.node.as_internal().edges.as_ptr().offset(self.idx as isize + 1),
- new_node.edges.as_mut_ptr(),
- new_len + 1
- );
-
- self.node.as_leaf_mut().len = self.idx as u16;
- new_node.data.len = new_len as u16;
-
- let mut new_root = Root {
- node: BoxedNode::from_internal(new_node),
- height,
- };
-
- for i in 0..(new_len+1) {
- Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
- }
-
- (
- self.node,
- k, v,
- new_root
- )
- }
- }
-
- /// Returns whether it is valid to call `.merge()`, i.e., whether there is enough room in
- /// a node to hold the combination of the nodes to the left and right of this handle along
- /// with the key/value pair at this handle.
- pub fn can_merge(&self) -> bool {
- (
- self.reborrow()
- .left_edge()
- .descend()
- .len()
- + self.reborrow()
- .right_edge()
- .descend()
- .len()
- + 1
- ) <= CAPACITY
- }
-
- /// Combines the node immediately to the left of this handle, the key/value pair pointed
- /// to by this handle, and the node immediately to the right of this handle into one new
- /// child of the underlying node, returning an edge referencing that new child.
- ///
- /// Assumes that this edge `.can_merge()`.
- pub fn merge(mut self)
- -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
- let self1 = unsafe { ptr::read(&self) };
- let self2 = unsafe { ptr::read(&self) };
- let mut left_node = self1.left_edge().descend();
- let left_len = left_node.len();
- let mut right_node = self2.right_edge().descend();
- let right_len = right_node.len();
-
- // necessary for correctness, but in a private module
- debug_assert!(left_len + right_len + 1 <= CAPACITY);
-
- unsafe {
- ptr::write(left_node.keys_mut().get_unchecked_mut(left_len),
- slice_remove(self.node.keys_mut(), self.idx));
- ptr::copy_nonoverlapping(
- right_node.keys().as_ptr(),
- left_node.keys_mut().as_mut_ptr().offset(left_len as isize + 1),
- right_len
- );
- ptr::write(left_node.vals_mut().get_unchecked_mut(left_len),
- slice_remove(self.node.vals_mut(), self.idx));
- ptr::copy_nonoverlapping(
- right_node.vals().as_ptr(),
- left_node.vals_mut().as_mut_ptr().offset(left_len as isize + 1),
- right_len
- );
-
- slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
- for i in self.idx+1..self.node.len() {
- Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
- }
- self.node.as_leaf_mut().len -= 1;
-
- left_node.as_leaf_mut().len += right_len as u16 + 1;
-
- if self.node.height > 1 {
- ptr::copy_nonoverlapping(
- right_node.cast_unchecked().as_internal().edges.as_ptr(),
- left_node.cast_unchecked()
- .as_internal_mut()
- .edges
- .as_mut_ptr()
- .offset(left_len as isize + 1),
- right_len + 1
- );
-
- for i in left_len+1..left_len+right_len+2 {
- Handle::new_edge(
- left_node.cast_unchecked().reborrow_mut(),
- i
- ).correct_parent_link();
- }
-
- Global.dealloc(
- right_node.node.cast(),
- Layout::new::<InternalNode<K, V>>(),
- );
- } else {
- Global.dealloc(
- right_node.node.cast(),
- Layout::new::<LeafNode<K, V>>(),
- );
- }
-
- Handle::new_edge(self.node, self.idx)
- }
- }
-
- /// This removes a key/value pair from the left child and replaces it with the key/value pair
- /// pointed to by this handle while pushing the old key/value pair of this handle into the right
- /// child.
- pub fn steal_left(&mut self) {
- unsafe {
- let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
-
- let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
- let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
-
- match self.reborrow_mut().right_edge().descend().force() {
- ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
- ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap())
- }
- }
- }
-
- /// This removes a key/value pair from the right child and replaces it with the key/value pair
- /// pointed to by this handle while pushing the old key/value pair of this handle into the left
- /// child.
- pub fn steal_right(&mut self) {
- unsafe {
- let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
-
- let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
- let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
-
- match self.reborrow_mut().left_edge().descend().force() {
- ForceResult::Leaf(mut leaf) => leaf.push(k, v),
- ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap())
- }
- }
- }
-
- /// This does stealing similar to `steal_left` but steals multiple elements at once.
- pub fn bulk_steal_left(&mut self, count: usize) {
- unsafe {
- let mut left_node = ptr::read(self).left_edge().descend();
- let left_len = left_node.len();
- let mut right_node = ptr::read(self).right_edge().descend();
- let right_len = right_node.len();
-
- // Make sure that we may steal safely.
- debug_assert!(right_len + count <= CAPACITY);
- debug_assert!(left_len >= count);
-
- let new_left_len = left_len - count;
-
- // Move data.
- {
- let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
- let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
- let parent_kv = {
- let kv = self.reborrow_mut().into_kv_mut();
- (kv.0 as *mut K, kv.1 as *mut V)
- };
-
- // Make room for stolen elements in the right child.
- ptr::copy(right_kv.0,
- right_kv.0.offset(count as isize),
- right_len);
- ptr::copy(right_kv.1,
- right_kv.1.offset(count as isize),
- right_len);
-
- // Move elements from the left child to the right one.
- move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
-
- // Move parent's key/value pair to the right child.
- move_kv(parent_kv, 0, right_kv, count - 1, 1);
-
- // Move the left-most stolen pair to the parent.
- move_kv(left_kv, new_left_len, parent_kv, 0, 1);
- }
-
- left_node.reborrow_mut().as_leaf_mut().len -= count as u16;
- right_node.reborrow_mut().as_leaf_mut().len += count as u16;
-
- match (left_node.force(), right_node.force()) {
- (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
- // Make room for stolen edges.
- let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
- ptr::copy(right_edges,
- right_edges.offset(count as isize),
- right_len + 1);
- right.correct_childrens_parent_links(count, count + right_len + 1);
-
- move_edges(left, new_left_len + 1, right, 0, count);
- },
- (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
- _ => { unreachable!(); }
- }
- }
- }
-
- /// The symmetric clone of `bulk_steal_left`.
- pub fn bulk_steal_right(&mut self, count: usize) {
- unsafe {
- let mut left_node = ptr::read(self).left_edge().descend();
- let left_len = left_node.len();
- let mut right_node = ptr::read(self).right_edge().descend();
- let right_len = right_node.len();
-
- // Make sure that we may steal safely.
- debug_assert!(left_len + count <= CAPACITY);
- debug_assert!(right_len >= count);
-
- let new_right_len = right_len - count;
-
- // Move data.
- {
- let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
- let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
- let parent_kv = {
- let kv = self.reborrow_mut().into_kv_mut();
- (kv.0 as *mut K, kv.1 as *mut V)
- };
-
- // Move parent's key/value pair to the left child.
- move_kv(parent_kv, 0, left_kv, left_len, 1);
-
- // Move elements from the right child to the left one.
- move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
-
- // Move the right-most stolen pair to the parent.
- move_kv(right_kv, count - 1, parent_kv, 0, 1);
-
- // Fix right indexing
- ptr::copy(right_kv.0.offset(count as isize),
- right_kv.0,
- new_right_len);
- ptr::copy(right_kv.1.offset(count as isize),
- right_kv.1,
- new_right_len);
- }
-
- left_node.reborrow_mut().as_leaf_mut().len += count as u16;
- right_node.reborrow_mut().as_leaf_mut().len -= count as u16;
-
- match (left_node.force(), right_node.force()) {
- (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
- move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
-
- // Fix right indexing.
- let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
- ptr::copy(right_edges.offset(count as isize),
- right_edges,
- new_right_len + 1);
- right.correct_childrens_parent_links(0, new_right_len + 1);
- },
- (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
- _ => { unreachable!(); }
- }
- }
- }
-}
-
-unsafe fn move_kv<K, V>(
- source: (*mut K, *mut V), source_offset: usize,
- dest: (*mut K, *mut V), dest_offset: usize,
- count: usize)
-{
- ptr::copy_nonoverlapping(source.0.offset(source_offset as isize),
- dest.0.offset(dest_offset as isize),
- count);
- ptr::copy_nonoverlapping(source.1.offset(source_offset as isize),
- dest.1.offset(dest_offset as isize),
- count);
-}
-
-// Source and destination must have the same height.
-unsafe fn move_edges<K, V>(
- mut source: NodeRef<marker::Mut, K, V, marker::Internal>, source_offset: usize,
- mut dest: NodeRef<marker::Mut, K, V, marker::Internal>, dest_offset: usize,
- count: usize)
-{
- let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
- let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
- ptr::copy_nonoverlapping(source_ptr.offset(source_offset as isize),
- dest_ptr.offset(dest_offset as isize),
- count);
- dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
-}
-
-impl<BorrowType, K, V, HandleType>
- Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
-
- /// Check whether the underlying node is an `Internal` node or a `Leaf` node.
- pub fn force(self) -> ForceResult<
- Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
- Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
- > {
- match self.node.force() {
- ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
- node,
- idx: self.idx,
- _marker: PhantomData
- }),
- ForceResult::Internal(node) => ForceResult::Internal(Handle {
- node,
- idx: self.idx,
- _marker: PhantomData
- })
- }
- }
-}
-
-impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
- /// Move the suffix after `self` from one node to another one. `right` must be empty.
- /// The first edge of `right` remains unchanged.
- pub fn move_suffix(&mut self,
- right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>) {
- unsafe {
- let left_new_len = self.idx;
- let mut left_node = self.reborrow_mut().into_node();
-
- let right_new_len = left_node.len() - left_new_len;
- let mut right_node = right.reborrow_mut();
-
- debug_assert!(right_node.len() == 0);
- debug_assert!(left_node.height == right_node.height);
-
- let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
- let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
-
-
- move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
-
- left_node.reborrow_mut().as_leaf_mut().len = left_new_len as u16;
- right_node.reborrow_mut().as_leaf_mut().len = right_new_len as u16;
-
- match (left_node.force(), right_node.force()) {
- (ForceResult::Internal(left), ForceResult::Internal(right)) => {
- move_edges(left, left_new_len + 1, right, 1, right_new_len);
- },
- (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
- _ => { unreachable!(); }
- }
- }
- }
-}
-
-pub enum ForceResult<Leaf, Internal> {
- Leaf(Leaf),
- Internal(Internal)
-}
-
-pub enum InsertResult<'a, K, V, Type> {
- Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
- Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>)
-}
-
-pub mod marker {
- use core::marker::PhantomData;
-
- pub enum Leaf { }
- pub enum Internal { }
- pub enum LeafOrInternal { }
-
- pub enum Owned { }
- pub struct Immut<'a>(PhantomData<&'a ()>);
- pub struct Mut<'a>(PhantomData<&'a mut ()>);
-
- pub enum KV { }
- pub enum Edge { }
-}
-
-unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
- ptr::copy(
- slice.as_ptr().offset(idx as isize),
- slice.as_mut_ptr().offset(idx as isize + 1),
- slice.len() - idx
- );
- ptr::write(slice.get_unchecked_mut(idx), val);
-}
-
-unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
- let ret = ptr::read(slice.get_unchecked(idx));
- ptr::copy(
- slice.as_ptr().offset(idx as isize + 1),
- slice.as_mut_ptr().offset(idx as isize),
- slice.len() - idx - 1
- );
- ret
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use core::cmp::Ordering;
-
-use borrow::Borrow;
-
-use super::node::{Handle, NodeRef, marker};
-
-use super::node::ForceResult::*;
-use self::SearchResult::*;
-
-pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
- Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
- GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>)
-}
-
-pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
- mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
- key: &Q
-) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
- where Q: Ord, K: Borrow<Q> {
-
- loop {
- match search_node(node, key) {
- Found(handle) => return Found(handle),
- GoDown(handle) => match handle.force() {
- Leaf(leaf) => return GoDown(leaf),
- Internal(internal) => {
- node = internal.descend();
- continue;
- }
- }
- }
- }
-}
-
-pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
- node: NodeRef<BorrowType, K, V, Type>,
- key: &Q
-) -> SearchResult<BorrowType, K, V, Type, Type>
- where Q: Ord, K: Borrow<Q> {
-
- match search_linear(&node, key) {
- (idx, true) => Found(
- Handle::new_kv(node, idx)
- ),
- (idx, false) => SearchResult::GoDown(
- Handle::new_edge(node, idx)
- )
- }
-}
-
-pub fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
- node: &NodeRef<BorrowType, K, V, Type>,
- key: &Q
-) -> (usize, bool)
- where Q: Ord, K: Borrow<Q> {
-
- for (i, k) in node.keys().iter().enumerate() {
- match key.cmp(k.borrow()) {
- Ordering::Greater => {},
- Ordering::Equal => return (i, true),
- Ordering::Less => return (i, false)
- }
- }
- (node.keys().len(), false)
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
-// to TreeMap
-
-use core::cmp::Ordering::{self, Less, Greater, Equal};
-use core::cmp::{min, max};
-use core::fmt::Debug;
-use core::fmt;
-use core::iter::{Peekable, FromIterator, FusedIterator};
-use core::ops::{BitOr, BitAnd, BitXor, Sub, RangeBounds};
-
-use borrow::Borrow;
-use btree_map::{BTreeMap, Keys};
-use super::Recover;
-
-// FIXME(conventions): implement bounded iterators
-
-/// A set based on a B-Tree.
-///
-/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
-/// benefits and drawbacks.
-///
-/// It is a logic error for an item to be modified in such a way that the item's ordering relative
-/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
-/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
-///
-/// [`BTreeMap`]: struct.BTreeMap.html
-/// [`Ord`]: ../../std/cmp/trait.Ord.html
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
-///
-/// # Examples
-///
-/// ```
-/// use std::collections::BTreeSet;
-///
-/// // Type inference lets us omit an explicit type signature (which
-/// // would be `BTreeSet<&str>` in this example).
-/// let mut books = BTreeSet::new();
-///
-/// // Add some books.
-/// books.insert("A Dance With Dragons");
-/// books.insert("To Kill a Mockingbird");
-/// books.insert("The Odyssey");
-/// books.insert("The Great Gatsby");
-///
-/// // Check for a specific one.
-/// if !books.contains("The Winds of Winter") {
-/// println!("We have {} books, but The Winds of Winter ain't one.",
-/// books.len());
-/// }
-///
-/// // Remove a book.
-/// books.remove("The Odyssey");
-///
-/// // Iterate over everything.
-/// for book in &books {
-/// println!("{}", book);
-/// }
-/// ```
-#[derive(Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct BTreeSet<T> {
- map: BTreeMap<T, ()>,
-}
-
-/// An iterator over the items of a `BTreeSet`.
-///
-/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
-/// See its documentation for more.
-///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`iter`]: struct.BTreeSet.html#method.iter
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Iter<'a, T: 'a> {
- iter: Keys<'a, T, ()>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Iter")
- .field(&self.iter.clone())
- .finish()
- }
-}
-
-/// An owning iterator over the items of a `BTreeSet`.
-///
-/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`][`BTreeSet`]
-/// (provided by the `IntoIterator` trait). See its documentation for more.
-///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`into_iter`]: struct.BTreeSet.html#method.into_iter
-#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Debug)]
-pub struct IntoIter<T> {
- iter: ::btree_map::IntoIter<T, ()>,
-}
-
-/// An iterator over a sub-range of items in a `BTreeSet`.
-///
-/// This `struct` is created by the [`range`] method on [`BTreeSet`].
-/// See its documentation for more.
-///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`range`]: struct.BTreeSet.html#method.range
-#[derive(Debug)]
-#[stable(feature = "btree_range", since = "1.17.0")]
-pub struct Range<'a, T: 'a> {
- iter: ::btree_map::Range<'a, T, ()>,
-}
-
-/// A lazy iterator producing elements in the difference of `BTreeSet`s.
-///
-/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
-/// See its documentation for more.
-///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`difference`]: struct.BTreeSet.html#method.difference
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Difference<'a, T: 'a> {
- a: Peekable<Iter<'a, T>>,
- b: Peekable<Iter<'a, T>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Difference<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Difference")
- .field(&self.a)
- .field(&self.b)
- .finish()
- }
-}
-
-/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
-///
-/// This `struct` is created by the [`symmetric_difference`] method on
-/// [`BTreeSet`]. See its documentation for more.
-///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`symmetric_difference`]: struct.BTreeSet.html#method.symmetric_difference
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct SymmetricDifference<'a, T: 'a> {
- a: Peekable<Iter<'a, T>>,
- b: Peekable<Iter<'a, T>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for SymmetricDifference<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("SymmetricDifference")
- .field(&self.a)
- .field(&self.b)
- .finish()
- }
-}
-
-/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
-///
-/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
-/// See its documentation for more.
-///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`intersection`]: struct.BTreeSet.html#method.intersection
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Intersection<'a, T: 'a> {
- a: Peekable<Iter<'a, T>>,
- b: Peekable<Iter<'a, T>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Intersection<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Intersection")
- .field(&self.a)
- .field(&self.b)
- .finish()
- }
-}
-
-/// A lazy iterator producing elements in the union of `BTreeSet`s.
-///
-/// This `struct` is created by the [`union`] method on [`BTreeSet`].
-/// See its documentation for more.
-///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`union`]: struct.BTreeSet.html#method.union
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Union<'a, T: 'a> {
- a: Peekable<Iter<'a, T>>,
- b: Peekable<Iter<'a, T>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Union<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Union")
- .field(&self.a)
- .field(&self.b)
- .finish()
- }
-}
-
-impl<T: Ord> BTreeSet<T> {
- /// Makes a new `BTreeSet` with a reasonable choice of B.
- ///
- /// # Examples
- ///
- /// ```
- /// # #![allow(unused_mut)]
- /// use std::collections::BTreeSet;
- ///
- /// let mut set: BTreeSet<i32> = BTreeSet::new();
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn new() -> BTreeSet<T> {
- BTreeSet { map: BTreeMap::new() }
- }
-
- /// Constructs a double-ended iterator over a sub-range of elements in the set.
- /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
- /// yield elements from min (inclusive) to max (exclusive).
- /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
- /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
- /// range from 4 to 10.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- /// use std::ops::Bound::Included;
- ///
- /// let mut set = BTreeSet::new();
- /// set.insert(3);
- /// set.insert(5);
- /// set.insert(8);
- /// for &elem in set.range((Included(&4), Included(&8))) {
- /// println!("{}", elem);
- /// }
- /// assert_eq!(Some(&5), set.range(4..).next());
- /// ```
- #[stable(feature = "btree_range", since = "1.17.0")]
- pub fn range<K: ?Sized, R>(&self, range: R) -> Range<T>
- where K: Ord, T: Borrow<K>, R: RangeBounds<K>
- {
- Range { iter: self.map.range(range) }
- }
-
- /// Visits the values representing the difference,
- /// i.e. the values that are in `self` but not in `other`,
- /// in ascending order.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut a = BTreeSet::new();
- /// a.insert(1);
- /// a.insert(2);
- ///
- /// let mut b = BTreeSet::new();
- /// b.insert(2);
- /// b.insert(3);
- ///
- /// let diff: Vec<_> = a.difference(&b).cloned().collect();
- /// assert_eq!(diff, [1]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
- Difference {
- a: self.iter().peekable(),
- b: other.iter().peekable(),
- }
- }
-
- /// Visits the values representing the symmetric difference,
- /// i.e. the values that are in `self` or in `other` but not in both,
- /// in ascending order.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut a = BTreeSet::new();
- /// a.insert(1);
- /// a.insert(2);
- ///
- /// let mut b = BTreeSet::new();
- /// b.insert(2);
- /// b.insert(3);
- ///
- /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
- /// assert_eq!(sym_diff, [1, 3]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn symmetric_difference<'a>(&'a self,
- other: &'a BTreeSet<T>)
- -> SymmetricDifference<'a, T> {
- SymmetricDifference {
- a: self.iter().peekable(),
- b: other.iter().peekable(),
- }
- }
-
- /// Visits the values representing the intersection,
- /// i.e. the values that are both in `self` and `other`,
- /// in ascending order.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut a = BTreeSet::new();
- /// a.insert(1);
- /// a.insert(2);
- ///
- /// let mut b = BTreeSet::new();
- /// b.insert(2);
- /// b.insert(3);
- ///
- /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
- /// assert_eq!(intersection, [2]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T> {
- Intersection {
- a: self.iter().peekable(),
- b: other.iter().peekable(),
- }
- }
-
- /// Visits the values representing the union,
- /// i.e. all the values in `self` or `other`, without duplicates,
- /// in ascending order.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut a = BTreeSet::new();
- /// a.insert(1);
- ///
- /// let mut b = BTreeSet::new();
- /// b.insert(2);
- ///
- /// let union: Vec<_> = a.union(&b).cloned().collect();
- /// assert_eq!(union, [1, 2]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T> {
- Union {
- a: self.iter().peekable(),
- b: other.iter().peekable(),
- }
- }
-
- /// Clears the set, removing all values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut v = BTreeSet::new();
- /// v.insert(1);
- /// v.clear();
- /// assert!(v.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn clear(&mut self) {
- self.map.clear()
- }
-
- /// Returns `true` if the set contains a value.
- ///
- /// The value may be any borrowed form of the set's value type,
- /// but the ordering on the borrowed form *must* match the
- /// ordering on the value type.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
- /// assert_eq!(set.contains(&1), true);
- /// assert_eq!(set.contains(&4), false);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
- where T: Borrow<Q>,
- Q: Ord
- {
- self.map.contains_key(value)
- }
-
- /// Returns a reference to the value in the set, if any, that is equal to the given value.
- ///
- /// The value may be any borrowed form of the set's value type,
- /// but the ordering on the borrowed form *must* match the
- /// ordering on the value type.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
- /// assert_eq!(set.get(&2), Some(&2));
- /// assert_eq!(set.get(&4), None);
- /// ```
- #[stable(feature = "set_recovery", since = "1.9.0")]
- pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
- where T: Borrow<Q>,
- Q: Ord
- {
- Recover::get(&self.map, value)
- }
-
- /// Returns `true` if `self` has no elements in common with `other`.
- /// This is equivalent to checking for an empty intersection.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
- /// let mut b = BTreeSet::new();
- ///
- /// assert_eq!(a.is_disjoint(&b), true);
- /// b.insert(4);
- /// assert_eq!(a.is_disjoint(&b), true);
- /// b.insert(1);
- /// assert_eq!(a.is_disjoint(&b), false);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool {
- self.intersection(other).next().is_none()
- }
-
- /// Returns `true` if the set is a subset of another,
- /// i.e. `other` contains at least all the values in `self`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let sup: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
- /// let mut set = BTreeSet::new();
- ///
- /// assert_eq!(set.is_subset(&sup), true);
- /// set.insert(2);
- /// assert_eq!(set.is_subset(&sup), true);
- /// set.insert(4);
- /// assert_eq!(set.is_subset(&sup), false);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_subset(&self, other: &BTreeSet<T>) -> bool {
- // Stolen from TreeMap
- let mut x = self.iter();
- let mut y = other.iter();
- let mut a = x.next();
- let mut b = y.next();
- while a.is_some() {
- if b.is_none() {
- return false;
- }
-
- let a1 = a.unwrap();
- let b1 = b.unwrap();
-
- match b1.cmp(a1) {
- Less => (),
- Greater => return false,
- Equal => a = x.next(),
- }
-
- b = y.next();
- }
- true
- }
-
- /// Returns `true` if the set is a superset of another,
- /// i.e. `self` contains at least all the values in `other`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let sub: BTreeSet<_> = [1, 2].iter().cloned().collect();
- /// let mut set = BTreeSet::new();
- ///
- /// assert_eq!(set.is_superset(&sub), false);
- ///
- /// set.insert(0);
- /// set.insert(1);
- /// assert_eq!(set.is_superset(&sub), false);
- ///
- /// set.insert(2);
- /// assert_eq!(set.is_superset(&sub), true);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_superset(&self, other: &BTreeSet<T>) -> bool {
- other.is_subset(self)
- }
-
- /// Adds a value to the set.
- ///
- /// If the set did not have this value present, `true` is returned.
- ///
- /// If the set did have this value present, `false` is returned, and the
- /// entry is not updated. See the [module-level documentation] for more.
- ///
- /// [module-level documentation]: index.html#insert-and-complex-keys
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut set = BTreeSet::new();
- ///
- /// assert_eq!(set.insert(2), true);
- /// assert_eq!(set.insert(2), false);
- /// assert_eq!(set.len(), 1);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(&mut self, value: T) -> bool {
- self.map.insert(value, ()).is_none()
- }
-
- /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
- /// one. Returns the replaced value.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut set = BTreeSet::new();
- /// set.insert(Vec::<i32>::new());
- ///
- /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
- /// set.replace(Vec::with_capacity(10));
- /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
- /// ```
- #[stable(feature = "set_recovery", since = "1.9.0")]
- pub fn replace(&mut self, value: T) -> Option<T> {
- Recover::replace(&mut self.map, value)
- }
-
- /// Removes a value from the set. Returns `true` if the value was
- /// present in the set.
- ///
- /// The value may be any borrowed form of the set's value type,
- /// but the ordering on the borrowed form *must* match the
- /// ordering on the value type.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut set = BTreeSet::new();
- ///
- /// set.insert(2);
- /// assert_eq!(set.remove(&2), true);
- /// assert_eq!(set.remove(&2), false);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
- where T: Borrow<Q>,
- Q: Ord
- {
- self.map.remove(value).is_some()
- }
-
- /// Removes and returns the value in the set, if any, that is equal to the given one.
- ///
- /// The value may be any borrowed form of the set's value type,
- /// but the ordering on the borrowed form *must* match the
- /// ordering on the value type.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
- /// assert_eq!(set.take(&2), Some(2));
- /// assert_eq!(set.take(&2), None);
- /// ```
- #[stable(feature = "set_recovery", since = "1.9.0")]
- pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
- where T: Borrow<Q>,
- Q: Ord
- {
- Recover::take(&mut self.map, value)
- }
-
- /// Moves all elements from `other` into `Self`, leaving `other` empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut a = BTreeSet::new();
- /// a.insert(1);
- /// a.insert(2);
- /// a.insert(3);
- ///
- /// let mut b = BTreeSet::new();
- /// b.insert(3);
- /// b.insert(4);
- /// b.insert(5);
- ///
- /// a.append(&mut b);
- ///
- /// assert_eq!(a.len(), 5);
- /// assert_eq!(b.len(), 0);
- ///
- /// assert!(a.contains(&1));
- /// assert!(a.contains(&2));
- /// assert!(a.contains(&3));
- /// assert!(a.contains(&4));
- /// assert!(a.contains(&5));
- /// ```
- #[stable(feature = "btree_append", since = "1.11.0")]
- pub fn append(&mut self, other: &mut Self) {
- self.map.append(&mut other.map);
- }
-
- /// Splits the collection into two at the given key. Returns everything after the given key,
- /// including the key.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut a = BTreeSet::new();
- /// a.insert(1);
- /// a.insert(2);
- /// a.insert(3);
- /// a.insert(17);
- /// a.insert(41);
- ///
- /// let b = a.split_off(&3);
- ///
- /// assert_eq!(a.len(), 2);
- /// assert_eq!(b.len(), 3);
- ///
- /// assert!(a.contains(&1));
- /// assert!(a.contains(&2));
- ///
- /// assert!(b.contains(&3));
- /// assert!(b.contains(&17));
- /// assert!(b.contains(&41));
- /// ```
- #[stable(feature = "btree_split_off", since = "1.11.0")]
- pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self where T: Borrow<Q> {
- BTreeSet { map: self.map.split_off(key) }
- }
-}
-
-impl<T> BTreeSet<T> {
- /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
- /// let mut set_iter = set.iter();
- /// assert_eq!(set_iter.next(), Some(&1));
- /// assert_eq!(set_iter.next(), Some(&2));
- /// assert_eq!(set_iter.next(), Some(&3));
- /// assert_eq!(set_iter.next(), None);
- /// ```
- ///
- /// Values returned by the iterator are returned in ascending order:
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
- /// let mut set_iter = set.iter();
- /// assert_eq!(set_iter.next(), Some(&1));
- /// assert_eq!(set_iter.next(), Some(&2));
- /// assert_eq!(set_iter.next(), Some(&3));
- /// assert_eq!(set_iter.next(), None);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter(&self) -> Iter<T> {
- Iter { iter: self.map.keys() }
- }
-
- /// Returns the number of elements in the set.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut v = BTreeSet::new();
- /// assert_eq!(v.len(), 0);
- /// v.insert(1);
- /// assert_eq!(v.len(), 1);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn len(&self) -> usize {
- self.map.len()
- }
-
- /// Returns `true` if the set contains no elements.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let mut v = BTreeSet::new();
- /// assert!(v.is_empty());
- /// v.insert(1);
- /// assert!(!v.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_empty(&self) -> bool {
- self.len() == 0
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> FromIterator<T> for BTreeSet<T> {
- fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
- let mut set = BTreeSet::new();
- set.extend(iter);
- set
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> IntoIterator for BTreeSet<T> {
- type Item = T;
- type IntoIter = IntoIter<T>;
-
- /// Gets an iterator for moving out the `BTreeSet`'s contents.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect();
- ///
- /// let v: Vec<_> = set.into_iter().collect();
- /// assert_eq!(v, [1, 2, 3, 4]);
- /// ```
- fn into_iter(self) -> IntoIter<T> {
- IntoIter { iter: self.map.into_iter() }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a BTreeSet<T> {
- type Item = &'a T;
- type IntoIter = Iter<'a, T>;
-
- fn into_iter(self) -> Iter<'a, T> {
- self.iter()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Extend<T> for BTreeSet<T> {
- #[inline]
- fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
- for elem in iter {
- self.insert(elem);
- }
- }
-}
-
-#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
- fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
- self.extend(iter.into_iter().cloned());
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Default for BTreeSet<T> {
- /// Makes an empty `BTreeSet<T>` with a reasonable choice of B.
- fn default() -> BTreeSet<T> {
- BTreeSet::new()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, 'b, T: Ord + Clone> Sub<&'b BTreeSet<T>> for &'a BTreeSet<T> {
- type Output = BTreeSet<T>;
-
- /// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
- /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
- ///
- /// let result = &a - &b;
- /// let result_vec: Vec<_> = result.into_iter().collect();
- /// assert_eq!(result_vec, [1, 2]);
- /// ```
- fn sub(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
- self.difference(rhs).cloned().collect()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, 'b, T: Ord + Clone> BitXor<&'b BTreeSet<T>> for &'a BTreeSet<T> {
- type Output = BTreeSet<T>;
-
- /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
- /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
- ///
- /// let result = &a ^ &b;
- /// let result_vec: Vec<_> = result.into_iter().collect();
- /// assert_eq!(result_vec, [1, 4]);
- /// ```
- fn bitxor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
- self.symmetric_difference(rhs).cloned().collect()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, 'b, T: Ord + Clone> BitAnd<&'b BTreeSet<T>> for &'a BTreeSet<T> {
- type Output = BTreeSet<T>;
-
- /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
- /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
- ///
- /// let result = &a & &b;
- /// let result_vec: Vec<_> = result.into_iter().collect();
- /// assert_eq!(result_vec, [2, 3]);
- /// ```
- fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
- self.intersection(rhs).cloned().collect()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, 'b, T: Ord + Clone> BitOr<&'b BTreeSet<T>> for &'a BTreeSet<T> {
- type Output = BTreeSet<T>;
-
- /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
- /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
- /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
- ///
- /// let result = &a | &b;
- /// let result_vec: Vec<_> = result.into_iter().collect();
- /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
- /// ```
- fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
- self.union(rhs).cloned().collect()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Debug> Debug for BTreeSet<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_set().entries(self.iter()).finish()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Iter<'a, T> {
- fn clone(&self) -> Iter<'a, T> {
- Iter { iter: self.iter.clone() }
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for Iter<'a, T> {
- type Item = &'a T;
-
- fn next(&mut self) -> Option<&'a T> {
- self.iter.next()
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
- fn next_back(&mut self) -> Option<&'a T> {
- self.iter.next_back()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for Iter<'a, T> {
- fn len(&self) -> usize { self.iter.len() }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for Iter<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Iterator for IntoIter<T> {
- type Item = T;
-
- fn next(&mut self) -> Option<T> {
- self.iter.next().map(|(k, _)| k)
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> DoubleEndedIterator for IntoIter<T> {
- fn next_back(&mut self) -> Option<T> {
- self.iter.next_back().map(|(k, _)| k)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> ExactSizeIterator for IntoIter<T> {
- fn len(&self) -> usize { self.iter.len() }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for IntoIter<T> {}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, T> Clone for Range<'a, T> {
- fn clone(&self) -> Range<'a, T> {
- Range { iter: self.iter.clone() }
- }
-}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, T> Iterator for Range<'a, T> {
- type Item = &'a T;
-
- fn next(&mut self) -> Option<&'a T> {
- self.iter.next().map(|(k, _)| k)
- }
-}
-
-#[stable(feature = "btree_range", since = "1.17.0")]
-impl<'a, T> DoubleEndedIterator for Range<'a, T> {
- fn next_back(&mut self) -> Option<&'a T> {
- self.iter.next_back().map(|(k, _)| k)
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for Range<'a, T> {}
-
-/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None
-fn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering) -> Ordering {
- match (x, y) {
- (None, _) => short,
- (_, None) => long,
- (Some(x1), Some(y1)) => x1.cmp(y1),
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Difference<'a, T> {
- fn clone(&self) -> Difference<'a, T> {
- Difference {
- a: self.a.clone(),
- b: self.b.clone(),
- }
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T: Ord> Iterator for Difference<'a, T> {
- type Item = &'a T;
-
- fn next(&mut self) -> Option<&'a T> {
- loop {
- match cmp_opt(self.a.peek(), self.b.peek(), Less, Less) {
- Less => return self.a.next(),
- Equal => {
- self.a.next();
- self.b.next();
- }
- Greater => {
- self.b.next();
- }
- }
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- let a_len = self.a.len();
- let b_len = self.b.len();
- (a_len.saturating_sub(b_len), Some(a_len))
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T: Ord> FusedIterator for Difference<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for SymmetricDifference<'a, T> {
- fn clone(&self) -> SymmetricDifference<'a, T> {
- SymmetricDifference {
- a: self.a.clone(),
- b: self.b.clone(),
- }
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
- type Item = &'a T;
-
- fn next(&mut self) -> Option<&'a T> {
- loop {
- match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
- Less => return self.a.next(),
- Equal => {
- self.a.next();
- self.b.next();
- }
- Greater => return self.b.next(),
- }
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (0, Some(self.a.len() + self.b.len()))
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T: Ord> FusedIterator for SymmetricDifference<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Intersection<'a, T> {
- fn clone(&self) -> Intersection<'a, T> {
- Intersection {
- a: self.a.clone(),
- b: self.b.clone(),
- }
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T: Ord> Iterator for Intersection<'a, T> {
- type Item = &'a T;
-
- fn next(&mut self) -> Option<&'a T> {
- loop {
- match Ord::cmp(self.a.peek()?, self.b.peek()?) {
- Less => {
- self.a.next();
- }
- Equal => {
- self.b.next();
- return self.a.next();
- }
- Greater => {
- self.b.next();
- }
- }
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (0, Some(min(self.a.len(), self.b.len())))
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T: Ord> FusedIterator for Intersection<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Union<'a, T> {
- fn clone(&self) -> Union<'a, T> {
- Union {
- a: self.a.clone(),
- b: self.b.clone(),
- }
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T: Ord> Iterator for Union<'a, T> {
- type Item = &'a T;
-
- fn next(&mut self) -> Option<&'a T> {
- match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
- Less => self.a.next(),
- Equal => {
- self.b.next();
- self.a.next()
- }
- Greater => self.b.next(),
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- let a_len = self.a.len();
- let b_len = self.b.len();
- (max(a_len, b_len), Some(a_len + b_len))
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T: Ord> FusedIterator for Union<'a, T> {}
--- /dev/null
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A priority queue implemented with a binary heap.
+//!
+//! Insertion and popping the largest element have `O(log n)` time complexity.
+//! Checking the largest element is `O(1)`. Converting a vector to a binary heap
+//! can be done in-place, and has `O(n)` complexity. A binary heap can also be
+//! converted to a sorted vector in-place, allowing it to be used for an `O(n
+//! log n)` in-place heapsort.
+//!
+//! # Examples
+//!
+//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
+//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
+//! It shows how to use [`BinaryHeap`] with custom types.
+//!
+//! [dijkstra]: http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
+//! [sssp]: http://en.wikipedia.org/wiki/Shortest_path_problem
+//! [dir_graph]: http://en.wikipedia.org/wiki/Directed_graph
+//! [`BinaryHeap`]: struct.BinaryHeap.html
+//!
+//! ```
+//! use std::cmp::Ordering;
+//! use std::collections::BinaryHeap;
+//! use std::usize;
+//!
+//! #[derive(Copy, Clone, Eq, PartialEq)]
+//! struct State {
+//! cost: usize,
+//! position: usize,
+//! }
+//!
+//! // The priority queue depends on `Ord`.
+//! // Explicitly implement the trait so the queue becomes a min-heap
+//! // instead of a max-heap.
+//! impl Ord for State {
+//! fn cmp(&self, other: &State) -> Ordering {
+//! // Notice that we flip the ordering on costs.
+//! // In case of a tie we compare positions - this step is necessary
+//! // to make implementations of `PartialEq` and `Ord` consistent.
+//! other.cost.cmp(&self.cost)
+//! .then_with(|| self.position.cmp(&other.position))
+//! }
+//! }
+//!
+//! // `PartialOrd` needs to be implemented as well.
+//! impl PartialOrd for State {
+//! fn partial_cmp(&self, other: &State) -> Option<Ordering> {
+//! Some(self.cmp(other))
+//! }
+//! }
+//!
+//! // Each node is represented as a `usize`, for a shorter implementation.
+//! struct Edge {
+//! node: usize,
+//! cost: usize,
+//! }
+//!
+//! // Dijkstra's shortest path algorithm.
+//!
+//! // Start at `start` and use `dist` to track the current shortest distance
+//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
+//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
+//! // for a simpler implementation.
+//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
+//! // dist[node] = current shortest distance from `start` to `node`
+//! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
+//!
+//! let mut heap = BinaryHeap::new();
+//!
+//! // We're at `start`, with a zero cost
+//! dist[start] = 0;
+//! heap.push(State { cost: 0, position: start });
+//!
+//! // Examine the frontier with lower cost nodes first (min-heap)
+//! while let Some(State { cost, position }) = heap.pop() {
+//! // Alternatively we could have continued to find all shortest paths
+//! if position == goal { return Some(cost); }
+//!
+//! // Important as we may have already found a better way
+//! if cost > dist[position] { continue; }
+//!
+//! // For each node we can reach, see if we can find a way with
+//! // a lower cost going through this node
+//! for edge in &adj_list[position] {
+//! let next = State { cost: cost + edge.cost, position: edge.node };
+//!
+//! // If so, add it to the frontier and continue
+//! if next.cost < dist[next.position] {
+//! heap.push(next);
+//! // Relaxation, we have now found a better way
+//! dist[next.position] = next.cost;
+//! }
+//! }
+//! }
+//!
+//! // Goal not reachable
+//! None
+//! }
+//!
+//! fn main() {
+//! // This is the directed graph we're going to use.
+//! // The node numbers correspond to the different states,
+//! // and the edge weights symbolize the cost of moving
+//! // from one node to another.
+//! // Note that the edges are one-way.
+//! //
+//! // 7
+//! // +-----------------+
+//! // | |
+//! // v 1 2 | 2
+//! // 0 -----> 1 -----> 3 ---> 4
+//! // | ^ ^ ^
+//! // | | 1 | |
+//! // | | | 3 | 1
+//! // +------> 2 -------+ |
+//! // 10 | |
+//! // +---------------+
+//! //
+//! // The graph is represented as an adjacency list where each index,
+//! // corresponding to a node value, has a list of outgoing edges.
+//! // Chosen for its efficiency.
+//! let graph = vec![
+//! // Node 0
+//! vec![Edge { node: 2, cost: 10 },
+//! Edge { node: 1, cost: 1 }],
+//! // Node 1
+//! vec![Edge { node: 3, cost: 2 }],
+//! // Node 2
+//! vec![Edge { node: 1, cost: 1 },
+//! Edge { node: 3, cost: 3 },
+//! Edge { node: 4, cost: 1 }],
+//! // Node 3
+//! vec![Edge { node: 0, cost: 7 },
+//! Edge { node: 4, cost: 2 }],
+//! // Node 4
+//! vec![]];
+//!
+//! assert_eq!(shortest_path(&graph, 0, 1), Some(1));
+//! assert_eq!(shortest_path(&graph, 0, 3), Some(3));
+//! assert_eq!(shortest_path(&graph, 3, 0), Some(7));
+//! assert_eq!(shortest_path(&graph, 0, 4), Some(5));
+//! assert_eq!(shortest_path(&graph, 4, 0), None);
+//! }
+//! ```
+
+#![allow(missing_docs)]
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::ops::{Deref, DerefMut};
+use core::iter::{FromIterator, FusedIterator};
+use core::mem::{swap, size_of, ManuallyDrop};
+use core::ptr;
+use core::fmt;
+
+use slice;
+use vec::{self, Vec};
+
+use super::SpecExtend;
+
+/// A priority queue implemented with a binary heap.
+///
+/// This will be a max-heap.
+///
+/// It is a logic error for an item to be modified in such a way that the
+/// item's ordering relative to any other item, as determined by the `Ord`
+/// trait, changes while it is in the heap. This is normally only possible
+/// through `Cell`, `RefCell`, global state, I/O, or unsafe code.
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BinaryHeap;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BinaryHeap<i32>` in this example).
+/// let mut heap = BinaryHeap::new();
+///
+/// // We can use peek to look at the next item in the heap. In this case,
+/// // there's no items in there yet so we get None.
+/// assert_eq!(heap.peek(), None);
+///
+/// // Let's add some scores...
+/// heap.push(1);
+/// heap.push(5);
+/// heap.push(2);
+///
+/// // Now peek shows the most important item in the heap.
+/// assert_eq!(heap.peek(), Some(&5));
+///
+/// // We can check the length of a heap.
+/// assert_eq!(heap.len(), 3);
+///
+/// // We can iterate over the items in the heap, although they are returned in
+/// // a random order.
+/// for x in &heap {
+/// println!("{}", x);
+/// }
+///
+/// // If we instead pop these scores, they should come back in order.
+/// assert_eq!(heap.pop(), Some(5));
+/// assert_eq!(heap.pop(), Some(2));
+/// assert_eq!(heap.pop(), Some(1));
+/// assert_eq!(heap.pop(), None);
+///
+/// // We can clear the heap of any remaining items.
+/// heap.clear();
+///
+/// // The heap should now be empty.
+/// assert!(heap.is_empty())
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BinaryHeap<T> {
+ // Invariant: the elements of `data` form a max-heap, so the greatest
+ // element (per `Ord`) is always at index 0.
+ data: Vec<T>,
+}
+
+/// Structure wrapping a mutable reference to the greatest item on a
+/// `BinaryHeap`.
+///
+/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
+/// its documentation for more.
+///
+/// [`peek_mut`]: struct.BinaryHeap.html#method.peek_mut
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+pub struct PeekMut<'a, T: 'a + Ord> {
+ heap: &'a mut BinaryHeap<T>,
+ // Whether `Drop` must sift the (possibly mutated) root back down to
+ // restore the heap invariant. Cleared by `PeekMut::pop`, which removes
+ // the root entirely.
+ sift: bool,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: Ord + fmt::Debug> fmt::Debug for PeekMut<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // `data[0]` cannot panic: a `PeekMut` is only constructed by
+ // `peek_mut`, which returns `None` for an empty heap.
+ f.debug_tuple("PeekMut")
+ .field(&self.heap.data[0])
+ .finish()
+ }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<'a, T: Ord> Drop for PeekMut<'a, T> {
+ fn drop(&mut self) {
+ // The caller may have mutated the root through `DerefMut`, so sift it
+ // down to restore the max-heap invariant. `sift` is false only when
+ // `PeekMut::pop` already removed the root (and restored the invariant).
+ if self.sift {
+ self.heap.sift_down(0);
+ }
+ }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<'a, T: Ord> Deref for PeekMut<'a, T> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ // Index 0 is the heap's greatest element; a `PeekMut` only exists for
+ // a non-empty heap, so the indexing cannot panic.
+ &self.heap.data[0]
+ }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<'a, T: Ord> DerefMut for PeekMut<'a, T> {
+ fn deref_mut(&mut self) -> &mut T {
+ // Mutating the root may break the heap invariant; `Drop` repairs it.
+ &mut self.heap.data[0]
+ }
+}
+
+impl<'a, T: Ord> PeekMut<'a, T> {
+ /// Removes the peeked value from the heap and returns it.
+ #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
+ pub fn pop(mut this: PeekMut<'a, T>) -> T {
+ // `unwrap` is safe: a `PeekMut` only exists for a non-empty heap.
+ let value = this.heap.pop().unwrap();
+ // `pop` already removed the root and restored the heap invariant, so
+ // `Drop` must not sift again.
+ this.sift = false;
+ value
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for BinaryHeap<T> {
+ fn clone(&self) -> Self {
+ BinaryHeap { data: self.data.clone() }
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ // Forward to `Vec::clone_from`, which can reuse `self`'s existing
+ // allocation instead of discarding it.
+ self.data.clone_from(&source.data);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Default for BinaryHeap<T> {
+ /// Creates an empty `BinaryHeap<T>`.
+ #[inline]
+ fn default() -> BinaryHeap<T> {
+ // Delegates to `new` so the two constructors can never diverge.
+ BinaryHeap::new()
+ }
+}
+
+#[stable(feature = "binaryheap_debug", since = "1.4.0")]
+impl<T: fmt::Debug + Ord> fmt::Debug for BinaryHeap<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Entries are printed in the heap's internal (arbitrary) order, not
+ // in sorted order; see `iter`.
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<T: Ord> BinaryHeap<T> {
+ /// Creates an empty `BinaryHeap` as a max-heap.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn new() -> BinaryHeap<T> {
+ // An empty Vec does not allocate, so construction is free.
+ BinaryHeap { data: vec![] }
+ }
+
+ /// Creates an empty `BinaryHeap` with a specific capacity.
+ /// This preallocates enough memory for `capacity` elements,
+ /// so that the `BinaryHeap` does not have to be reallocated
+ /// until it contains at least that many values.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::with_capacity(10);
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
+ // Capacity management is handled entirely by the backing Vec.
+ BinaryHeap { data: Vec::with_capacity(capacity) }
+ }
+
+ /// Returns an iterator visiting all values in the underlying vector, in
+ /// arbitrary order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
+ ///
+ /// // Print 1, 2, 3, 4 in arbitrary order
+ /// for x in heap.iter() {
+ /// println!("{}", x);
+ /// }
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter(&self) -> Iter<T> {
+ // Wraps the backing vector's iterator: values come out in the heap's
+ // internal layout order, not sorted order.
+ Iter { iter: self.data.iter() }
+ }
+
+ /// Returns the greatest item in the binary heap, or `None` if it is empty.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// assert_eq!(heap.peek(), None);
+ ///
+ /// heap.push(1);
+ /// heap.push(5);
+ /// heap.push(2);
+ /// assert_eq!(heap.peek(), Some(&5));
+ ///
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn peek(&self) -> Option<&T> {
+ // The max-heap invariant keeps the greatest element at index 0;
+ // `get` yields `None` for an empty heap.
+ self.data.get(0)
+ }
+
+ /// Returns a mutable reference to the greatest item in the binary heap, or
+ /// `None` if it is empty.
+ ///
+ /// Note: If the `PeekMut` value is leaked, the heap may be in an
+ /// inconsistent state.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// assert!(heap.peek_mut().is_none());
+ ///
+ /// heap.push(1);
+ /// heap.push(5);
+ /// heap.push(2);
+ /// {
+ /// let mut val = heap.peek_mut().unwrap();
+ /// *val = 0;
+ /// }
+ /// assert_eq!(heap.peek(), Some(&2));
+ /// ```
+ #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+ pub fn peek_mut(&mut self) -> Option<PeekMut<T>> {
+ if self.is_empty() {
+ None
+ } else {
+ Some(PeekMut {
+ heap: self,
+ // Assume the caller mutates the root; `Drop` will sift it down
+ // to restore the heap invariant.
+ sift: true,
+ })
+ }
+ }
+
+ /// Returns the number of elements the binary heap can hold without reallocating.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::with_capacity(100);
+ /// assert!(heap.capacity() >= 100);
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn capacity(&self) -> usize {
+ // Capacity is tracked entirely by the backing `Vec`.
+ self.data.capacity()
+ }
+
+ /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
+ /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it requests. Therefore
+ /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
+ /// insertions are expected.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// heap.reserve_exact(100);
+ /// assert!(heap.capacity() >= 100);
+ /// heap.push(4);
+ /// ```
+ ///
+ /// [`reserve`]: #method.reserve
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve_exact(&mut self, additional: usize) {
+ // Delegates to `Vec::reserve_exact`; the doc contract above is Vec's.
+ self.data.reserve_exact(additional);
+ }
+
+ /// Reserves capacity for at least `additional` more elements to be inserted in the
+ /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// heap.reserve(100);
+ /// assert!(heap.capacity() >= 100);
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve(&mut self, additional: usize) {
+ // Delegates to `Vec::reserve`; the doc contract above is Vec's.
+ self.data.reserve(additional);
+ }
+
+ /// Discards as much additional capacity as possible.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
+ ///
+ /// assert!(heap.capacity() >= 100);
+ /// heap.shrink_to_fit();
+ /// assert!(heap.capacity() == 0);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn shrink_to_fit(&mut self) {
+ // Delegates to `Vec::shrink_to_fit`.
+ self.data.shrink_to_fit();
+ }
+
+ /// Discards capacity with a lower bound.
+ ///
+ /// The capacity will remain at least as large as both the length
+ /// and the supplied value.
+ ///
+ /// Panics if the current capacity is smaller than the supplied
+ /// minimum capacity.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(shrink_to)]
+ /// use std::collections::BinaryHeap;
+ /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
+ ///
+ /// assert!(heap.capacity() >= 100);
+ /// heap.shrink_to(10);
+ /// assert!(heap.capacity() >= 10);
+ /// ```
+ #[inline]
+ #[unstable(feature = "shrink_to", reason = "new API", issue="0")]
+ pub fn shrink_to(&mut self, min_capacity: usize) {
+ // Delegates to `Vec::shrink_to` (unstable at this point, like this fn).
+ self.data.shrink_to(min_capacity)
+ }
+
+    /// Removes the greatest item from the binary heap and returns it, or `None` if it
+    /// is empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert_eq!(heap.pop(), Some(3));
+    /// assert_eq!(heap.pop(), Some(1));
+    /// assert_eq!(heap.pop(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop(&mut self) -> Option<T> {
+        self.data.pop().map(|mut item| {
+            if !self.is_empty() {
+                // The popped last element replaces the maximum at the root;
+                // the displaced root is returned, and the heap property is
+                // restored by sifting the new root down.
+                swap(&mut item, &mut self.data[0]);
+                self.sift_down_to_bottom(0);
+            }
+            item
+        })
+    }
+
+    /// Pushes an item onto the binary heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    /// heap.push(3);
+    /// heap.push(5);
+    /// heap.push(1);
+    ///
+    /// assert_eq!(heap.len(), 3);
+    /// assert_eq!(heap.peek(), Some(&5));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push(&mut self, item: T) {
+        // The new element lands at index `old_len` (the end of the buffer)
+        // and is sifted up until its parent is no smaller.
+        let old_len = self.len();
+        self.data.push(item);
+        self.sift_up(0, old_len);
+    }
+
+    /// Consumes the `BinaryHeap` and returns the underlying vector
+    /// in arbitrary order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
+    /// let vec = heap.into_vec();
+    ///
+    /// // Will print in some order
+    /// for x in vec {
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+    pub fn into_vec(self) -> Vec<T> {
+        // Delegates to the `From<BinaryHeap<T>> for Vec<T>` impl below.
+        self.into()
+    }
+
+    /// Consumes the `BinaryHeap` and returns a vector in sorted
+    /// (ascending) order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    ///
+    /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
+    /// heap.push(6);
+    /// heap.push(3);
+    ///
+    /// let vec = heap.into_sorted_vec();
+    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
+    /// ```
+    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+    pub fn into_sorted_vec(mut self) -> Vec<T> {
+        // Classic in-place heapsort: repeatedly swap the maximum (index 0)
+        // past the end of a shrinking prefix, then re-sift the new root
+        // within that prefix.
+        let mut end = self.len();
+        while end > 1 {
+            end -= 1;
+            self.data.swap(0, end);
+            self.sift_down_range(0, end);
+        }
+        self.into_vec()
+    }
+
+    // The implementations of sift_up and sift_down use unsafe blocks in
+    // order to move an element out of the vector (leaving behind a
+    // hole), shift along the others and move the removed element back into the
+    // vector at the final location of the hole.
+    // The `Hole` type is used to represent this, and make sure
+    // the hole is filled back at the end of its scope, even on panic.
+    // Using a hole reduces the constant factor compared to using swaps,
+    // which involves twice as many moves.
+    /// Moves the element at `pos` towards the root (stopping at `start`)
+    /// until its parent is at least as large; returns its final position.
+    fn sift_up(&mut self, start: usize, pos: usize) -> usize {
+        unsafe {
+            // Take out the value at `pos` and create a hole.
+            let mut hole = Hole::new(&mut self.data, pos);
+
+            while hole.pos() > start {
+                // Parent of node i lives at (i - 1) / 2 in the implicit tree.
+                let parent = (hole.pos() - 1) / 2;
+                if hole.element() <= hole.get(parent) {
+                    break;
+                }
+                hole.move_to(parent);
+            }
+            // The hole's destructor drops the element back into place here.
+            hole.pos()
+        }
+    }
+
+    /// Take an element at `pos` and move it down the heap,
+    /// while its children are larger.
+    fn sift_down_range(&mut self, pos: usize, end: usize) {
+        unsafe {
+            let mut hole = Hole::new(&mut self.data, pos);
+            let mut child = 2 * pos + 1;
+            while child < end {
+                let right = child + 1;
+                // compare with the greater of the two children
+                if right < end && !(hole.get(child) > hole.get(right)) {
+                    child = right;
+                }
+                // if we are already in order, stop.
+                if hole.element() >= hole.get(child) {
+                    break;
+                }
+                hole.move_to(child);
+                child = 2 * hole.pos() + 1;
+            }
+        }
+    }
+
+    /// Equivalent to `sift_down_range(pos, self.len())`.
+    fn sift_down(&mut self, pos: usize) {
+        let len = self.len();
+        self.sift_down_range(pos, len);
+    }
+
+    /// Take an element at `pos` and move it all the way down the heap,
+    /// then sift it up to its position.
+    ///
+    /// Note: This is faster when the element is known to be large / should
+    /// be closer to the bottom.
+    fn sift_down_to_bottom(&mut self, mut pos: usize) {
+        let end = self.len();
+        let start = pos;
+        unsafe {
+            let mut hole = Hole::new(&mut self.data, pos);
+            let mut child = 2 * pos + 1;
+            // Walk the hole to a leaf, always descending into the greater
+            // child, without comparing against the removed element.
+            while child < end {
+                let right = child + 1;
+                // compare with the greater of the two children
+                if right < end && !(hole.get(child) > hole.get(right)) {
+                    child = right;
+                }
+                hole.move_to(child);
+                child = 2 * hole.pos() + 1;
+            }
+            pos = hole.pos;
+        }
+        // The element now sits at a leaf; sift it back up to its true spot.
+        self.sift_up(start, pos);
+    }
+
+    /// Returns the length of the binary heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert_eq!(heap.len(), 2);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        self.data.len()
+    }
+
+    /// Checks if the binary heap is empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    ///
+    /// assert!(heap.is_empty());
+    ///
+    /// heap.push(3);
+    /// heap.push(5);
+    /// heap.push(1);
+    ///
+    /// assert!(!heap.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Clears the binary heap, returning an iterator over the removed elements.
+    ///
+    /// The elements are removed in arbitrary order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert!(!heap.is_empty());
+    ///
+    /// for x in heap.drain() {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// assert!(heap.is_empty());
+    /// ```
+    #[inline]
+    #[stable(feature = "drain", since = "1.6.0")]
+    pub fn drain(&mut self) -> Drain<T> {
+        // Drains the whole backing buffer; yields in storage order, which is
+        // arbitrary with respect to the heap ordering.
+        Drain { iter: self.data.drain(..) }
+    }
+
+    /// Drops all items from the binary heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert!(!heap.is_empty());
+    ///
+    /// heap.clear();
+    ///
+    /// assert!(heap.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        // Dropping the returned `Drain` iterator removes and drops every
+        // element while keeping the allocation.
+        self.drain();
+    }
+
+    /// Re-establishes the heap property over the entire buffer by sifting
+    /// down every non-leaf node, from the last parent back to the root
+    /// (bottom-up heapify).
+    fn rebuild(&mut self) {
+        // Nodes at indices >= len / 2 are leaves and need no sifting.
+        let mut n = self.len() / 2;
+        while n > 0 {
+            n -= 1;
+            self.sift_down(n);
+        }
+    }
+
+    /// Moves all the elements of `other` into `self`, leaving `other` empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    ///
+    /// let v = vec![-10, 1, 2, 3, 3];
+    /// let mut a = BinaryHeap::from(v);
+    ///
+    /// let v = vec![-20, 5, 43];
+    /// let mut b = BinaryHeap::from(v);
+    ///
+    /// a.append(&mut b);
+    ///
+    /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+    /// assert!(b.is_empty());
+    /// ```
+    #[stable(feature = "binary_heap_append", since = "1.11.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        // Make `self` the larger heap so the cost model below can assume
+        // len1 >= len2.
+        if self.len() < other.len() {
+            swap(self, other);
+        }
+
+        if other.is_empty() {
+            return;
+        }
+
+        // floor(log2(x)) for x > 0, computed from the leading-zero count.
+        #[inline(always)]
+        fn log2_fast(x: usize) -> usize {
+            8 * size_of::<usize>() - (x.leading_zeros() as usize) - 1
+        }
+
+        // `rebuild` takes O(len1 + len2) operations
+        // and about 2 * (len1 + len2) comparisons in the worst case
+        // while `extend` takes O(len2 * log_2(len1)) operations
+        // and about 1 * len2 * log_2(len1) comparisons in the worst case,
+        // assuming len1 >= len2.
+        #[inline]
+        fn better_to_rebuild(len1: usize, len2: usize) -> bool {
+            2 * (len1 + len2) < len2 * log2_fast(len1)
+        }
+
+        if better_to_rebuild(self.len(), other.len()) {
+            // Concatenate the raw buffers and heapify from scratch.
+            self.data.append(&mut other.data);
+            self.rebuild();
+        } else {
+            // Push elements one by one, sifting each up.
+            self.extend(other.drain());
+        }
+    }
+}
+
+/// Hole represents a hole in a slice i.e. an index without valid value
+/// (because it was moved from or duplicated).
+/// In drop, `Hole` will restore the slice by filling the hole
+/// position with the value that was originally removed.
+struct Hole<'a, T: 'a> {
+    data: &'a mut [T],
+    // Bitwise copy of the element removed from `data[pos]`; `ManuallyDrop`
+    // prevents a double drop since the slice still holds the original bits.
+    elt: ManuallyDrop<T>,
+    // Current hole index; `data[pos]` is logically uninitialized until the
+    // hole is refilled in `drop`.
+    pos: usize,
+}
+
+impl<'a, T> Hole<'a, T> {
+    /// Create a new Hole at index `pos`.
+    ///
+    /// Unsafe because pos must be within the data slice.
+    #[inline]
+    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
+        debug_assert!(pos < data.len());
+        // Duplicate the element bitwise; ownership is tracked by `elt`.
+        let elt = ptr::read(&data[pos]);
+        Hole {
+            data,
+            elt: ManuallyDrop::new(elt),
+            pos,
+        }
+    }
+
+    /// Returns the current position of the hole.
+    #[inline]
+    fn pos(&self) -> usize {
+        self.pos
+    }
+
+    /// Returns a reference to the element removed.
+    #[inline]
+    fn element(&self) -> &T {
+        &self.elt
+    }
+
+    /// Returns a reference to the element at `index`.
+    ///
+    /// Unsafe because index must be within the data slice and not equal to pos.
+    #[inline]
+    unsafe fn get(&self, index: usize) -> &T {
+        debug_assert!(index != self.pos);
+        debug_assert!(index < self.data.len());
+        self.data.get_unchecked(index)
+    }
+
+    /// Move hole to new location
+    ///
+    /// Unsafe because index must be within the data slice and not equal to pos.
+    #[inline]
+    unsafe fn move_to(&mut self, index: usize) {
+        debug_assert!(index != self.pos);
+        debug_assert!(index < self.data.len());
+        // Shift the element at `index` into the hole; `index` becomes the
+        // new hole position.
+        let index_ptr: *const _ = self.data.get_unchecked(index);
+        let hole_ptr = self.data.get_unchecked_mut(self.pos);
+        ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
+        self.pos = index;
+    }
+}
+
+impl<'a, T> Drop for Hole<'a, T> {
+    #[inline]
+    fn drop(&mut self) {
+        // fill the hole again
+        unsafe {
+            let pos = self.pos;
+            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
+        }
+    }
+}
+
+/// An iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.BinaryHeap.html#method.iter
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+    // Borrowing iterator over the heap's backing buffer, so elements are
+    // yielded in storage (arbitrary) order.
+    iter: slice::Iter<'a, T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("Iter")
+            .field(&self.iter.as_slice())
+            .finish()
+    }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for Iter<'a, T> {
+    fn clone(&self) -> Iter<'a, T> {
+        Iter { iter: self.iter.clone() }
+    }
+}
+
+// All iterator methods simply delegate to the inner slice iterator.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a T> {
+        self.iter.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> ExactSizeIterator for Iter<'a, T> {
+    fn is_empty(&self) -> bool {
+        self.iter.is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<'a, T> FusedIterator for Iter<'a, T> {}
+
+/// An owning iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BinaryHeap`][`BinaryHeap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.BinaryHeap.html#method.into_iter
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Clone)]
+pub struct IntoIter<T> {
+    // Consuming iterator over the heap's backing vector; yields elements in
+    // storage (arbitrary) order.
+    iter: vec::IntoIter<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("IntoIter")
+            .field(&self.iter.as_slice())
+            .finish()
+    }
+}
+
+// All iterator methods simply delegate to the inner vector iterator.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.iter.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+    fn is_empty(&self) -> bool {
+        self.iter.is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+/// A draining iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`drain`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.BinaryHeap.html#method.drain
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "drain", since = "1.6.0")]
+#[derive(Debug)]
+pub struct Drain<'a, T: 'a> {
+    // Draining iterator over the heap's backing vector; removes elements in
+    // storage (arbitrary) order and drops the rest when dropped.
+    iter: vec::Drain<'a, T>,
+}
+
+// All iterator methods simply delegate to the inner `vec::Drain`.
+#[stable(feature = "drain", since = "1.6.0")]
+impl<'a, T: 'a> Iterator for Drain<'a, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.iter.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {
+    fn is_empty(&self) -> bool {
+        self.iter.is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
+
+#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
+    fn from(vec: Vec<T>) -> BinaryHeap<T> {
+        // Take ownership of the buffer and heapify it in place.
+        let mut heap = BinaryHeap { data: vec };
+        heap.rebuild();
+        heap
+    }
+}
+
+#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+impl<T> From<BinaryHeap<T>> for Vec<T> {
+    fn from(heap: BinaryHeap<T>) -> Vec<T> {
+        // The backing buffer already holds every element (in heap order).
+        heap.data
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
+        // Collect first, then heapify once via the `From<Vec<T>>` impl.
+        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> IntoIterator for BinaryHeap<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Creates a consuming iterator, that is, one that moves each value out of
+    /// the binary heap in arbitrary order. The binary heap cannot be used
+    /// after calling this.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
+    ///
+    /// // Print 1, 2, 3, 4 in arbitrary order
+    /// for x in heap.into_iter() {
+    ///     // x has type i32, not &i32
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    fn into_iter(self) -> IntoIter<T> {
+        IntoIter { iter: self.data.into_iter() }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a BinaryHeap<T>
+    where T: Ord
+{
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Extend<T> for BinaryHeap<T> {
+    #[inline]
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        // Dispatch through the `SpecExtend` specialization so extending
+        // from another `BinaryHeap` can use the cheaper `append` path.
+        <Self as SpecExtend<I>>::spec_extend(self, iter);
+    }
+}
+
+impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
+    // Generic fallback: push elements one at a time.
+    default fn spec_extend(&mut self, iter: I) {
+        self.extend_desugared(iter.into_iter());
+    }
+}
+
+impl<T: Ord> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
+    // Specialization: merging two heaps can rebuild in O(n) via `append`.
+    fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
+        self.append(other);
+    }
+}
+
+impl<T: Ord> BinaryHeap<T> {
+    fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        let iterator = iter.into_iter();
+        let (lower, _) = iterator.size_hint();
+
+        // Reserve once up front using the size hint's lower bound.
+        self.reserve(lower);
+
+        for elem in iterator {
+            self.push(elem);
+        }
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        // `T: Copy`, so cloning each reference is a cheap bitwise copy.
+        self.extend(iter.into_iter().cloned());
+    }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use core::cmp::Ordering;
+use core::fmt::Debug;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, Peekable, FusedIterator};
+use core::marker::PhantomData;
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::Index;
+use core::ops::RangeBounds;
+use core::{fmt, intrinsics, mem, ptr};
+
+use borrow::Borrow;
+
+use super::node::{self, Handle, NodeRef, marker};
+use super::search;
+
+use super::node::InsertResult::*;
+use super::node::ForceResult::*;
+use super::search::SearchResult::*;
+use self::UnderflowResult::*;
+use self::Entry::*;
+
+/// A map based on a B-Tree.
+///
+/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
+/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal
+/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of
+/// comparisons necessary to find an element (log<sub>2</sub>n). However, in practice the way this
+/// is done is *very* inefficient for modern computer architectures. In particular, every element
+/// is stored in its own individually heap-allocated node. This means that every single insertion
+/// triggers a heap-allocation, and every single comparison should be a cache-miss. Since these
+/// are both notably expensive things to do in practice, we are forced to at very least reconsider
+/// the BST strategy.
+///
+/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
+/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
+/// searches. However, this does mean that searches will have to do *more* comparisons on average.
+/// The precise number of comparisons depends on the node search strategy used. For optimal cache
+/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
+/// the node using binary search. As a compromise, one could also perform a linear search
+/// that initially only checks every i<sup>th</sup> element for some choice of i.
+///
+/// Currently, our implementation simply performs naive linear search. This provides excellent
+/// performance on *small* nodes of elements which are cheap to compare. However in the future we
+/// would like to further explore choosing the optimal search strategy based on the choice of B,
+/// and possibly other factors. Using linear search, searching for a random element is expected
+/// to take O(B log<sub>B</sub>n) comparisons, which is generally worse than a BST. In practice,
+/// however, performance is excellent.
+///
+/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
+/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`Ord`]: ../../std/cmp/trait.Ord.html
+/// [`Cell`]: ../../std/cell/struct.Cell.html
+/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, &str>` in this example).
+/// let mut movie_reviews = BTreeMap::new();
+///
+/// // review some movies.
+/// movie_reviews.insert("Office Space",       "Deals with real issues in the workplace.");
+/// movie_reviews.insert("Pulp Fiction",       "Masterpiece.");
+/// movie_reviews.insert("The Godfather",      "Very enjoyable.");
+/// movie_reviews.insert("The Blues Brothers", "Eye lyked it alot.");
+///
+/// // check for a specific one.
+/// if !movie_reviews.contains_key("Les Misérables") {
+///     println!("We've got {} reviews, but Les Misérables ain't one.",
+///              movie_reviews.len());
+/// }
+///
+/// // oops, this review has a lot of spelling mistakes, let's delete it.
+/// movie_reviews.remove("The Blues Brothers");
+///
+/// // look up the values associated with some keys.
+/// let to_find = ["Up!", "Office Space"];
+/// for book in &to_find {
+///     match movie_reviews.get(book) {
+///        Some(review) => println!("{}: {}", book, review),
+///        None => println!("{} is unreviewed.", book)
+///     }
+/// }
+///
+/// // iterate over everything.
+/// for (movie, review) in &movie_reviews {
+///     println!("{}: \"{}\"", movie, review);
+/// }
+/// ```
+///
+/// `BTreeMap` also implements an [`Entry API`](#method.entry), which allows
+/// for more complex methods of getting, setting, updating and removing keys and
+/// their values:
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, u8>` in this example).
+/// let mut player_stats = BTreeMap::new();
+///
+/// fn random_stat_buff() -> u8 {
+///     // could actually return some random value here - let's just return
+///     // some fixed value for now
+///     42
+/// }
+///
+/// // insert a key only if it doesn't already exist
+/// player_stats.entry("health").or_insert(100);
+///
+/// // insert a key using a function that provides a new value only if it
+/// // doesn't already exist
+/// player_stats.entry("defence").or_insert_with(random_stat_buff);
+///
+/// // update a key, guarding against the key possibly not being set
+/// let stat = player_stats.entry("attack").or_insert(100);
+/// *stat += random_stat_buff();
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BTreeMap<K, V> {
+    // Root node of the tree.
+    root: node::Root<K, V>,
+    // Number of key-value pairs currently stored in the map.
+    length: usize,
+}
+
+#[stable(feature = "btree_drop", since = "1.7.0")]
+unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
+    fn drop(&mut self) {
+        unsafe {
+            // Bitwise-duplicate the map (`ptr::read`) so we can consume it
+            // with `into_iter`, which drops every entry and frees the nodes.
+            // `self` is never touched again after this — drop is finishing.
+            drop(ptr::read(self).into_iter());
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
+    fn clone(&self) -> BTreeMap<K, V> {
+        // Recursively clone one subtree rooted at `node`, preserving the
+        // node structure rather than reinserting element by element.
+        fn clone_subtree<K: Clone, V: Clone>(node: node::NodeRef<marker::Immut,
+                                                                K,
+                                                                V,
+                                                                marker::LeafOrInternal>)
+                                             -> BTreeMap<K, V> {
+
+            match node.force() {
+                Leaf(leaf) => {
+                    // Leaf: start an empty one-leaf tree and push each
+                    // key/value pair across in order.
+                    let mut out_tree = BTreeMap {
+                        root: node::Root::new_leaf(),
+                        length: 0,
+                    };
+
+                    {
+                        let mut out_node = match out_tree.root.as_mut().force() {
+                            Leaf(leaf) => leaf,
+                            Internal(_) => unreachable!(),
+                        };
+
+                        let mut in_edge = leaf.first_edge();
+                        while let Ok(kv) = in_edge.right_kv() {
+                            let (k, v) = kv.into_kv();
+                            in_edge = kv.right_edge();
+
+                            out_node.push(k.clone(), v.clone());
+                            out_tree.length += 1;
+                        }
+                    }
+
+                    out_tree
+                }
+                Internal(internal) => {
+                    // Internal node: clone the leftmost child first, then
+                    // alternately push each separating key/value and the
+                    // subtree that follows it, one level above.
+                    let mut out_tree = clone_subtree(internal.first_edge().descend());
+
+                    {
+                        let mut out_node = out_tree.root.push_level();
+                        let mut in_edge = internal.first_edge();
+                        while let Ok(kv) = in_edge.right_kv() {
+                            let (k, v) = kv.into_kv();
+                            in_edge = kv.right_edge();
+
+                            let k = (*k).clone();
+                            let v = (*v).clone();
+                            let subtree = clone_subtree(in_edge.descend());
+
+                            // We can't destructure subtree directly
+                            // because BTreeMap implements Drop
+                            let (subroot, sublength) = unsafe {
+                                let root = ptr::read(&subtree.root);
+                                let length = subtree.length;
+                                mem::forget(subtree);
+                                (root, length)
+                            };
+
+                            out_node.push(k, v, subroot);
+                            // +1 accounts for the separating key/value pair.
+                            out_tree.length += 1 + sublength;
+                        }
+                    }
+
+                    out_tree
+                }
+            }
+        }
+
+        clone_subtree(self.root.as_ref())
+    }
+}
+
+// Key-recovery operations on a value-less map (`V = ()`): look up, remove,
+// or replace the *stored* key that compares equal to the query.
+// NOTE(review): presumably this backs set-like wrappers built on
+// `BTreeMap<K, ()>` — confirm against the definition of `super::Recover`.
+impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
+    where K: Borrow<Q> + Ord,
+          Q: Ord
+{
+    type Key = K;
+
+    // Returns a reference to the stored key equal to `key`, if any.
+    fn get(&self, key: &Q) -> Option<&K> {
+        match search::search_tree(self.root.as_ref(), key) {
+            Found(handle) => Some(handle.into_kv().0),
+            GoDown(_) => None,
+        }
+    }
+
+    // Removes and returns the stored key equal to `key`, if any.
+    fn take(&mut self, key: &Q) -> Option<K> {
+        match search::search_tree(self.root.as_mut(), key) {
+            Found(handle) => {
+                // Wrap the found handle in an OccupiedEntry so `remove_kv`
+                // also keeps `self.length` in sync.
+                Some(OccupiedEntry {
+                         handle,
+                         length: &mut self.length,
+                         _marker: PhantomData,
+                     }
+                     .remove_kv()
+                     .0)
+            }
+            GoDown(_) => None,
+        }
+    }
+
+    // Inserts `key`, returning the previously stored equal key if present.
+    fn replace(&mut self, key: K) -> Option<K> {
+        self.ensure_root_is_owned();
+        match search::search_tree::<marker::Mut, K, (), K>(self.root.as_mut(), &key) {
+            Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
+            GoDown(handle) => {
+                VacantEntry {
+                    key,
+                    handle,
+                    length: &mut self.length,
+                    _marker: PhantomData,
+                }
+                .insert(());
+                None
+            }
+        }
+    }
+}
+
+/// An iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.BTreeMap.html#method.iter
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, K: 'a, V: 'a> {
+    range: Range<'a, K, V>,
+    // NOTE(review): presumably the count of entries still to be yielded
+    // (for `size_hint`/`ExactSizeIterator`) — confirm in the Iterator impl.
+    length: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Iter<'a, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // `Iter` is `Clone`, so debug-printing can walk a copy without
+        // consuming the original.
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.BTreeMap.html#method.iter_mut
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct IterMut<'a, K: 'a, V: 'a> {
+    range: RangeMut<'a, K, V>,
+    length: usize,
+}
+
+/// An owning iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`][`BTreeMap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.BTreeMap.html#method.into_iter
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<K, V> {
+    // Owned edge handles delimiting the unconsumed portion of the tree,
+    // advanced from the front and back respectively.
+    front: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
+    back: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
+    length: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Reborrow the owned handles into an immutable `Range` so the
+        // remaining entries can be listed without consuming them.
+        let range = Range {
+            front: self.front.reborrow(),
+            back: self.back.reborrow(),
+        };
+        f.debug_list().entries(range).finish()
+    }
+}
+
+/// An iterator over the keys of a `BTreeMap`.
+///
+/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`keys`]: struct.BTreeMap.html#method.keys
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Keys<'a, K: 'a, V: 'a> {
+    // Wraps the entry iterator and projects out the key half.
+    inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, K: 'a + fmt::Debug, V: 'a> fmt::Debug for Keys<'a, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// An iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values`]: struct.BTreeMap.html#method.values
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Values<'a, K: 'a, V: 'a> {
+    // Wraps the entry iterator and projects out the value half.
+    inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, K: 'a, V: 'a + fmt::Debug> fmt::Debug for Values<'a, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values_mut`]: struct.BTreeMap.html#method.values_mut
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+#[derive(Debug)]
+pub struct ValuesMut<'a, K: 'a, V: 'a> {
+    inner: IterMut<'a, K, V>,
+}
+
+/// An iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range`]: struct.BTreeMap.html#method.range
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, K: 'a, V: 'a> {
+    // Immutable edge handles bounding the range from both ends.
+    front: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
+    back: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Range<'a, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range_mut`]: struct.BTreeMap.html#method.range_mut
+/// [`BTreeMap`]: struct.BTreeMap.html
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct RangeMut<'a, K: 'a, V: 'a> {
+    front: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+    back: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+
+    // Be invariant in `K` and `V`
+    _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for RangeMut<'a, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Downgrade the mutable handles to an immutable `Range` for display.
+        let range = Range {
+            front: self.front.reborrow(),
+            back: self.back.reborrow(),
+        };
+        f.debug_list().entries(range).finish()
+    }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
+///
+/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`entry`]: struct.BTreeMap.html#method.entry
+#[stable(feature = "rust1", since = "1.0.0")]
+pub enum Entry<'a, K: 'a, V: 'a> {
+ /// A vacant entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Vacant(#[stable(feature = "rust1", since = "1.0.0")]
+ VacantEntry<'a, K, V>),
+
+ /// An occupied entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Occupied(#[stable(feature = "rust1", since = "1.0.0")]
+ OccupiedEntry<'a, K, V>),
+}
+
+#[stable(feature= "debug_btree_map", since = "1.12.0")]
+impl<'a, K: 'a + Debug + Ord, V: 'a + Debug> Debug for Entry<'a, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ Vacant(ref v) => f.debug_tuple("Entry")
+ .field(v)
+ .finish(),
+ Occupied(ref o) => f.debug_tuple("Entry")
+ .field(o)
+ .finish(),
+ }
+ }
+}
+
+/// A view into a vacant entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct VacantEntry<'a, K: 'a, V: 'a> {
+    // The key that would be inserted, and the leaf edge where it belongs.
+    key: K,
+    handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+    // Borrow of the owning map's element count, bumped on insertion.
+    length: &'a mut usize,
+
+    // Be invariant in `K` and `V`
+    _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature= "debug_btree_map", since = "1.12.0")]
+impl<'a, K: 'a + Debug + Ord, V: 'a> Debug for VacantEntry<'a, K, V> {
+    // Rendered as `VacantEntry(key)` — only the key exists yet.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut tuple = f.debug_tuple("VacantEntry");
+        tuple.field(self.key());
+        tuple.finish()
+    }
+}
+
+/// A view into an occupied entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
+    // Handle on the existing key-value pair inside the tree.
+    handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
+
+    // Borrow of the owning map's element count, decremented on removal.
+    length: &'a mut usize,
+
+    // Be invariant in `K` and `V`
+    _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature= "debug_btree_map", since = "1.12.0")]
+impl<'a, K: 'a + Debug + Ord, V: 'a + Debug> Debug for OccupiedEntry<'a, K, V> {
+    // Rendered as `OccupiedEntry { key: ..., value: ... }`.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut builder = f.debug_struct("OccupiedEntry");
+        builder.field("key", self.key());
+        builder.field("value", self.get());
+        builder.finish()
+    }
+}
+
+// An iterator for merging two sorted sequences into one
+// (consumed by `BTreeMap::append` via `from_sorted_iter`).
+struct MergeIter<K, V, I: Iterator<Item = (K, V)>> {
+    left: Peekable<I>,
+    right: Peekable<I>,
+}
+
+impl<K: Ord, V> BTreeMap<K, V> {
+    /// Makes a new empty BTreeMap with a reasonable choice for B.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    ///
+    /// // entries can now be inserted into the empty map
+    /// map.insert(1, "a");
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new() -> BTreeMap<K, V> {
+        BTreeMap {
+            // The shared empty root defers any allocation until the first
+            // insertion (see `ensure_root_is_owned`).
+            root: node::Root::shared_empty_root(),
+            length: 0,
+        }
+    }
+
+    /// Clears the map, removing all values.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.clear();
+    /// assert!(a.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        // Replacing `self` drops the old tree (freeing all nodes) and leaves
+        // an allocation-free empty map in its place.
+        *self = BTreeMap::new();
+    }
+
+    /// Returns a reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get(&1), Some(&"a"));
+    /// assert_eq!(map.get(&2), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
+        where K: Borrow<Q>,
+              Q: Ord
+    {
+        match search::search_tree(self.root.as_ref(), key) {
+            Found(handle) => Some(handle.into_kv().1),
+            GoDown(_) => None,
+        }
+    }
+
+    /// Returns the key-value pair corresponding to the supplied key.
+    ///
+    /// The supplied key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(map_get_key_value)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+    /// assert_eq!(map.get_key_value(&2), None);
+    /// ```
+    #[unstable(feature = "map_get_key_value", issue = "49347")]
+    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
+        where K: Borrow<Q>,
+              Q: Ord
+    {
+        match search::search_tree(self.root.as_ref(), k) {
+            // Unlike `get`, this also hands back the stored key, which can
+            // differ from `k` in ways invisible to `Ord`.
+            Found(handle) => Some(handle.into_kv()),
+            GoDown(_) => None,
+        }
+    }
+
+    /// Returns `true` if the map contains a value for the specified key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.contains_key(&1), true);
+    /// assert_eq!(map.contains_key(&2), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
+        where K: Borrow<Q>,
+              Q: Ord
+    {
+        self.get(key).is_some()
+    }
+
+    /// Returns a mutable reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// if let Some(x) = map.get_mut(&1) {
+    ///     *x = "b";
+    /// }
+    /// assert_eq!(map[&1], "b");
+    /// ```
+    // See `get` for implementation notes, this is basically a copy-paste with mut's added
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
+        where K: Borrow<Q>,
+              Q: Ord
+    {
+        match search::search_tree(self.root.as_mut(), key) {
+            Found(handle) => Some(handle.into_kv_mut().1),
+            GoDown(_) => None,
+        }
+    }
+
+    /// Inserts a key-value pair into the map.
+    ///
+    /// If the map did not have this key present, `None` is returned.
+    ///
+    /// If the map did have this key present, the value is updated, and the old
+    /// value is returned. The key is not updated, though; this matters for
+    /// types that can be `==` without being identical. See the [module-level
+    /// documentation] for more.
+    ///
+    /// [module-level documentation]: index.html#insert-and-complex-keys
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// assert_eq!(map.insert(37, "a"), None);
+    /// assert_eq!(map.is_empty(), false);
+    ///
+    /// map.insert(37, "b");
+    /// assert_eq!(map.insert(37, "c"), Some("b"));
+    /// assert_eq!(map[&37], "c");
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(&mut self, key: K, value: V) -> Option<V> {
+        // Delegates to the entry API so the search is done exactly once.
+        match self.entry(key) {
+            Occupied(mut entry) => Some(entry.insert(value)),
+            Vacant(entry) => {
+                entry.insert(value);
+                None
+            }
+        }
+    }
+
+    /// Removes a key from the map, returning the value at the key if the key
+    /// was previously in the map.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.remove(&1), Some("a"));
+    /// assert_eq!(map.remove(&1), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
+        where K: Borrow<Q>,
+              Q: Ord
+    {
+        match search::search_tree(self.root.as_mut(), key) {
+            Found(handle) => {
+                // Wrap the found KV in an `OccupiedEntry` so `remove` handles
+                // rebalancing and the length bookkeeping.
+                Some(OccupiedEntry {
+                         handle,
+                         length: &mut self.length,
+                         _marker: PhantomData,
+                     }
+                     .remove())
+            }
+            GoDown(_) => None,
+        }
+    }
+
+    /// Moves all elements from `other` into `Self`, leaving `other` empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// a.insert(3, "c");
+    ///
+    /// let mut b = BTreeMap::new();
+    /// b.insert(3, "d");
+    /// b.insert(4, "e");
+    /// b.insert(5, "f");
+    ///
+    /// a.append(&mut b);
+    ///
+    /// assert_eq!(a.len(), 5);
+    /// assert_eq!(b.len(), 0);
+    ///
+    /// assert_eq!(a[&1], "a");
+    /// assert_eq!(a[&2], "b");
+    /// assert_eq!(a[&3], "d");
+    /// assert_eq!(a[&4], "e");
+    /// assert_eq!(a[&5], "f");
+    /// ```
+    #[stable(feature = "btree_append", since = "1.11.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        // Do we have to append anything at all?
+        if other.len() == 0 {
+            return;
+        }
+
+        // We can just swap `self` and `other` if `self` is empty.
+        if self.len() == 0 {
+            mem::swap(self, other);
+            return;
+        }
+
+        // First, we merge `self` and `other` into a sorted sequence in linear time.
+        let self_iter = mem::replace(self, BTreeMap::new()).into_iter();
+        let other_iter = mem::replace(other, BTreeMap::new()).into_iter();
+        let iter = MergeIter {
+            left: self_iter.peekable(),
+            right: other_iter.peekable(),
+        };
+
+        // Second, we build a tree from the sorted sequence in linear time.
+        self.from_sorted_iter(iter);
+        self.fix_right_edge();
+    }
+
+    /// Constructs a double-ended iterator over a sub-range of elements in the map.
+    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+    /// yield elements from min (inclusive) to max (exclusive).
+    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+    /// range from 4 to 10.
+    ///
+    /// # Panics
+    ///
+    /// Panics if range `start > end`.
+    /// Panics if range `start == end` and both bounds are `Excluded`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::ops::Bound::Included;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(3, "a");
+    /// map.insert(5, "b");
+    /// map.insert(8, "c");
+    /// for (&key, &value) in map.range((Included(&4), Included(&8))) {
+    ///     println!("{}: {}", key, value);
+    /// }
+    /// assert_eq!(Some((&5, &"b")), map.range(4..).next());
+    /// ```
+    #[stable(feature = "btree_range", since = "1.17.0")]
+    pub fn range<T: ?Sized, R>(&self, range: R) -> Range<K, V>
+        where T: Ord, K: Borrow<T>, R: RangeBounds<T>
+    {
+        // Two shared borrows of the root: `range_search` descends once from
+        // each to find the front and back edges of the range.
+        let root1 = self.root.as_ref();
+        let root2 = self.root.as_ref();
+        let (f, b) = range_search(root1, root2, range);
+
+        Range { front: f, back: b }
+    }
+
+    /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
+    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+    /// yield elements from min (inclusive) to max (exclusive).
+    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+    /// range from 4 to 10.
+    ///
+    /// # Panics
+    ///
+    /// Panics if range `start > end`.
+    /// Panics if range `start == end` and both bounds are `Excluded`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"].iter()
+    ///     .map(|&s| (s, 0))
+    ///     .collect();
+    /// for (_, balance) in map.range_mut("B".."Cheryl") {
+    ///     *balance += 100;
+    /// }
+    /// for (name, balance) in &map {
+    ///     println!("{} => {}", name, balance);
+    /// }
+    /// ```
+    #[stable(feature = "btree_range", since = "1.17.0")]
+    pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<K, V>
+        where T: Ord, K: Borrow<T>, R: RangeBounds<T>
+    {
+        let root1 = self.root.as_mut();
+        // NOTE(review): `ptr::read` duplicates the mutable root handle so that
+        // `range_search` can produce both a front and a back edge; the
+        // iterator only ever touches disjoint ends of the range, which is
+        // presumably what keeps this sound — confirm against `node.rs`.
+        let root2 = unsafe { ptr::read(&root1) };
+        let (f, b) = range_search(root1, root2, range);
+
+        RangeMut {
+            front: f,
+            back: b,
+            _marker: PhantomData,
+        }
+    }
+
+    /// Gets the given key's corresponding entry in the map for in-place manipulation.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+    ///
+    /// // count the number of occurrences of letters in the vec
+    /// for x in vec!["a","b","a","c","a","b"] {
+    ///     *count.entry(x).or_insert(0) += 1;
+    /// }
+    ///
+    /// assert_eq!(count["a"], 3);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn entry(&mut self, key: K) -> Entry<K, V> {
+        // FIXME(@porglezomp) Avoid allocating if we don't insert
+        self.ensure_root_is_owned();
+        match search::search_tree(self.root.as_mut(), &key) {
+            Found(handle) => {
+                Occupied(OccupiedEntry {
+                    handle,
+                    length: &mut self.length,
+                    _marker: PhantomData,
+                })
+            }
+            GoDown(handle) => {
+                // The search stopped at the leaf edge where `key` belongs.
+                Vacant(VacantEntry {
+                    key,
+                    handle,
+                    length: &mut self.length,
+                    _marker: PhantomData,
+                })
+            }
+        }
+    }
+
+    /// Bulk-builds the tree from an iterator of entries, appending each pair
+    /// at the right-most edge. Assumes the iterator yields keys in ascending
+    /// order (callers pass merged sorted sequences); the right edge may be
+    /// left underfull and is repaired afterwards by `fix_right_edge`.
+    fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
+        self.ensure_root_is_owned();
+        let mut cur_node = last_leaf_edge(self.root.as_mut()).into_node();
+        // Iterate through all key-value pairs, pushing them into nodes at the right level.
+        for (key, value) in iter {
+            // Try to push key-value pair into the current leaf node.
+            if cur_node.len() < node::CAPACITY {
+                cur_node.push(key, value);
+            } else {
+                // No space left, go up and push there.
+                let mut open_node;
+                let mut test_node = cur_node.forget_type();
+                loop {
+                    match test_node.ascend() {
+                        Ok(parent) => {
+                            let parent = parent.into_node();
+                            if parent.len() < node::CAPACITY {
+                                // Found a node with space left, push here.
+                                open_node = parent;
+                                break;
+                            } else {
+                                // Go up again.
+                                test_node = parent.forget_type();
+                            }
+                        }
+                        Err(node) => {
+                            // We are at the top, create a new root node and push there.
+                            open_node = node.into_root_mut().push_level();
+                            break;
+                        }
+                    }
+                }
+
+                // Push key-value pair and new right subtree.
+                let tree_height = open_node.height() - 1;
+                let mut right_tree = node::Root::new_leaf();
+                for _ in 0..tree_height {
+                    right_tree.push_level();
+                }
+                open_node.push(key, value, right_tree);
+
+                // Go down to the right-most leaf again.
+                cur_node = last_leaf_edge(open_node.forget_type()).into_node();
+            }
+
+            self.length += 1;
+        }
+    }
+
+    /// Restores the minimum-length invariant along the right-most spine after
+    /// a bulk build, stealing from left siblings where a child is underfull.
+    fn fix_right_edge(&mut self) {
+        // Handle underfull nodes, start from the top.
+        let mut cur_node = self.root.as_mut();
+        while let Internal(internal) = cur_node.force() {
+            // Check if right-most child is underfull.
+            let mut last_edge = internal.last_edge();
+            let right_child_len = last_edge.reborrow().descend().len();
+            if right_child_len < node::MIN_LEN {
+                // We need to steal.
+                let mut last_kv = match last_edge.left_kv() {
+                    Ok(left) => left,
+                    // The last edge of a non-empty internal node always has a
+                    // KV to its left.
+                    Err(_) => unreachable!(),
+                };
+                last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
+                last_edge = last_kv.right_edge();
+            }
+
+            // Go further down.
+            cur_node = last_edge.descend();
+        }
+    }
+
+    /// Splits the collection into two at the given key. Returns everything after the given key,
+    /// including the key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// a.insert(3, "c");
+    /// a.insert(17, "d");
+    /// a.insert(41, "e");
+    ///
+    /// let b = a.split_off(&3);
+    ///
+    /// assert_eq!(a.len(), 2);
+    /// assert_eq!(b.len(), 3);
+    ///
+    /// assert_eq!(a[&1], "a");
+    /// assert_eq!(a[&2], "b");
+    ///
+    /// assert_eq!(b[&3], "c");
+    /// assert_eq!(b[&17], "d");
+    /// assert_eq!(b[&41], "e");
+    /// ```
+    #[stable(feature = "btree_split_off", since = "1.11.0")]
+    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
+        where K: Borrow<Q>
+    {
+        if self.is_empty() {
+            return Self::new();
+        }
+
+        let total_num = self.len();
+
+        // Build `right` as an empty tree of the same height as `self`, then
+        // move the suffix at each level across.
+        let mut right = Self::new();
+        right.root = node::Root::new_leaf();
+        for _ in 0..(self.root.as_ref().height()) {
+            right.root.push_level();
+        }
+
+        {
+            let mut left_node = self.root.as_mut();
+            let mut right_node = right.root.as_mut();
+
+            loop {
+                let mut split_edge = match search::search_node(left_node, key) {
+                    // key is going to the right tree
+                    Found(handle) => handle.left_edge(),
+                    GoDown(handle) => handle,
+                };
+
+                split_edge.move_suffix(&mut right_node);
+
+                match (split_edge.force(), right_node.force()) {
+                    (Internal(edge), Internal(node)) => {
+                        left_node = edge.descend();
+                        right_node = node.first_edge().descend();
+                    }
+                    (Leaf(_), Leaf(_)) => {
+                        break;
+                    }
+                    // Both trees have the same height, so the two sides reach
+                    // their leaves simultaneously.
+                    _ => {
+                        unreachable!();
+                    }
+                }
+            }
+        }
+
+        self.fix_right_border();
+        right.fix_left_border();
+
+        // Only one side needs a full recount; the other is derived from the
+        // original total. Recount the shorter tree (fewer nodes to visit).
+        if self.root.as_ref().height() < right.root.as_ref().height() {
+            self.recalc_length();
+            right.length = total_num - self.len();
+        } else {
+            right.recalc_length();
+            self.length = total_num - right.len();
+        }
+
+        right
+    }
+
+    /// Calculates the number of elements if it is incorrect.
+    fn recalc_length(&mut self) {
+        // Recursively sums the KV counts of every node in the subtree.
+        fn dfs<K, V>(node: NodeRef<marker::Immut, K, V, marker::LeafOrInternal>) -> usize {
+            let mut res = node.len();
+
+            if let Internal(node) = node.force() {
+                let mut edge = node.first_edge();
+                loop {
+                    res += dfs(edge.reborrow().descend());
+                    match edge.right_kv() {
+                        Ok(right_kv) => {
+                            edge = right_kv.right_edge();
+                        }
+                        Err(_) => {
+                            break;
+                        }
+                    }
+                }
+            }
+
+            res
+        }
+
+        self.length = dfs(self.root.as_ref());
+    }
+
+    /// Removes empty levels on the top.
+    fn fix_top(&mut self) {
+        loop {
+            {
+                let node = self.root.as_ref();
+                if node.height() == 0 || node.len() > 0 {
+                    break;
+                }
+            }
+            // The root is an internal node with zero KVs: collapse it into
+            // its single child.
+            self.root.pop_level();
+        }
+    }
+
+    /// Repairs the right border after `split_off`: walks down the right-most
+    /// spine merging or stealing so every node satisfies the B-tree minimums.
+    fn fix_right_border(&mut self) {
+        self.fix_top();
+
+        {
+            let mut cur_node = self.root.as_mut();
+
+            while let Internal(node) = cur_node.force() {
+                let mut last_kv = node.last_kv();
+
+                if last_kv.can_merge() {
+                    cur_node = last_kv.merge().descend();
+                } else {
+                    let right_len = last_kv.reborrow().right_edge().descend().len();
+                    // `MINLEN + 1` to avoid readjust if merge happens on the next level.
+                    if right_len < node::MIN_LEN + 1 {
+                        last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
+                    }
+                    cur_node = last_kv.right_edge().descend();
+                }
+            }
+        }
+
+        // Merging may have emptied the root; collapse it again.
+        self.fix_top();
+    }
+
+    /// The symmetric clone of `fix_right_border`.
+    fn fix_left_border(&mut self) {
+        self.fix_top();
+
+        {
+            let mut cur_node = self.root.as_mut();
+
+            while let Internal(node) = cur_node.force() {
+                let mut first_kv = node.first_kv();
+
+                if first_kv.can_merge() {
+                    cur_node = first_kv.merge().descend();
+                } else {
+                    let left_len = first_kv.reborrow().left_edge().descend().len();
+                    if left_len < node::MIN_LEN + 1 {
+                        first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
+                    }
+                    cur_node = first_kv.left_edge().descend();
+                }
+            }
+        }
+
+        self.fix_top();
+    }
+
+    /// If the root node is the shared root node, allocate our own node.
+    fn ensure_root_is_owned(&mut self) {
+        if self.root.is_shared_root() {
+            self.root = node::Root::new_leaf();
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap<K, V> {
+    type Item = (&'a K, &'a V);
+    type IntoIter = Iter<'a, K, V>;
+
+    // Enables `for (k, v) in &map`, yielding shared references in key order.
+    fn into_iter(self) -> Iter<'a, K, V> {
+        self.iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
+    type Item = (&'a K, &'a V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            // SAFETY: `length` counts the entries remaining between the
+            // range's endpoints, so the range is non-empty here.
+            unsafe { Some(self.range.next_unchecked()) }
+        }
+    }
+
+    // Exact: the remaining count is tracked in `length`.
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.length, Some(self.length))
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// `next` keeps returning `None` once `length` hits 0, so `Iter` is fused.
+impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            // SAFETY: `length > 0` guarantees the range is non-empty.
+            unsafe { Some(self.range.next_back_unchecked()) }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> ExactSizeIterator for Iter<'a, K, V> {
+    // `length` is maintained exactly by `next`/`next_back`.
+    fn len(&self) -> usize {
+        self.length
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Clone for Iter<'a, K, V> {
+    // The clone continues from the same position, independently of `self`.
+    fn clone(&self) -> Iter<'a, K, V> {
+        let range = self.range.clone();
+        let length = self.length;
+        Iter { range, length }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap<K, V> {
+    type Item = (&'a K, &'a mut V);
+    type IntoIter = IterMut<'a, K, V>;
+
+    // Enables `for (k, v) in &mut map`; keys stay shared, values mutable.
+    fn into_iter(self) -> IterMut<'a, K, V> {
+        self.iter_mut()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
+    type Item = (&'a K, &'a mut V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            // SAFETY: `length > 0` guarantees the underlying range is
+            // non-empty.
+            unsafe { Some(self.range.next_unchecked()) }
+        }
+    }
+
+    // Exact: the remaining count is tracked in `length`.
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.length, Some(self.length))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            // SAFETY: `length > 0` guarantees the underlying range is
+            // non-empty.
+            unsafe { Some(self.range.next_back_unchecked()) }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> ExactSizeIterator for IterMut<'a, K, V> {
+    // `length` is maintained exactly by `next`/`next_back`.
+    fn len(&self) -> usize {
+        self.length
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// `next` keeps returning `None` once `length` hits 0, so `IterMut` is fused.
+impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> IntoIterator for BTreeMap<K, V> {
+    type Item = (K, V);
+    type IntoIter = IntoIter<K, V>;
+
+    fn into_iter(self) -> IntoIter<K, V> {
+        // SAFETY: ownership of the tree is transferred to the `IntoIter`:
+        // the root is duplicated with `ptr::read` (once per endpoint) and
+        // `mem::forget(self)` then prevents the map's own destructor from
+        // also freeing the nodes. From here on, `IntoIter`/its `Drop` are
+        // responsible for dropping elements and deallocating nodes.
+        let root1 = unsafe { ptr::read(&self.root).into_ref() };
+        let root2 = unsafe { ptr::read(&self.root).into_ref() };
+        let len = self.length;
+        mem::forget(self);
+
+        IntoIter {
+            front: first_leaf_edge(root1),
+            back: last_leaf_edge(root2),
+            length: len,
+        }
+    }
+}
+
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V> Drop for IntoIter<K, V> {
+    fn drop(&mut self) {
+        // Drop all remaining elements first; `next` deallocates interior
+        // nodes as it exhausts them, leaving only the right-most spine.
+        self.for_each(drop);
+        unsafe {
+            let leaf_node = ptr::read(&self.front).into_node();
+            // The shared empty root is never freed.
+            if leaf_node.is_shared_root() {
+                return;
+            }
+
+            // Free the remaining spine: the leaf, then each ancestor up to
+            // and including the root.
+            if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
+                let mut cur_node = first_parent.into_node();
+                while let Some(parent) = cur_node.deallocate_and_ascend() {
+                    cur_node = parent.into_node()
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Iterator for IntoIter<K, V> {
+    type Item = (K, V);
+
+    fn next(&mut self) -> Option<(K, V)> {
+        if self.length == 0 {
+            return None;
+        } else {
+            self.length -= 1;
+        }
+
+        // Copy of the front handle; `ptr::read` is needed because the handle
+        // is consumed below while `self.front` must be reassigned.
+        let handle = unsafe { ptr::read(&self.front) };
+
+        let mut cur_handle = match handle.right_kv() {
+            Ok(kv) => {
+                // Move the key and value out of the node by value; the node's
+                // slots are treated as logically uninitialized afterwards.
+                let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                self.front = kv.right_edge();
+                return Some((k, v));
+            }
+            // Leaf exhausted: free it and continue in the parent.
+            // SAFETY: `length > 0` guarantees an ancestor with a KV exists.
+            Err(last_edge) => unsafe {
+                unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
+            },
+        };
+
+        loop {
+            match cur_handle.right_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    // Descend to the left-most leaf of the next subtree.
+                    self.front = first_leaf_edge(kv.right_edge().descend());
+                    return Some((k, v));
+                }
+                // Internal node exhausted too: free it and keep ascending.
+                Err(last_edge) => unsafe {
+                    cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
+                },
+            }
+        }
+    }
+
+    // Exact: the remaining count is tracked in `length`.
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.length, Some(self.length))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
+    // Mirror image of `next`: consumes from the back edge, moving values out
+    // with `ptr::read` and freeing exhausted nodes while ascending.
+    fn next_back(&mut self) -> Option<(K, V)> {
+        if self.length == 0 {
+            return None;
+        } else {
+            self.length -= 1;
+        }
+
+        let handle = unsafe { ptr::read(&self.back) };
+
+        let mut cur_handle = match handle.left_kv() {
+            Ok(kv) => {
+                let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                self.back = kv.left_edge();
+                return Some((k, v));
+            }
+            // SAFETY: `length > 0` guarantees an ancestor with a KV exists.
+            Err(last_edge) => unsafe {
+                unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
+            },
+        };
+
+        loop {
+            match cur_handle.left_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    self.back = last_leaf_edge(kv.left_edge().descend());
+                    return Some((k, v));
+                }
+                Err(last_edge) => unsafe {
+                    cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
+                },
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for IntoIter<K, V> {
+    // `length` is maintained exactly by `next`/`next_back`.
+    fn len(&self) -> usize {
+        self.length
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// `next` keeps returning `None` once `length` hits 0, so `IntoIter` is fused.
+impl<K, V> FusedIterator for IntoIter<K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+    type Item = &'a K;
+
+    // Delegates to the underlying entry iterator, keeping only the key.
+    fn next(&mut self) -> Option<&'a K> {
+        self.inner.next().map(|entry| entry.0)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
+    // Same delegation as `next`, from the back of the entry iterator.
+    fn next_back(&mut self) -> Option<&'a K> {
+        self.inner.next_back().map(|entry| entry.0)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {
+    // Exactness is inherited from the underlying entry iterator.
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Fused because the underlying entry iterator is fused.
+impl<'a, K, V> FusedIterator for Keys<'a, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Clone for Keys<'a, K, V> {
+    // The clone continues from the same position, independently of `self`.
+    fn clone(&self) -> Keys<'a, K, V> {
+        let inner = self.inner.clone();
+        Keys { inner }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+    type Item = &'a V;
+
+    // Delegates to the underlying entry iterator, keeping only the value.
+    fn next(&mut self) -> Option<&'a V> {
+        self.inner.next().map(|entry| entry.1)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
+    // Same delegation as `next`, from the back of the entry iterator.
+    fn next_back(&mut self) -> Option<&'a V> {
+        self.inner.next_back().map(|entry| entry.1)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {
+    // Exactness is inherited from the underlying entry iterator.
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Fused because the underlying entry iterator is fused.
+impl<'a, K, V> FusedIterator for Values<'a, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Clone for Values<'a, K, V> {
+    // The clone continues from the same position, independently of `self`.
+    fn clone(&self) -> Values<'a, K, V> {
+        let inner = self.inner.clone();
+        Values { inner }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for Range<'a, K, V> {
+    type Item = (&'a K, &'a V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a V)> {
+        // The range is exhausted exactly when the two edge handles meet;
+        // there is no separate length counter.
+        if self.front == self.back {
+            None
+        } else {
+            // SAFETY: `front != back` means at least one entry remains.
+            unsafe { Some(self.next_unchecked()) }
+        }
+    }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+    type Item = &'a mut V;
+
+    // Delegates to the underlying entry iterator, keeping only the
+    // mutable value reference.
+    fn next(&mut self) -> Option<&'a mut V> {
+        self.inner.next().map(|entry| entry.1)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
+    // Same delegation as `next`, from the back of the entry iterator.
+    fn next_back(&mut self) -> Option<&'a mut V> {
+        self.inner.next_back().map(|entry| entry.1)
+    }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> {
+    // Exactness is inherited from the underlying entry iterator.
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Fused because the underlying entry iterator is fused.
+impl<'a, K, V> FusedIterator for ValuesMut<'a, K, V> {}
+
+
+impl<'a, K, V> Range<'a, K, V> {
+    /// Advances the front edge and returns the entry it passes over.
+    ///
+    /// # Safety
+    ///
+    /// The caller must guarantee the range is non-empty (`front != back`).
+    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+        let handle = self.front;
+
+        // Fast path: a KV immediately to the right within the current leaf.
+        let mut cur_handle = match handle.right_kv() {
+            Ok(kv) => {
+                let ret = kv.into_kv();
+                self.front = kv.right_edge();
+                return ret;
+            }
+            // Leaf exhausted: ascend. Non-emptiness guarantees an ancestor
+            // with a KV to the right exists, so `ascend` cannot fail here.
+            Err(last_edge) => {
+                let next_level = last_edge.into_node().ascend().ok();
+                unwrap_unchecked(next_level)
+            }
+        };
+
+        loop {
+            match cur_handle.right_kv() {
+                Ok(kv) => {
+                    let ret = kv.into_kv();
+                    // Resume at the left-most leaf edge of the next subtree.
+                    self.front = first_leaf_edge(kv.right_edge().descend());
+                    return ret;
+                }
+                Err(last_edge) => {
+                    let next_level = last_edge.into_node().ascend().ok();
+                    cur_handle = unwrap_unchecked(next_level);
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+        if self.front == self.back {
+            None
+        } else {
+            // SAFETY: `front != back` means at least one entry remains.
+            unsafe { Some(self.next_back_unchecked()) }
+        }
+    }
+}
+
+impl<'a, K, V> Range<'a, K, V> {
+    /// Retreats the back edge and returns the entry it passes over.
+    /// Mirror image of `next_unchecked`.
+    ///
+    /// # Safety
+    ///
+    /// The caller must guarantee the range is non-empty (`front != back`).
+    unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+        let handle = self.back;
+
+        let mut cur_handle = match handle.left_kv() {
+            Ok(kv) => {
+                let ret = kv.into_kv();
+                self.back = kv.left_edge();
+                return ret;
+            }
+            // Leaf exhausted on the left: ascend; non-emptiness guarantees
+            // an ancestor with a KV to the left exists.
+            Err(last_edge) => {
+                let next_level = last_edge.into_node().ascend().ok();
+                unwrap_unchecked(next_level)
+            }
+        };
+
+        loop {
+            match cur_handle.left_kv() {
+                Ok(kv) => {
+                    let ret = kv.into_kv();
+                    // Resume at the right-most leaf edge of the previous subtree.
+                    self.back = last_leaf_edge(kv.left_edge().descend());
+                    return ret;
+                }
+                Err(last_edge) => {
+                    let next_level = last_edge.into_node().ascend().ok();
+                    cur_handle = unwrap_unchecked(next_level);
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once `front == back` the iterator keeps returning `None`, so it is fused.
+impl<'a, K, V> FusedIterator for Range<'a, K, V> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Clone for Range<'a, K, V> {
+    // Immutable edge handles are copyable, so cloning just copies both ends.
+    fn clone(&self) -> Range<'a, K, V> {
+        let (front, back) = (self.front, self.back);
+        Range { front, back }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
+    type Item = (&'a K, &'a mut V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+        // Exhausted exactly when the two edge handles meet.
+        if self.front == self.back {
+            None
+        } else {
+            // SAFETY: `front != back` means at least one entry remains.
+            unsafe { Some(self.next_unchecked()) }
+        }
+    }
+}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+    /// Advances the front edge and returns the entry it passes over, with a
+    /// mutable value reference. Mutable counterpart of `Range::next_unchecked`;
+    /// handles are duplicated with `ptr::read` because mutable handles are
+    /// not copyable.
+    ///
+    /// # Safety
+    ///
+    /// The caller must guarantee the range is non-empty (`front != back`).
+    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
+        let handle = ptr::read(&self.front);
+
+        let mut cur_handle = match handle.right_kv() {
+            Ok(kv) => {
+                // Duplicate the handle: one copy yields the references, the
+                // other advances `self.front`.
+                let (k, v) = ptr::read(&kv).into_kv_mut();
+                self.front = kv.right_edge();
+                return (k, v);
+            }
+            // Leaf exhausted: ascend; non-emptiness guarantees an ancestor
+            // with a KV to the right exists.
+            Err(last_edge) => {
+                let next_level = last_edge.into_node().ascend().ok();
+                unwrap_unchecked(next_level)
+            }
+        };
+
+        loop {
+            match cur_handle.right_kv() {
+                Ok(kv) => {
+                    let (k, v) = ptr::read(&kv).into_kv_mut();
+                    self.front = first_leaf_edge(kv.right_edge().descend());
+                    return (k, v);
+                }
+                Err(last_edge) => {
+                    let next_level = last_edge.into_node().ascend().ok();
+                    cur_handle = unwrap_unchecked(next_level);
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+        if self.front == self.back {
+            None
+        } else {
+            // SAFETY: `front != back` means at least one entry remains.
+            unsafe { Some(self.next_back_unchecked()) }
+        }
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once `front == back` the iterator keeps returning `None`, so it is fused.
+impl<'a, K, V> FusedIterator for RangeMut<'a, K, V> {}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+    /// Retreats the back edge and returns the entry it passes over, with a
+    /// mutable value reference. Mirror image of `next_unchecked`.
+    ///
+    /// # Safety
+    ///
+    /// The caller must guarantee the range is non-empty (`front != back`).
+    unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
+        let handle = ptr::read(&self.back);
+
+        let mut cur_handle = match handle.left_kv() {
+            Ok(kv) => {
+                // Duplicate the handle: one copy yields the references, the
+                // other retreats `self.back`.
+                let (k, v) = ptr::read(&kv).into_kv_mut();
+                self.back = kv.left_edge();
+                return (k, v);
+            }
+            // Leaf exhausted on the left: ascend; non-emptiness guarantees
+            // an ancestor with a KV to the left exists.
+            Err(last_edge) => {
+                let next_level = last_edge.into_node().ascend().ok();
+                unwrap_unchecked(next_level)
+            }
+        };
+
+        loop {
+            match cur_handle.left_kv() {
+                Ok(kv) => {
+                    let (k, v) = ptr::read(&kv).into_kv_mut();
+                    self.back = last_leaf_edge(kv.left_edge().descend());
+                    return (k, v);
+                }
+                Err(last_edge) => {
+                    let next_level = last_edge.into_node().ascend().ok();
+                    cur_handle = unwrap_unchecked(next_level);
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
+    // Builds the map by inserting every pair in iteration order; a later
+    // duplicate key overwrites the earlier value, as with repeated `insert`.
+    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
+        let mut result = BTreeMap::new();
+        for (key, value) in iter {
+            result.insert(key, value);
+        }
+        result
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
+    // Inserts each pair in iteration order; later duplicate keys overwrite
+    // earlier values.
+    #[inline]
+    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+        for (k, v) in iter {
+            self.insert(k, v);
+        }
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
+    // Copies each borrowed pair (both types are `Copy`) and inserts it.
+    fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
+        for (&key, &value) in iter {
+            self.insert(key, value);
+        }
+    }
+}
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
+     /// Feeds every entry, in ascending key order, into `state`.
+     /// (Hashing the key then the value matches the tuple `Hash` impl.)
+     fn hash<H: Hasher>(&self, state: &mut H) {
+         for (key, value) in self {
+             key.hash(state);
+             value.hash(state);
+         }
+     }
+ }
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<K: Ord, V> Default for BTreeMap<K, V> {
+     /// Creates an empty `BTreeMap<K, V>`.
+     fn default() -> BTreeMap<K, V> {
+         Self::new()
+     }
+ }
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
+     /// Two maps are equal when they have the same length and their entries,
+     /// compared pairwise in key order, are all equal.
+     fn eq(&self, other: &BTreeMap<K, V>) -> bool {
+         if self.len() != other.len() {
+             return false;
+         }
+         self.iter().zip(other).all(|(a, b)| a == b)
+     }
+ }
+
+ // Equality on `BTreeMap` is an equivalence relation whenever it is one on
+ // `K` and `V`, so the `Eq` marker is sound to provide.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
+     /// Lexicographic comparison of the two entry sequences, in key order.
+     #[inline]
+     fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
+         Iterator::partial_cmp(self.iter(), other.iter())
+     }
+ }
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
+     /// Lexicographic total-order comparison of the two entry sequences.
+     #[inline]
+     fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
+         Iterator::cmp(self.iter(), other.iter())
+     }
+ }
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
+     /// Formats the map with `{key: value, ...}` syntax via `debug_map`.
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         let mut builder = f.debug_map();
+         builder.entries(self.iter());
+         builder.finish()
+     }
+ }
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<'a, K: Ord, Q: ?Sized, V> Index<&'a Q> for BTreeMap<K, V>
+     where K: Borrow<Q>,
+           Q: Ord
+ {
+     type Output = V;
+
+     /// Returns a reference to the value corresponding to the supplied key.
+     ///
+     /// # Panics
+     ///
+     /// Panics if the key is not present in the `BTreeMap`.
+     #[inline]
+     fn index(&self, key: &Q) -> &V {
+         let found = self.get(key);
+         found.expect("no entry found for key")
+     }
+ }
+
+ /// Walks down the leftmost spine of `node` and returns the leaf edge that
+ /// sits before the very first key/value pair of the whole subtree.
+ fn first_leaf_edge<BorrowType, K, V>
+     (mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>)
+     -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+     loop {
+         node = match node.force() {
+             Leaf(leaf) => return leaf.first_edge(),
+             Internal(internal) => internal.first_edge().descend(),
+         };
+     }
+ }
+
+ /// Walks down the rightmost spine of `node` and returns the leaf edge that
+ /// sits after the very last key/value pair of the whole subtree.
+ fn last_leaf_edge<BorrowType, K, V>
+     (mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>)
+     -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+     loop {
+         node = match node.force() {
+             Leaf(leaf) => return leaf.last_edge(),
+             Internal(internal) => internal.last_edge().descend(),
+         };
+     }
+ }
+
+ /// Finds the pair of leaf edges delimiting `range`, descending both bounds
+ /// in lock-step. `root1` and `root2` are expected to reference the same
+ /// tree (see `iter_mut`, which duplicates a root via `ptr::read`).
+ fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
+     root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+     root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+     range: R
+ )-> (Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+      Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>)
+         where Q: Ord, K: Borrow<Q>
+ {
+     // Reject statically invalid ranges up front.
+     match (range.start_bound(), range.end_bound()) {
+         (Excluded(s), Excluded(e)) if s==e =>
+             panic!("range start and end are equal and excluded in BTreeMap"),
+         (Included(s), Included(e)) |
+         (Included(s), Excluded(e)) |
+         (Excluded(s), Included(e)) |
+         (Excluded(s), Excluded(e)) if s>e =>
+             panic!("range start is greater than range end in BTreeMap"),
+         _ => {},
+     };
+
+     let mut min_node = root1;
+     let mut max_node = root2;
+     // `*_found` flips once the exact bound key has been located on some
+     // level; below that level no further key search is needed.
+     let mut min_found = false;
+     let mut max_found = false;
+     // Until the two descents take different edges they walk the same
+     // nodes, which is where an inconsistent `Ord` can be detected.
+     let mut diverged = false;
+
+     loop {
+         let min_edge = match (min_found, range.start_bound()) {
+             (false, Included(key)) => match search::search_linear(&min_node, key) {
+                 (i, true) => { min_found = true; i },
+                 (i, false) => i,
+             },
+             (false, Excluded(key)) => match search::search_linear(&min_node, key) {
+                 (i, true) => { min_found = true; i+1 },
+                 (i, false) => i,
+             },
+             (_, Unbounded) => 0,
+             // Bound key already matched above: hug one side of the subtree.
+             (true, Included(_)) => min_node.keys().len(),
+             (true, Excluded(_)) => 0,
+         };
+
+         let max_edge = match (max_found, range.end_bound()) {
+             (false, Included(key)) => match search::search_linear(&max_node, key) {
+                 (i, true) => { max_found = true; i+1 },
+                 (i, false) => i,
+             },
+             (false, Excluded(key)) => match search::search_linear(&max_node, key) {
+                 (i, true) => { max_found = true; i },
+                 (i, false) => i,
+             },
+             (_, Unbounded) => max_node.keys().len(),
+             // Bound key already matched above: hug one side of the subtree.
+             (true, Included(_)) => 0,
+             (true, Excluded(_)) => max_node.keys().len(),
+         };
+
+         if !diverged {
+             if max_edge < min_edge { panic!("Ord is ill-defined in BTreeMap range") }
+             if min_edge != max_edge { diverged = true; }
+         }
+
+         let front = Handle::new_edge(min_node, min_edge);
+         let back = Handle::new_edge(max_node, max_edge);
+         match (front.force(), back.force()) {
+             (Leaf(f), Leaf(b)) => {
+                 return (f, b);
+             },
+             (Internal(min_int), Internal(max_int)) => {
+                 min_node = min_int.descend();
+                 max_node = max_int.descend();
+             },
+             // Both descents started at the same root and B-trees have
+             // uniform depth, so the forced types always agree.
+             _ => unreachable!("BTreeMap has different depths"),
+         };
+     }
+ }
+
+ /// Extracts the value from an `Option` the caller guarantees is `Some`.
+ /// A `None` panics in debug builds; in release builds it is undefined
+ /// behavior (`intrinsics::unreachable`), hence `unsafe`.
+ #[inline(always)]
+ unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
+     match val {
+         Some(value) => value,
+         None => {
+             if cfg!(debug_assertions) {
+                 panic!("'unchecked' unwrap on None in BTreeMap");
+             } else {
+                 intrinsics::unreachable()
+             }
+         }
+     }
+ }
+
+ impl<K, V> BTreeMap<K, V> {
+     /// Gets an iterator over the entries of the map, sorted by key.
+     ///
+     /// # Examples
+     ///
+     /// Basic usage:
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map = BTreeMap::new();
+     /// map.insert(3, "c");
+     /// map.insert(2, "b");
+     /// map.insert(1, "a");
+     ///
+     /// for (key, value) in map.iter() {
+     ///     println!("{}: {}", key, value);
+     /// }
+     ///
+     /// let (first_key, first_value) = map.iter().next().unwrap();
+     /// assert_eq!((*first_key, *first_value), (1, "a"));
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn iter(&self) -> Iter<K, V> {
+         Iter {
+             // Two leaf-edge cursors walking towards each other from the
+             // extremes of the tree.
+             range: Range {
+                 front: first_leaf_edge(self.root.as_ref()),
+                 back: last_leaf_edge(self.root.as_ref()),
+             },
+             length: self.length,
+         }
+     }
+
+     /// Gets a mutable iterator over the entries of the map, sorted by key.
+     ///
+     /// # Examples
+     ///
+     /// Basic usage:
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map = BTreeMap::new();
+     /// map.insert("a", 1);
+     /// map.insert("b", 2);
+     /// map.insert("c", 3);
+     ///
+     /// // add 10 to the value if the key isn't "a"
+     /// for (key, value) in map.iter_mut() {
+     ///     if key != &"a" {
+     ///         *value += 10;
+     ///     }
+     /// }
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn iter_mut(&mut self) -> IterMut<K, V> {
+         let root1 = self.root.as_mut();
+         // Bitwise-duplicate the mutable root so both cursors can hold a
+         // position into the same tree. Sound because the cursors only move
+         // towards each other and stop when they meet, so no entry is
+         // handed out twice.
+         let root2 = unsafe { ptr::read(&root1) };
+         IterMut {
+             range: RangeMut {
+                 front: first_leaf_edge(root1),
+                 back: last_leaf_edge(root2),
+                 _marker: PhantomData,
+             },
+             length: self.length,
+         }
+     }
+
+     /// Gets an iterator over the keys of the map, in sorted order.
+     ///
+     /// # Examples
+     ///
+     /// Basic usage:
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut a = BTreeMap::new();
+     /// a.insert(2, "b");
+     /// a.insert(1, "a");
+     ///
+     /// let keys: Vec<_> = a.keys().cloned().collect();
+     /// assert_eq!(keys, [1, 2]);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
+         Keys { inner: self.iter() }
+     }
+
+     /// Gets an iterator over the values of the map, in order by key.
+     ///
+     /// # Examples
+     ///
+     /// Basic usage:
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut a = BTreeMap::new();
+     /// a.insert(1, "hello");
+     /// a.insert(2, "goodbye");
+     ///
+     /// let values: Vec<&str> = a.values().cloned().collect();
+     /// assert_eq!(values, ["hello", "goodbye"]);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn values<'a>(&'a self) -> Values<'a, K, V> {
+         Values { inner: self.iter() }
+     }
+
+     /// Gets a mutable iterator over the values of the map, in order by key.
+     ///
+     /// # Examples
+     ///
+     /// Basic usage:
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut a = BTreeMap::new();
+     /// a.insert(1, String::from("hello"));
+     /// a.insert(2, String::from("goodbye"));
+     ///
+     /// for value in a.values_mut() {
+     ///     value.push_str("!");
+     /// }
+     ///
+     /// let values: Vec<String> = a.values().cloned().collect();
+     /// assert_eq!(values, [String::from("hello!"),
+     ///                     String::from("goodbye!")]);
+     /// ```
+     #[stable(feature = "map_values_mut", since = "1.10.0")]
+     pub fn values_mut(&mut self) -> ValuesMut<K, V> {
+         ValuesMut { inner: self.iter_mut() }
+     }
+
+     /// Returns the number of elements in the map.
+     ///
+     /// # Examples
+     ///
+     /// Basic usage:
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut a = BTreeMap::new();
+     /// assert_eq!(a.len(), 0);
+     /// a.insert(1, "a");
+     /// assert_eq!(a.len(), 1);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn len(&self) -> usize {
+         // `length` is maintained on every insert/remove, so this is O(1).
+         self.length
+     }
+
+     /// Returns `true` if the map contains no elements.
+     ///
+     /// # Examples
+     ///
+     /// Basic usage:
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut a = BTreeMap::new();
+     /// assert!(a.is_empty());
+     /// a.insert(1, "a");
+     /// assert!(!a.is_empty());
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn is_empty(&self) -> bool {
+         self.len() == 0
+     }
+ }
+
+ impl<'a, K: Ord, V> Entry<'a, K, V> {
+     /// Ensures a value is in the entry by inserting the default if empty, and returns
+     /// a mutable reference to the value in the entry.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     ///
+     /// assert_eq!(map["poneyland"], 12);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn or_insert(self, default: V) -> &'a mut V {
+         match self {
+             Vacant(entry) => entry.insert(default),
+             Occupied(entry) => entry.into_mut(),
+         }
+     }
+
+     /// Ensures a value is in the entry by inserting the result of the default function if empty,
+     /// and returns a mutable reference to the value in the entry.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
+     /// let s = "hoho".to_string();
+     ///
+     /// map.entry("poneyland").or_insert_with(|| s);
+     ///
+     /// assert_eq!(map["poneyland"], "hoho".to_string());
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+         match self {
+             // `default` is only invoked when there is nothing to return.
+             Vacant(entry) => entry.insert(default()),
+             Occupied(entry) => entry.into_mut(),
+         }
+     }
+
+     /// Returns a reference to this entry's key.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+     /// ```
+     #[stable(feature = "map_entry_keys", since = "1.10.0")]
+     pub fn key(&self) -> &K {
+         match *self {
+             Vacant(ref entry) => entry.key(),
+             Occupied(ref entry) => entry.key(),
+         }
+     }
+
+     /// Provides in-place mutable access to an occupied entry before any
+     /// potential inserts into the map.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     ///
+     /// map.entry("poneyland")
+     ///    .and_modify(|e| { *e += 1 })
+     ///    .or_insert(42);
+     /// assert_eq!(map["poneyland"], 42);
+     ///
+     /// map.entry("poneyland")
+     ///    .and_modify(|e| { *e += 1 })
+     ///    .or_insert(42);
+     /// assert_eq!(map["poneyland"], 43);
+     /// ```
+     #[stable(feature = "entry_and_modify", since = "1.26.0")]
+     pub fn and_modify<F>(self, f: F) -> Self
+         where F: FnOnce(&mut V)
+     {
+         match self {
+             // A vacant entry passes through untouched.
+             Vacant(entry) => Vacant(entry),
+             Occupied(mut entry) => {
+                 f(entry.get_mut());
+                 Occupied(entry)
+             }
+         }
+     }
+ }
+
+ impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
+     /// Ensures a value is in the entry by inserting the default value if empty,
+     /// and returns a mutable reference to the value in the entry.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// # fn main() {
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
+     /// map.entry("poneyland").or_default();
+     ///
+     /// assert_eq!(map["poneyland"], None);
+     /// # }
+     /// ```
+     #[stable(feature = "entry_or_default", since = "1.28.0")]
+     pub fn or_default(self) -> &'a mut V {
+         match self {
+             Vacant(entry) => entry.insert(V::default()),
+             Occupied(entry) => entry.into_mut(),
+         }
+     }
+ }
+
+ impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
+     /// Gets a reference to the key that would be used when inserting a value
+     /// through the VacantEntry.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+     /// ```
+     #[stable(feature = "map_entry_keys", since = "1.10.0")]
+     pub fn key(&self) -> &K {
+         &self.key
+     }
+
+     /// Take ownership of the key.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     /// use std::collections::btree_map::Entry;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     ///
+     /// if let Entry::Vacant(v) = map.entry("poneyland") {
+     ///     v.into_key();
+     /// }
+     /// ```
+     #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
+     pub fn into_key(self) -> K {
+         self.key
+     }
+
+     /// Sets the value of the entry with the `VacantEntry`'s key,
+     /// and returns a mutable reference to it.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+     ///
+     /// // count the number of occurrences of letters in the vec
+     /// for x in vec!["a","b","a","c","a","b"] {
+     ///     *count.entry(x).or_insert(0) += 1;
+     /// }
+     ///
+     /// assert_eq!(count["a"], 3);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn insert(self, value: V) -> &'a mut V {
+         *self.length += 1;
+
+         // Raw pointer to the inserted value; the reference handed back to
+         // the caller must survive the rebalancing cascade below.
+         let out_ptr;
+
+         // Key/value/right-node still looking for a home after a split.
+         let mut ins_k;
+         let mut ins_v;
+         let mut ins_edge;
+
+         let mut cur_parent = match self.handle.insert(self.key, value) {
+             // The leaf had room: done.
+             (Fit(handle), _) => return handle.into_kv_mut().1,
+             // The leaf split: the middle pair and the new right node must
+             // be re-inserted one level up.
+             (Split(left, k, v, right), ptr) => {
+                 ins_k = k;
+                 ins_v = v;
+                 ins_edge = right;
+                 out_ptr = ptr;
+                 left.ascend().map_err(|n| n.into_root_mut())
+             }
+         };
+
+         // Propagate splits upwards until a node absorbs the insertion, or
+         // the root itself splits and the tree grows a level.
+         loop {
+             match cur_parent {
+                 Ok(parent) => {
+                     match parent.insert(ins_k, ins_v, ins_edge) {
+                         Fit(_) => return unsafe { &mut *out_ptr },
+                         Split(left, k, v, right) => {
+                             ins_k = k;
+                             ins_v = v;
+                             ins_edge = right;
+                             cur_parent = left.ascend().map_err(|n| n.into_root_mut());
+                         }
+                     }
+                 }
+                 Err(root) => {
+                     // The split reached the root: add a new level on top.
+                     root.push_level().push(ins_k, ins_v, ins_edge);
+                     return unsafe { &mut *out_ptr };
+                 }
+             }
+         }
+     }
+ }
+
+ impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
+     /// Gets a reference to the key in the entry.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+     /// ```
+     #[stable(feature = "map_entry_keys", since = "1.10.0")]
+     pub fn key(&self) -> &K {
+         self.handle.reborrow().into_kv().0
+     }
+
+     /// Take ownership of the key and value from the map.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     /// use std::collections::btree_map::Entry;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     ///
+     /// if let Entry::Occupied(o) = map.entry("poneyland") {
+     ///     // We delete the entry from the map.
+     ///     o.remove_entry();
+     /// }
+     ///
+     /// // If now try to get the value, it will panic:
+     /// // println!("{}", map["poneyland"]);
+     /// ```
+     #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
+     pub fn remove_entry(self) -> (K, V) {
+         self.remove_kv()
+     }
+
+     /// Gets a reference to the value in the entry.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     /// use std::collections::btree_map::Entry;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     ///
+     /// if let Entry::Occupied(o) = map.entry("poneyland") {
+     ///     assert_eq!(o.get(), &12);
+     /// }
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn get(&self) -> &V {
+         self.handle.reborrow().into_kv().1
+     }
+
+     /// Gets a mutable reference to the value in the entry.
+     ///
+     /// If you need a reference to the `OccupiedEntry` which may outlive the
+     /// destruction of the `Entry` value, see [`into_mut`].
+     ///
+     /// [`into_mut`]: #method.into_mut
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     /// use std::collections::btree_map::Entry;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     ///
+     /// assert_eq!(map["poneyland"], 12);
+     /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+     ///     *o.get_mut() += 10;
+     ///     assert_eq!(*o.get(), 22);
+     ///
+     ///     // We can use the same Entry multiple times.
+     ///     *o.get_mut() += 2;
+     /// }
+     /// assert_eq!(map["poneyland"], 24);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn get_mut(&mut self) -> &mut V {
+         self.handle.kv_mut().1
+     }
+
+     /// Converts the entry into a mutable reference to its value.
+     ///
+     /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+     ///
+     /// [`get_mut`]: #method.get_mut
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     /// use std::collections::btree_map::Entry;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     ///
+     /// assert_eq!(map["poneyland"], 12);
+     /// if let Entry::Occupied(o) = map.entry("poneyland") {
+     ///     *o.into_mut() += 10;
+     /// }
+     /// assert_eq!(map["poneyland"], 22);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn into_mut(self) -> &'a mut V {
+         self.handle.into_kv_mut().1
+     }
+
+     /// Sets the value of the entry with the `OccupiedEntry`'s key,
+     /// and returns the entry's old value.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     /// use std::collections::btree_map::Entry;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     ///
+     /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+     ///     assert_eq!(o.insert(15), 12);
+     /// }
+     /// assert_eq!(map["poneyland"], 15);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn insert(&mut self, value: V) -> V {
+         mem::replace(self.get_mut(), value)
+     }
+
+     /// Takes the value of the entry out of the map, and returns it.
+     ///
+     /// # Examples
+     ///
+     /// ```
+     /// use std::collections::BTreeMap;
+     /// use std::collections::btree_map::Entry;
+     ///
+     /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+     /// map.entry("poneyland").or_insert(12);
+     ///
+     /// if let Entry::Occupied(o) = map.entry("poneyland") {
+     ///     assert_eq!(o.remove(), 12);
+     /// }
+     /// // If we try to get "poneyland"'s value, it'll panic:
+     /// // println!("{}", map["poneyland"]);
+     /// ```
+     #[stable(feature = "rust1", since = "1.0.0")]
+     pub fn remove(self) -> V {
+         self.remove_kv().1
+     }
+
+     // Removes the entry's key/value pair from the tree, then repairs any
+     // resulting underfull nodes on the way back up.
+     fn remove_kv(self) -> (K, V) {
+         *self.length -= 1;
+
+         let (small_leaf, old_key, old_val) = match self.handle.force() {
+             Leaf(leaf) => {
+                 // Removing from a leaf is direct.
+                 let (hole, old_key, old_val) = leaf.remove();
+                 (hole.into_node(), old_key, old_val)
+             }
+             Internal(mut internal) => {
+                 // Removing from an internal node: swap in the in-order
+                 // successor (the leftmost pair of the right subtree) and
+                 // remove that pair from its leaf instead.
+                 let key_loc = internal.kv_mut().0 as *mut K;
+                 let val_loc = internal.kv_mut().1 as *mut V;
+
+                 let to_remove = first_leaf_edge(internal.right_edge().descend()).right_kv().ok();
+                 let to_remove = unsafe { unwrap_unchecked(to_remove) };
+
+                 let (hole, key, val) = to_remove.remove();
+
+                 let old_key = unsafe { mem::replace(&mut *key_loc, key) };
+                 let old_val = unsafe { mem::replace(&mut *val_loc, val) };
+
+                 (hole.into_node(), old_key, old_val)
+             }
+         };
+
+         // Handle underflow
+         let mut cur_node = small_leaf.forget_type();
+         while cur_node.len() < node::CAPACITY / 2 {
+             match handle_underfull_node(cur_node) {
+                 AtRoot => break,
+                 EmptyParent(_) => unreachable!(),
+                 Merged(parent) => {
+                     if parent.len() == 0 {
+                         // We must be at the root
+                         parent.into_root_mut().pop_level();
+                         break;
+                     } else {
+                         // The parent lost a pair in the merge; it may now
+                         // be underfull itself, so keep walking up.
+                         cur_node = parent.forget_type();
+                     }
+                 }
+                 Stole(_) => break,
+             }
+         }
+
+         (old_key, old_val)
+     }
+ }
+
+ /// Outcome of one attempt to repair an underfull node; see
+ /// `handle_underfull_node`.
+ enum UnderflowResult<'a, K, V> {
+     /// The node has no parent (it is the root), so nothing was done.
+     AtRoot,
+     /// Neither a left nor a right key/value pair was found in the parent
+     /// around this child edge, so nothing could be merged or stolen.
+     EmptyParent(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+     /// The node was merged with a sibling; carries the parent, which lost
+     /// a key/value pair in the process.
+     Merged(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+     /// An entry was stolen from a sibling; carries the parent.
+     Stole(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+ }
+
+ /// Tries to fix up `node` after it became underfull, either by merging it
+ /// with a sibling or by stealing an entry from one through the parent.
+ fn handle_underfull_node<'a, K, V>(node: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>)
+         -> UnderflowResult<'a, K, V> {
+     // The root has no sibling to borrow from; nothing to do.
+     let parent = match node.ascend() {
+         Ok(parent) => parent,
+         Err(_) => return AtRoot,
+     };
+
+     // Prefer the parent key/value pair to the left of this child edge,
+     // falling back to the one on the right.
+     let (is_left, mut handle) = match parent.left_kv() {
+         Ok(left) => (true, left),
+         Err(parent) => match parent.right_kv() {
+             Ok(right) => (false, right),
+             Err(parent) => return EmptyParent(parent.into_node()),
+         },
+     };
+
+     if handle.can_merge() {
+         return Merged(handle.merge().into_node());
+     }
+
+     if is_left {
+         handle.steal_left();
+     } else {
+         handle.steal_right();
+     }
+     Stole(handle.into_node())
+ }
+
+ impl<K: Ord, V, I: Iterator<Item = (K, V)>> Iterator for MergeIter<K, V, I> {
+     type Item = (K, V);
+
+     /// Merges two key-sorted iterators. When both sides carry the same key,
+     /// both are advanced and the value from `right` wins.
+     fn next(&mut self) -> Option<(K, V)> {
+         let ordering = match (self.left.peek(), self.right.peek()) {
+             (None, None) => return None,
+             (Some(_), None) => Ordering::Less,
+             (None, Some(_)) => Ordering::Greater,
+             (Some(&(ref l, _)), Some(&(ref r, _))) => l.cmp(r),
+         };
+
+         match ordering {
+             Ordering::Less => self.left.next(),
+             Ordering::Greater => self.right.next(),
+             Ordering::Equal => {
+                 // Discard the left entry so the right value shadows it.
+                 self.left.next();
+                 self.right.next()
+             }
+         }
+     }
+ }
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod node;
+mod search;
+pub mod map;
+pub mod set;
+
+ #[doc(hidden)]
+ // Internal hook for recovering the *stored* key (not just an equal one)
+ // through a map keyed by `Q`-comparable keys.
+ // NOTE(review): purpose inferred from the name and the map/set layering —
+ // confirm against the `set` module's usage.
+ trait Recover<Q: ?Sized> {
+     type Key;
+
+     /// Returns a reference to the stored key equal to `key`, if any.
+     fn get(&self, key: &Q) -> Option<&Self::Key>;
+     /// Removes and returns the stored key equal to `key`, if any.
+     fn take(&mut self, key: &Q) -> Option<Self::Key>;
+     /// Inserts `key`, returning the previously stored equal key, if any.
+     fn replace(&mut self, key: Self::Key) -> Option<Self::Key>;
+ }
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This is an attempt at an implementation following the ideal
+//
+// ```
+// struct BTreeMap<K, V> {
+// height: usize,
+// root: Option<Box<Node<K, V, height>>>
+// }
+//
+// struct Node<K, V, height: usize> {
+// keys: [K; 2 * B - 1],
+// vals: [V; 2 * B - 1],
+// edges: if height > 0 {
+// [Box<Node<K, V, height - 1>>; 2 * B]
+// } else { () },
+// parent: *const Node<K, V, height + 1>,
+// parent_idx: u16,
+// len: u16,
+// }
+// ```
+//
+// Since Rust doesn't actually have dependent types and polymorphic recursion,
+// we make do with lots of unsafety.
+
+// A major goal of this module is to avoid complexity by treating the tree as a generic (if
+// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
+// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
+// even what underfull means. However, we do rely on a few invariants:
+//
+// - Trees must have uniform depth/height. This means that every path down to a leaf from a
+// given node has exactly the same length.
+// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
+// This implies that even an empty internal node has at least one edge.
+
+use core::marker::PhantomData;
+use core::mem;
+use core::ptr::{self, Unique, NonNull};
+use core::slice;
+
+use alloc::{Global, Alloc, Layout};
+use boxed::Box;
+
+ /// Branching factor: every node stores at most `2 * B - 1` entries.
+ const B: usize = 6;
+ /// `B - 1`; presumably the minimum entries a non-root node may hold —
+ /// TODO confirm at the use sites in `map`.
+ pub const MIN_LEN: usize = B - 1;
+ /// Maximum number of key/value pairs any node can hold.
+ pub const CAPACITY: usize = 2 * B - 1;
+
+ /// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
+ /// these, since only the first `len` keys and values are assumed to be initialized. As such,
+ /// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
+ /// case.
+ ///
+ /// See also rust-lang/rfcs#197, which would make this structure significantly more safe by
+ /// avoiding accidentally dropping unused and uninitialized keys and values.
+ ///
+ /// We put the metadata first so that its position is the same for every `K` and `V`, in order
+ /// to statically allocate a single dummy node to avoid allocations. This struct is `repr(C)` to
+ /// prevent them from being reordered.
+ #[repr(C)]
+ struct LeafNode<K, V> {
+     /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
+     /// This either points to an actual node or is null.
+     parent: *const InternalNode<K, V>,
+
+     /// This node's index into the parent node's `edges` array.
+     /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
+     /// This is only guaranteed to be initialized when `parent` is nonnull.
+     parent_idx: u16,
+
+     /// The number of keys and values this node stores.
+     ///
+     /// This next to `parent_idx` to encourage the compiler to join `len` and
+     /// `parent_idx` into the same 32-bit word, reducing space overhead.
+     len: u16,
+
+     /// The arrays storing the actual data of the node. Only the first `len` elements of each
+     /// array are initialized and valid.
+     keys: [K; CAPACITY],
+     vals: [V; CAPACITY],
+ }
+
+ impl<K, V> LeafNode<K, V> {
+     /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
+     /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
+     // NOTE(review): `mem::uninitialized` is sound here only because no code
+     // reads `keys`/`vals`/`parent_idx` before they are written; later Rust
+     // deprecates it in favour of `MaybeUninit`.
+     unsafe fn new() -> Self {
+         LeafNode {
+             // As a general policy, we leave fields uninitialized if they can be, as this should
+             // be both slightly faster and easier to track in Valgrind.
+             keys: mem::uninitialized(),
+             vals: mem::uninitialized(),
+             parent: ptr::null(),
+             parent_idx: mem::uninitialized(),
+             len: 0
+         }
+     }
+
+     /// Whether this node is the statically allocated shared empty root
+     /// (compared by address against `EMPTY_ROOT_NODE`).
+     fn is_shared_root(&self) -> bool {
+         self as *const _ == &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V>
+     }
+ }
+
+ // We need to implement Sync here in order to make a static instance.
+ unsafe impl Sync for LeafNode<(), ()> {}
+
+ // An empty node used as a placeholder for the root node, to avoid allocations.
+ // We use () in order to save space, since no operation on an empty tree will
+ // ever take a pointer past the first key.
+ // Being a shared `static`, this node must never be mutated; callers check
+ // `is_shared_root()` before any mutation (see e.g. `Root::push_level`).
+ static EMPTY_ROOT_NODE: LeafNode<(), ()> = LeafNode {
+     parent: ptr::null(),
+     parent_idx: 0,
+     len: 0,
+     keys: [(); CAPACITY],
+     vals: [(); CAPACITY],
+ };
+
+ /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
+ /// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
+ /// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
+ /// node, allowing code to act on leaf and internal nodes generically without having to even check
+ /// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
+ #[repr(C)]
+ struct InternalNode<K, V> {
+     /// Keys, values and metadata, laid out first so the leaf-prefix cast
+     /// described above is valid.
+     data: LeafNode<K, V>,
+
+     /// The pointers to the children of this node. `len + 1` of these are considered
+     /// initialized and valid.
+     edges: [BoxedNode<K, V>; 2 * B],
+ }
+
+ impl<K, V> InternalNode<K, V> {
+     /// Creates a new `InternalNode`.
+     ///
+     /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
+     /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
+     /// edges are initialized and valid, meaning that even when the node is empty (having a
+     /// `len` of 0), there must be one initialized and valid edge. This function does not set up
+     /// such an edge.
+     unsafe fn new() -> Self {
+         InternalNode {
+             data: LeafNode::new(),
+             // Left uninitialized: the caller must install edge 0 before use.
+             edges: mem::uninitialized()
+         }
+     }
+ }
+
+ /// An owned pointer to a node. This basically is either `Box<LeafNode<K, V>>` or
+ /// `Box<InternalNode<K, V>>`. However, it contains no information as to which of the two types
+ /// of nodes is actually behind the box, and, partially due to this lack of information, has no
+ /// destructor.
+ struct BoxedNode<K, V> {
+     /// `Unique` keeps ownership semantics and covariance over `K`/`V`.
+     ptr: Unique<LeafNode<K, V>>
+ }
+
+ impl<K, V> BoxedNode<K, V> {
+     /// Takes ownership of a heap-allocated leaf node.
+     fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
+         BoxedNode { ptr: Box::into_unique(node) }
+     }
+
+     /// Takes ownership of a heap-allocated internal node, erasing its type
+     /// by casting to the `LeafNode` prefix (valid thanks to `repr(C)`).
+     fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
+         unsafe {
+             BoxedNode { ptr: Unique::new_unchecked(Box::into_raw(node) as *mut LeafNode<K, V>) }
+         }
+     }
+
+     /// Adopts a raw node pointer. Unsafe: the caller asserts ownership of
+     /// the pointee (no destructor will run regardless).
+     unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
+         BoxedNode { ptr: Unique::from(ptr) }
+     }
+
+     /// Returns the underlying pointer without giving up ownership.
+     fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
+         NonNull::from(self.ptr)
+     }
+ }
+
+ /// An owned tree. Note that despite being owned, this does not have a destructor,
+ /// and must be cleaned up manually.
+ pub struct Root<K, V> {
+     node: BoxedNode<K, V>,
+     /// Number of levels below the root; 0 means the root is a leaf.
+     height: usize
+ }
+
+ // SAFETY: a `Root` uniquely owns its tree, so sharing/sending it is as safe
+ // as sharing/sending the `K`s and `V`s it contains.
+ unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> { }
+ unsafe impl<K: Send, V: Send> Send for Root<K, V> { }
+
+ impl<K, V> Root<K, V> {
+     /// Whether this root still points at the statically allocated empty
+     /// placeholder node (which must never be mutated or freed).
+     pub fn is_shared_root(&self) -> bool {
+         self.as_ref().is_shared_root()
+     }
+
+     /// Creates a root referring to the shared static empty node, avoiding
+     /// any allocation for an empty tree.
+     pub fn shared_empty_root() -> Self {
+         Root {
+             node: unsafe {
+                 BoxedNode::from_ptr(NonNull::new_unchecked(
+                     &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V> as *mut _
+                 ))
+             },
+             height: 0,
+         }
+     }
+
+     /// Allocates a fresh, empty leaf node and makes it the root.
+     pub fn new_leaf() -> Self {
+         Root {
+             node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })),
+             height: 0
+         }
+     }
+
+     /// Borrows the root node immutably.
+     pub fn as_ref(&self)
+             -> NodeRef<marker::Immut, K, V, marker::LeafOrInternal> {
+         NodeRef {
+             height: self.height,
+             node: self.node.as_ptr(),
+             root: self as *const _ as *mut _,
+             _marker: PhantomData,
+         }
+     }
+
+     /// Borrows the root node mutably.
+     pub fn as_mut(&mut self)
+             -> NodeRef<marker::Mut, K, V, marker::LeafOrInternal> {
+         NodeRef {
+             height: self.height,
+             node: self.node.as_ptr(),
+             root: self as *mut _,
+             _marker: PhantomData,
+         }
+     }
+
+     /// Consumes the `Root`, yielding an owning `NodeRef` with no back
+     /// pointer to the (now gone) `Root` value.
+     pub fn into_ref(self)
+             -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+         NodeRef {
+             height: self.height,
+             node: self.node.as_ptr(),
+             root: ptr::null_mut(), // FIXME: Is there anything better to do here?
+             _marker: PhantomData,
+         }
+     }
+
+     /// Adds a new internal node with a single edge, pointing to the previous root, and make that
+     /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
+     pub fn push_level(&mut self)
+             -> NodeRef<marker::Mut, K, V, marker::Internal> {
+         debug_assert!(!self.is_shared_root());
+         let mut new_node = Box::new(unsafe { InternalNode::new() });
+         // Install the old root as edge 0, establishing the `len + 1` edges
+         // invariant for the (empty) new internal node.
+         new_node.edges[0] = unsafe { BoxedNode::from_ptr(self.node.as_ptr()) };
+
+         self.node = BoxedNode::from_internal(new_node);
+         self.height += 1;
+
+         let mut ret = NodeRef {
+             height: self.height,
+             node: self.node.as_ptr(),
+             root: self as *mut _,
+             _marker: PhantomData
+         };
+
+         unsafe {
+             // Make the demoted root's parent pointer refer to the new root.
+             ret.reborrow_mut().first_edge().correct_parent_link();
+         }
+
+         ret
+     }
+
+     /// Removes the root node, using its first child as the new root. This cannot be called when
+     /// the tree consists only of a leaf node. As it is intended only to be called when the root
+     /// has only one edge, no cleanup is done on any of the other children or elements of the root.
+     /// This decreases the height by 1 and is the opposite of `push_level`.
+     pub fn pop_level(&mut self) {
+         debug_assert!(self.height > 0);
+
+         let top = self.node.ptr;
+
+         // Promote the first child to be the new root.
+         self.node = unsafe {
+             BoxedNode::from_ptr(self.as_mut()
+                                     .cast_unchecked::<marker::Internal>()
+                                     .first_edge()
+                                     .descend()
+                                     .node)
+         };
+         self.height -= 1;
+         self.as_mut().as_leaf_mut().parent = ptr::null();
+
+         unsafe {
+             // Free only the old root's own allocation; its (single) child
+             // now lives on as the new root.
+             Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
+         }
+     }
+ }
+
+// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
+// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
+// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
+// However, whenever a public type wraps `NodeRef`, make sure that it has the
+// correct variance.
+/// A reference to a node.
+///
+/// This type has a number of parameters that controls how it acts:
+/// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
+/// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
+/// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
+/// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
+/// - `K` and `V`: These control what types of things are stored in the nodes.
+/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
+/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
+/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
+/// `NodeRef` could be pointing to either type of node.
+pub struct NodeRef<BorrowType, K, V, Type> {
+ // Number of levels below this node; 0 means a leaf.
+ height: usize,
+ // Internal nodes are reached via the same pointer type, cast as needed.
+ node: NonNull<LeafNode<K, V>>,
+ // This is null unless the borrow type is `Mut`
+ // NOTE(review): `as_ref` also fills this in for `Immut` borrows, but only
+ // `Mut` borrows ever dereference it (via `into_root_mut`) — confirm.
+ root: *const Root<K, V>,
+ _marker: PhantomData<(BorrowType, Type)>
+}
+
+// Immutable `NodeRef`s are freely copyable, like shared references.
+impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> { }
+impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+// Thread-safety markers mirror those of `&T`, `&mut T`, and `Box<T>` for the
+// corresponding borrow types.
+unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync
+ for NodeRef<BorrowType, K, V, Type> { }
+
+unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send
+ for NodeRef<marker::Immut<'a>, K, V, Type> { }
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send
+ for NodeRef<marker::Mut<'a>, K, V, Type> { }
+unsafe impl<K: Send, V: Send, Type> Send
+ for NodeRef<marker::Owned, K, V, Type> { }
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+ /// Reinterprets the node pointer as an internal node. Sound because the
+ /// `marker::Internal` type parameter guarantees the node is internal.
+ fn as_internal(&self) -> &InternalNode<K, V> {
+ unsafe {
+ &*(self.node.as_ptr() as *mut InternalNode<K, V>)
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Mutable counterpart of `as_internal`; requires a `Mut` borrow.
+ fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
+ unsafe {
+ &mut *(self.node.as_ptr() as *mut InternalNode<K, V>)
+ }
+ }
+}
+
+
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Finds the length of the node. This is the number of keys or values. In an
+ /// internal node, the number of edges is `len() + 1`.
+ pub fn len(&self) -> usize {
+ self.as_leaf().len as usize
+ }
+
+ /// Returns the height of this node in the whole tree. Zero height denotes the
+ /// leaf level.
+ pub fn height(&self) -> usize {
+ self.height
+ }
+
+ /// Removes any static information about whether this node is a `Leaf` or an
+ /// `Internal` node.
+ pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ }
+ }
+
+ /// Temporarily takes out another, immutable reference to the same node.
+ fn reborrow<'a>(&'a self) -> NodeRef<marker::Immut<'a>, K, V, Type> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ }
+ }
+
+ /// Views the node through its leaf header, which is the common prefix of
+ /// both leaf and internal node layouts.
+ fn as_leaf(&self) -> &LeafNode<K, V> {
+ unsafe {
+ self.node.as_ref()
+ }
+ }
+
+ /// Whether this node is the statically-allocated shared empty root.
+ pub fn is_shared_root(&self) -> bool {
+ self.as_leaf().is_shared_root()
+ }
+
+ /// Borrows the node's keys as a slice of length `len()`.
+ pub fn keys(&self) -> &[K] {
+ self.reborrow().into_key_slice()
+ }
+
+ /// Borrows the node's values as a slice of length `len()`.
+ fn vals(&self) -> &[V] {
+ self.reborrow().into_val_slice()
+ }
+
+ /// Finds the parent of the current node. Returns `Ok(handle)` if the current
+ /// node actually has a parent, where `handle` points to the edge of the parent
+ /// that points to the current node. Returns `Err(self)` if the current node has
+ /// no parent, giving back the original `NodeRef`.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn ascend(self) -> Result<
+ Handle<
+ NodeRef<
+ BorrowType,
+ K, V,
+ marker::Internal
+ >,
+ marker::Edge
+ >,
+ Self
+ > {
+ // A null parent pointer marks the root; `NonNull::new` performs that check.
+ let parent_as_leaf = self.as_leaf().parent as *const LeafNode<K, V>;
+ if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
+ Ok(Handle {
+ node: NodeRef {
+ height: self.height + 1,
+ node: non_zero,
+ root: self.root,
+ _marker: PhantomData
+ },
+ idx: self.as_leaf().parent_idx as usize,
+ _marker: PhantomData
+ })
+ } else {
+ Err(self)
+ }
+ }
+
+ /// Handle on the leftmost edge (before the first key/value pair).
+ pub fn first_edge(self) -> Handle<Self, marker::Edge> {
+ Handle::new_edge(self, 0)
+ }
+
+ /// Handle on the rightmost edge (after the last key/value pair).
+ pub fn last_edge(self) -> Handle<Self, marker::Edge> {
+ let len = self.len();
+ Handle::new_edge(self, len)
+ }
+
+ /// Note that `self` must be nonempty.
+ pub fn first_kv(self) -> Handle<Self, marker::KV> {
+ debug_assert!(self.len() > 0);
+ Handle::new_kv(self, 0)
+ }
+
+ /// Note that `self` must be nonempty.
+ pub fn last_kv(self) -> Handle<Self, marker::KV> {
+ let len = self.len();
+ debug_assert!(len > 0);
+ Handle::new_kv(self, len - 1)
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocates the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
+ pub unsafe fn deallocate_and_ascend(self) -> Option<
+ Handle<
+ NodeRef<
+ marker::Owned,
+ K, V,
+ marker::Internal
+ >,
+ marker::Edge
+ >
+ > {
+ // The shared empty root is static and must never be freed.
+ debug_assert!(!self.is_shared_root());
+ let node = self.node;
+ // Ascend first: after dealloc, reading `self`'s parent would be UB.
+ let ret = self.ascend().ok();
+ Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
+ ret
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocates the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
+ /// Unlike the leaf variant, this frees an `InternalNode` layout.
+ pub unsafe fn deallocate_and_ascend(self) -> Option<
+ Handle<
+ NodeRef<
+ marker::Owned,
+ K, V,
+ marker::Internal
+ >,
+ marker::Edge
+ >
+ > {
+ let node = self.node;
+ let ret = self.ascend().ok();
+ Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
+ ret
+ }
+}
+
+impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Unsafely asserts to the compiler some static information about whether this
+ /// node is a `Leaf`. The caller must guarantee the node really has `NewType`.
+ unsafe fn cast_unchecked<NewType>(&mut self)
+ -> NodeRef<marker::Mut, K, V, NewType> {
+
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ }
+ }
+
+ /// Temporarily takes out another, mutable reference to the same node. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+ unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut, K, V, Type> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ }
+ }
+
+ /// Mutable view of the node's leaf header (length, parent link, keys, vals).
+ fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
+ unsafe {
+ self.node.as_mut()
+ }
+ }
+
+ /// Mutable slice over the node's keys, length `len()`.
+ fn keys_mut(&mut self) -> &mut [K] {
+ unsafe { self.reborrow_mut().into_key_slice_mut() }
+ }
+
+ /// Mutable slice over the node's values, length `len()`.
+ fn vals_mut(&mut self) -> &mut [V] {
+ unsafe { self.reborrow_mut().into_val_slice_mut() }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn into_key_slice(self) -> &'a [K] {
+ // When taking a pointer to the keys, if our key has a stricter
+ // alignment requirement than the shared root does, then the pointer
+ // would be out of bounds, which LLVM assumes will not happen. If the
+ // alignment is more strict, we need to make an empty slice that doesn't
+ // use an out of bounds pointer.
+ if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
+ &[]
+ } else {
+ // Here either it's not the root, or the alignment is less strict,
+ // in which case the keys pointer will point "one-past-the-end" of
+ // the node, which is allowed by LLVM.
+ unsafe {
+ slice::from_raw_parts(
+ self.as_leaf().keys.as_ptr(),
+ self.len()
+ )
+ }
+ }
+ }
+
+ /// Borrows the values; unlike keys, values are never read on the shared
+ /// root, hence the assertion instead of a special case.
+ fn into_val_slice(self) -> &'a [V] {
+ debug_assert!(!self.is_shared_root());
+ unsafe {
+ slice::from_raw_parts(
+ self.as_leaf().vals.as_ptr(),
+ self.len()
+ )
+ }
+ }
+
+ /// Borrows keys and values simultaneously.
+ fn into_slices(self) -> (&'a [K], &'a [V]) {
+ // `ptr::read` duplicates `self` (it is not `Copy` for all `Type`s) so
+ // both by-value slice methods can be called; harmless since `Immut`
+ // references are shared.
+ let k = unsafe { ptr::read(&self) };
+ (k.into_key_slice(), self.into_val_slice())
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Gets a mutable reference to the root itself. This is useful primarily when the
+ /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
+ pub fn into_root_mut(self) -> &'a mut Root<K, V> {
+ unsafe {
+ &mut *(self.root as *mut Root<K, V>)
+ }
+ }
+
+ /// Mutable key slice; mirrors `into_key_slice` including the shared-root
+ /// alignment workaround (see that method for the full explanation).
+ fn into_key_slice_mut(mut self) -> &'a mut [K] {
+ if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
+ &mut []
+ } else {
+ unsafe {
+ slice::from_raw_parts_mut(
+ &mut self.as_leaf_mut().keys as *mut [K] as *mut K,
+ self.len()
+ )
+ }
+ }
+ }
+
+ /// Mutable value slice; never valid on the shared root.
+ fn into_val_slice_mut(mut self) -> &'a mut [V] {
+ debug_assert!(!self.is_shared_root());
+ unsafe {
+ slice::from_raw_parts_mut(
+ &mut self.as_leaf_mut().vals as *mut [V] as *mut V,
+ self.len()
+ )
+ }
+ }
+
+ /// Mutable keys and values simultaneously. The `ptr::read` duplicates
+ /// `self`; sound because keys and values are disjoint regions of the node.
+ fn into_slices_mut(self) -> (&'a mut [K], &'a mut [V]) {
+ let k = unsafe { ptr::read(&self) };
+ (k.into_key_slice_mut(), self.into_val_slice_mut())
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+ /// Adds a key/value pair to the end of the node. The node must have spare
+ /// capacity and must not be the shared root.
+ pub fn push(&mut self, key: K, val: V) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() < CAPACITY);
+ debug_assert!(!self.is_shared_root());
+
+ let idx = self.len();
+
+ unsafe {
+ // The slot at `idx` is uninitialized, so write without dropping.
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+ }
+
+ self.as_leaf_mut().len += 1;
+ }
+
+ /// Adds a key/value pair to the beginning of the node, shifting existing
+ /// pairs right by one.
+ pub fn push_front(&mut self, key: K, val: V) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() < CAPACITY);
+ debug_assert!(!self.is_shared_root());
+
+ unsafe {
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
+ }
+
+ self.as_leaf_mut().len += 1;
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Adds a key/value pair and an edge to go to the right of that pair to
+ /// the end of the node.
+ pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but this is an internal module
+ // The pushed subtree must sit exactly one level below this node.
+ debug_assert!(edge.height == self.height - 1);
+ debug_assert!(self.len() < CAPACITY);
+
+ let idx = self.len();
+
+ unsafe {
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+ ptr::write(self.as_internal_mut().edges.get_unchecked_mut(idx + 1), edge.node);
+
+ self.as_leaf_mut().len += 1;
+
+ // The adopted child must point back at its new parent.
+ Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
+ }
+ }
+
+ /// Fixes the parent links of the children in edge positions
+ /// `first..after_last`.
+ fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
+ for i in first..after_last {
+ Handle::new_edge(unsafe { self.reborrow_mut() }, i).correct_parent_link();
+ }
+ }
+
+ /// Fixes the parent links of all `len() + 1` children.
+ fn correct_all_childrens_parent_links(&mut self) {
+ let len = self.len();
+ self.correct_childrens_parent_links(0, len + 1);
+ }
+
+ /// Adds a key/value pair and an edge to go to the left of that pair to
+ /// the beginning of the node.
+ pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(edge.height == self.height - 1);
+ debug_assert!(self.len() < CAPACITY);
+
+ unsafe {
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
+ // Edge count is always one more than the key count.
+ slice_insert(
+ slice::from_raw_parts_mut(
+ self.as_internal_mut().edges.as_mut_ptr(),
+ self.len()+1
+ ),
+ 0,
+ edge.node
+ );
+
+ self.as_leaf_mut().len += 1;
+
+ // Every child shifted right, so every parent_idx must be refreshed.
+ self.correct_all_childrens_parent_links();
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Removes a key/value pair from the end of this node. If this is an internal node,
+ /// also removes the edge that was to the right of that pair and returns it as a
+ /// detached subtree (`Some(root)`); for a leaf the third element is `None`.
+ pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() > 0);
+
+ let idx = self.len() - 1;
+
+ unsafe {
+ // Move the last pair out; the decremented `len` below makes the
+ // slots logically uninitialized again.
+ let key = ptr::read(self.keys().get_unchecked(idx));
+ let val = ptr::read(self.vals().get_unchecked(idx));
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(internal) => {
+ let edge = ptr::read(internal.as_internal().edges.get_unchecked(idx + 1));
+ let mut new_root = Root { node: edge, height: internal.height - 1 };
+ // The detached subtree becomes its own root: clear its parent.
+ new_root.as_mut().as_leaf_mut().parent = ptr::null();
+ Some(new_root)
+ }
+ };
+
+ self.as_leaf_mut().len -= 1;
+ (key, val, edge)
+ }
+ }
+
+ /// Removes a key/value pair from the beginning of this node. If this is an internal node,
+ /// also removes the edge that was to the left of that pair.
+ pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() > 0);
+
+ let old_len = self.len();
+
+ unsafe {
+ let key = slice_remove(self.keys_mut(), 0);
+ let val = slice_remove(self.vals_mut(), 0);
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(mut internal) => {
+ let edge = slice_remove(
+ slice::from_raw_parts_mut(
+ internal.as_internal_mut().edges.as_mut_ptr(),
+ old_len+1
+ ),
+ 0
+ );
+
+ let mut new_root = Root { node: edge, height: internal.height - 1 };
+ new_root.as_mut().as_leaf_mut().parent = ptr::null();
+
+ // Remaining children shifted left: refresh their parent_idx.
+ for i in 0..old_len {
+ Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
+ }
+
+ Some(new_root)
+ }
+ };
+
+ self.as_leaf_mut().len -= 1;
+
+ (key, val, edge)
+ }
+ }
+
+ /// Raw pointers to the starts of the key and value arrays, for bulk moves.
+ fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
+ (
+ self.keys_mut().as_mut_ptr(),
+ self.vals_mut().as_mut_ptr()
+ )
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Checks whether a node is an `Internal` node or a `Leaf` node.
+ /// Height 0 is always a leaf; anything above is internal.
+ pub fn force(self) -> ForceResult<
+ NodeRef<BorrowType, K, V, marker::Leaf>,
+ NodeRef<BorrowType, K, V, marker::Internal>
+ > {
+ if self.height == 0 {
+ ForceResult::Leaf(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ })
+ } else {
+ ForceResult::Internal(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ })
+ }
+ }
+}
+
+/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
+/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
+/// pair) or `Edge` (signifying a handle on an edge).
+///
+/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
+/// a child node, these represent the spaces where child pointers would go between the key/value
+/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
+/// to the left of the node, one between the two pairs, and one at the right of the node.
+pub struct Handle<Node, Type> {
+ node: Node,
+ // KV index, or edge index (0..=len) depending on `Type`.
+ idx: usize,
+ _marker: PhantomData<Type>
+}
+
+impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
+// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
+// `Clone`able is when it is an immutable reference and therefore `Copy`.
+impl<Node: Copy, Type> Clone for Handle<Node, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<Node, Type> Handle<Node, Type> {
+ /// Retrieves the node that contains the edge or key/value pair this handle points to.
+ pub fn into_node(self) -> Node {
+ self.node
+ }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
+ /// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`.
+ pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ // Necessary for correctness, but in a private module
+ debug_assert!(idx < node.len());
+
+ Handle {
+ node,
+ idx,
+ _marker: PhantomData
+ }
+ }
+
+ /// The edge immediately to the left of this pair (same index).
+ pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ Handle::new_edge(self.node, self.idx)
+ }
+
+ /// The edge immediately to the right of this pair (index + 1).
+ pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ Handle::new_edge(self.node, self.idx + 1)
+ }
+}
+
+// Two handles are equal when they point at the same position in the same node.
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq
+ for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
+
+ fn eq(&self, other: &Self) -> bool {
+ self.node.node == other.node.node && self.idx == other.idx
+ }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
+
+ /// Temporarily takes out another, immutable handle on the same location.
+ pub fn reborrow(&self)
+ -> Handle<NodeRef<marker::Immut, K, V, NodeType>, HandleType> {
+
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle {
+ node: self.node.reborrow(),
+ idx: self.idx,
+ _marker: PhantomData
+ }
+ }
+}
+
+impl<'a, K, V, NodeType, HandleType>
+ Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
+
+ /// Temporarily takes out another, mutable handle on the same location. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+ pub unsafe fn reborrow_mut(&mut self)
+ -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
+
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle {
+ node: self.node.reborrow_mut(),
+ idx: self.idx,
+ _marker: PhantomData
+ }
+ }
+}
+
+impl<BorrowType, K, V, NodeType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+
+ /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
+ /// `node.len()` (there is one more edge than there are pairs).
+ pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ // Necessary for correctness, but in a private module
+ debug_assert!(idx <= node.len());
+
+ Handle {
+ node,
+ idx,
+ _marker: PhantomData
+ }
+ }
+
+ /// The key/value pair to this edge's left, or `Err(self)` if this is the
+ /// leftmost edge.
+ pub fn left_kv(self)
+ -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+
+ if self.idx > 0 {
+ Ok(Handle::new_kv(self.node, self.idx - 1))
+ } else {
+ Err(self)
+ }
+ }
+
+ /// The key/value pair to this edge's right, or `Err(self)` if this is the
+ /// rightmost edge.
+ pub fn right_kv(self)
+ -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+
+ if self.idx < self.node.len() {
+ Ok(Handle::new_kv(self.node, self.idx))
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method assumes that there is enough space in the node for the new
+ /// pair to fit.
+ ///
+ /// The returned pointer points to the inserted value.
+ fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+ // Necessary for correctness, but in a private module
+ debug_assert!(self.node.len() < CAPACITY);
+ debug_assert!(!self.node.is_shared_root());
+
+ unsafe {
+ slice_insert(self.node.keys_mut(), self.idx, key);
+ slice_insert(self.node.vals_mut(), self.idx, val);
+
+ self.node.as_leaf_mut().len += 1;
+
+ self.node.vals_mut().get_unchecked_mut(self.idx)
+ }
+ }
+
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method splits the node if there isn't enough room.
+ ///
+ /// The returned pointer points to the inserted value.
+ pub fn insert(mut self, key: K, val: V)
+ -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
+
+ if self.node.len() < CAPACITY {
+ let ptr = self.insert_fit(key, val);
+ (InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr)
+ } else {
+ // Full node: split around the middle pair (index B), then insert
+ // into whichever half the edge index falls in.
+ let middle = Handle::new_kv(self.node, B);
+ let (mut left, k, v, mut right) = middle.split();
+ let ptr = if self.idx <= B {
+ unsafe {
+ Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val)
+ }
+ } else {
+ unsafe {
+ // Indices past the split are relative to the right half.
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Leaf>(),
+ self.idx - (B + 1)
+ ).insert_fit(key, val)
+ }
+ };
+ (InsertResult::Split(left, k, v, right), ptr)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ /// Fixes the parent pointer and index in the child node below this edge. This is useful
+ /// when the ordering of edges has been changed, such as in the various `insert` methods.
+ fn correct_parent_link(mut self) {
+ let idx = self.idx as u16;
+ let ptr = self.node.as_internal_mut() as *mut _;
+ let mut child = self.descend();
+ child.as_leaf_mut().parent = ptr;
+ child.as_leaf_mut().parent_idx = idx;
+ }
+
+ /// Unsafely asserts to the compiler some static information about whether the underlying
+ /// node of this handle is a `Leaf`.
+ unsafe fn cast_unchecked<NewType>(&mut self)
+ -> Handle<NodeRef<marker::Mut, K, V, NewType>, marker::Edge> {
+
+ Handle::new_edge(self.node.cast_unchecked(), self.idx)
+ }
+
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method assumes
+ /// that there is enough space in the node for the new pair to fit.
+ fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but in an internal module
+ debug_assert!(self.node.len() < CAPACITY);
+ debug_assert!(edge.height == self.node.height - 1);
+
+ unsafe {
+ // This cast is a lie, but it allows us to reuse the key/value insertion logic.
+ self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
+
+ slice_insert(
+ slice::from_raw_parts_mut(
+ self.node.as_internal_mut().edges.as_mut_ptr(),
+ self.node.len()
+ ),
+ self.idx + 1,
+ edge.node
+ );
+
+ // Children at and after the inserted edge moved; fix their links.
+ for i in (self.idx+1)..(self.node.len()+1) {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ }
+ }
+
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method splits
+ /// the node if there isn't enough room.
+ pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
+ -> InsertResult<'a, K, V, marker::Internal> {
+
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(edge.height == self.node.height - 1);
+
+ if self.node.len() < CAPACITY {
+ self.insert_fit(key, val, edge);
+ InsertResult::Fit(Handle::new_kv(self.node, self.idx))
+ } else {
+ // Full node: split at the middle pair, insert into the proper half
+ // (mirrors the leaf-edge `insert` above).
+ let middle = Handle::new_kv(self.node, B);
+ let (mut left, k, v, mut right) = middle.split();
+ if self.idx <= B {
+ unsafe {
+ Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
+ }
+ } else {
+ unsafe {
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Internal>(),
+ self.idx - (B + 1)
+ ).insert_fit(key, val, edge);
+ }
+ }
+ InsertResult::Split(left, k, v, right)
+ }
+ }
+}
+
+impl<BorrowType, K, V>
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+
+ /// Finds the node pointed to by this edge.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ // The child sits one level below its parent.
+ height: self.node.height - 1,
+ node: unsafe { self.node.as_internal().edges.get_unchecked(self.idx).as_ptr() },
+ root: self.node.root,
+ _marker: PhantomData
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType>
+ Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
+
+ /// Shared references to the key and value at this handle's position.
+ pub fn into_kv(self) -> (&'a K, &'a V) {
+ let (keys, vals) = self.node.into_slices();
+ unsafe {
+ // `new_kv` guarantees `idx < len`, so the index is in bounds.
+ (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType>
+ Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+
+ /// Mutable references to the key and value, consuming the handle so the
+ /// borrows can live for the full `'a`.
+ pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
+ let (keys, vals) = self.node.into_slices_mut();
+ unsafe {
+ (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+ }
+ }
+}
+
+impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ /// Mutable references to the key and value without consuming the handle
+ /// (borrows last only as long as `&mut self`).
+ pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
+ unsafe {
+ let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
+ (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the key/value pairs to the left of
+ /// this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the key/value pairs to the right of this handle are put into a newly
+ /// allocated node.
+ pub fn split(mut self)
+ -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
+ debug_assert!(!self.node.is_shared_root());
+ unsafe {
+ let mut new_node = Box::new(LeafNode::new());
+
+ // Move out the middle pair; its slot is truncated away below.
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().offset(self.idx as isize + 1),
+ new_node.keys.as_mut_ptr(),
+ new_len
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().offset(self.idx as isize + 1),
+ new_node.vals.as_mut_ptr(),
+ new_len
+ );
+
+ // Truncating `len` makes the moved-from slots logically dead.
+ self.node.as_leaf_mut().len = self.idx as u16;
+ new_node.len = new_len as u16;
+
+ (
+ self.node,
+ k, v,
+ Root {
+ node: BoxedNode::from_leaf(new_node),
+ height: 0
+ }
+ )
+ }
+ }
+
+ /// Removes the key/value pair pointed to by this handle, returning the edge between the
+ /// now adjacent key/value pairs to the left and right of this handle.
+ pub fn remove(mut self)
+ -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
+ debug_assert!(!self.node.is_shared_root());
+ unsafe {
+ let k = slice_remove(self.node.keys_mut(), self.idx);
+ let v = slice_remove(self.node.vals_mut(), self.idx);
+ self.node.as_leaf_mut().len -= 1;
+ (self.left_edge(), k, v)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the edges and key/value pairs to the
+ /// left of this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the edges and key/value pairs to the right of this handle are put into
+ /// a newly allocated node.
+ pub fn split(mut self)
+ -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
+ unsafe {
+ let mut new_node = Box::new(InternalNode::new());
+
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ // The new sibling sits at the same level as the node being split.
+ let height = self.node.height;
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().offset(self.idx as isize + 1),
+ new_node.data.keys.as_mut_ptr(),
+ new_len
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().offset(self.idx as isize + 1),
+ new_node.data.vals.as_mut_ptr(),
+ new_len
+ );
+ // One more edge than keys moves to the new node.
+ ptr::copy_nonoverlapping(
+ self.node.as_internal().edges.as_ptr().offset(self.idx as isize + 1),
+ new_node.edges.as_mut_ptr(),
+ new_len + 1
+ );
+
+ self.node.as_leaf_mut().len = self.idx as u16;
+ new_node.data.len = new_len as u16;
+
+ let mut new_root = Root {
+ node: BoxedNode::from_internal(new_node),
+ height,
+ };
+
+ // Moved children must point back at their new parent.
+ for i in 0..(new_len+1) {
+ Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
+ }
+
+ (
+ self.node,
+ k, v,
+ new_root
+ )
+ }
+ }
+
+ /// Returns whether it is valid to call `.merge()`, i.e., whether there is enough room in
+ /// a node to hold the combination of the nodes to the left and right of this handle along
+ /// with the key/value pair at this handle.
+ pub fn can_merge(&self) -> bool {
+ (
+ self.reborrow()
+ .left_edge()
+ .descend()
+ .len()
+ + self.reborrow()
+ .right_edge()
+ .descend()
+ .len()
+ + 1
+ ) <= CAPACITY
+ }
+
+ /// Combines the node immediately to the left of this handle, the key/value pair pointed
+ /// to by this handle, and the node immediately to the right of this handle into one new
+ /// child of the underlying node, returning an edge referencing that new child.
+ ///
+ /// Assumes that this edge `.can_merge()`.
+ pub fn merge(mut self)
+ -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ // Duplicate the handle so both child edges can be descended while
+ // `self` remains usable for mutating the parent below.
+ let self1 = unsafe { ptr::read(&self) };
+ let self2 = unsafe { ptr::read(&self) };
+ let mut left_node = self1.left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = self2.right_edge().descend();
+ let right_len = right_node.len();
+
+ // necessary for correctness, but in a private module
+ debug_assert!(left_len + right_len + 1 <= CAPACITY);
+
+ unsafe {
+ // The separating key/value from the parent goes between the halves.
+ ptr::write(left_node.keys_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.keys_mut(), self.idx));
+ ptr::copy_nonoverlapping(
+ right_node.keys().as_ptr(),
+ left_node.keys_mut().as_mut_ptr().offset(left_len as isize + 1),
+ right_len
+ );
+ ptr::write(left_node.vals_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.vals_mut(), self.idx));
+ ptr::copy_nonoverlapping(
+ right_node.vals().as_ptr(),
+ left_node.vals_mut().as_mut_ptr().offset(left_len as isize + 1),
+ right_len
+ );
+
+ // Drop the parent's edge to the (soon-freed) right node and refresh
+ // the shifted children's parent links.
+ slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
+ for i in self.idx+1..self.node.len() {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ self.node.as_leaf_mut().len -= 1;
+
+ left_node.as_leaf_mut().len += right_len as u16 + 1;
+
+ // height > 1 means the merged children are themselves internal and
+ // carry edges that must be moved and re-parented too.
+ if self.node.height > 1 {
+ ptr::copy_nonoverlapping(
+ right_node.cast_unchecked().as_internal().edges.as_ptr(),
+ left_node.cast_unchecked()
+ .as_internal_mut()
+ .edges
+ .as_mut_ptr()
+ .offset(left_len as isize + 1),
+ right_len + 1
+ );
+
+ for i in left_len+1..left_len+right_len+2 {
+ Handle::new_edge(
+ left_node.cast_unchecked().reborrow_mut(),
+ i
+ ).correct_parent_link();
+ }
+
+ Global.dealloc(
+ right_node.node.cast(),
+ Layout::new::<InternalNode<K, V>>(),
+ );
+ } else {
+ Global.dealloc(
+ right_node.node.cast(),
+ Layout::new::<LeafNode<K, V>>(),
+ );
+ }
+
+ Handle::new_edge(self.node, self.idx)
+ }
+ }
+
+    /// This removes a key/value pair from the left child and replaces it with the key/value pair
+    /// pointed to by this handle while pushing the old key/value pair of this handle into the right
+    /// child.
+    pub fn steal_left(&mut self) {
+        unsafe {
+            // Take the right-most pair (and its trailing edge, if any) out of the
+            // left child.
+            let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
+
+            // Swap the stolen pair with the separator stored in the parent.
+            let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+            let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+            // Push the old separator down into the front of the right child.
+            // NOTE(review): the `unwrap` presumes `pop()` yields `Some(edge)` exactly
+            // when the children are internal nodes — confirm against `pop`'s contract.
+            match self.reborrow_mut().right_edge().descend().force() {
+                ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
+                ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap())
+            }
+        }
+    }
+
+    /// This removes a key/value pair from the right child and replaces it with the key/value pair
+    /// pointed to by this handle while pushing the old key/value pair of this handle into the left
+    /// child.
+    pub fn steal_right(&mut self) {
+        unsafe {
+            // Take the left-most pair (and its leading edge, if any) out of the
+            // right child.
+            let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
+
+            // Swap the stolen pair with the separator stored in the parent.
+            let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+            let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+            // Push the old separator down onto the back of the left child.
+            // NOTE(review): the `unwrap` presumes `pop_front()` yields `Some(edge)`
+            // exactly when the children are internal nodes — confirm against its contract.
+            match self.reborrow_mut().left_edge().descend().force() {
+                ForceResult::Leaf(mut leaf) => leaf.push(k, v),
+                ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap())
+            }
+        }
+    }
+
+    /// This does stealing similar to `steal_left` but steals multiple elements at once.
+    pub fn bulk_steal_left(&mut self, count: usize) {
+        unsafe {
+            // Bitwise-duplicate the handle to descend into both children; the copies
+            // are consumed by `descend()`.
+            let mut left_node = ptr::read(self).left_edge().descend();
+            let left_len = left_node.len();
+            let mut right_node = ptr::read(self).right_edge().descend();
+            let right_len = right_node.len();
+
+            // Make sure that we may steal safely.
+            debug_assert!(right_len + count <= CAPACITY);
+            debug_assert!(left_len >= count);
+
+            let new_left_len = left_len - count;
+
+            // Move data.
+            {
+                let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+                let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+                let parent_kv = {
+                    let kv = self.reborrow_mut().into_kv_mut();
+                    (kv.0 as *mut K, kv.1 as *mut V)
+                };
+
+                // Make room for stolen elements in the right child.
+                // (`ptr::copy` handles the overlapping ranges here.)
+                ptr::copy(right_kv.0,
+                          right_kv.0.offset(count as isize),
+                          right_len);
+                ptr::copy(right_kv.1,
+                          right_kv.1.offset(count as isize),
+                          right_len);
+
+                // Move elements from the left child to the right one.
+                // Only `count - 1` move directly: the remaining slot is filled by the
+                // old parent separator below, and the left-most stolen pair becomes
+                // the new separator.
+                move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
+
+                // Move parent's key/value pair to the right child.
+                move_kv(parent_kv, 0, right_kv, count - 1, 1);
+
+                // Move the left-most stolen pair to the parent.
+                move_kv(left_kv, new_left_len, parent_kv, 0, 1);
+            }
+
+            left_node.reborrow_mut().as_leaf_mut().len -= count as u16;
+            right_node.reborrow_mut().as_leaf_mut().len += count as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+                    // Make room for stolen edges.
+                    let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+                    ptr::copy(right_edges,
+                              right_edges.offset(count as isize),
+                              right_len + 1);
+                    right.correct_childrens_parent_links(count, count + right_len + 1);
+
+                    move_edges(left, new_left_len + 1, right, 0, count);
+                },
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
+                // Siblings always have the same height, so mixed leaf/internal
+                // combinations cannot occur.
+                _ => { unreachable!(); }
+            }
+        }
+    }
+
+    /// The symmetric clone of `bulk_steal_left`.
+    pub fn bulk_steal_right(&mut self, count: usize) {
+        unsafe {
+            // Bitwise-duplicate the handle to descend into both children; the copies
+            // are consumed by `descend()`.
+            let mut left_node = ptr::read(self).left_edge().descend();
+            let left_len = left_node.len();
+            let mut right_node = ptr::read(self).right_edge().descend();
+            let right_len = right_node.len();
+
+            // Make sure that we may steal safely.
+            debug_assert!(left_len + count <= CAPACITY);
+            debug_assert!(right_len >= count);
+
+            let new_right_len = right_len - count;
+
+            // Move data.
+            {
+                let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+                let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+                let parent_kv = {
+                    let kv = self.reborrow_mut().into_kv_mut();
+                    (kv.0 as *mut K, kv.1 as *mut V)
+                };
+
+                // Move parent's key/value pair to the left child.
+                move_kv(parent_kv, 0, left_kv, left_len, 1);
+
+                // Move elements from the right child to the left one.
+                // Only `count - 1` move directly: the right-most stolen pair becomes
+                // the new parent separator below.
+                move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
+
+                // Move the right-most stolen pair to the parent.
+                move_kv(right_kv, count - 1, parent_kv, 0, 1);
+
+                // Fix right indexing (close the gap left by the stolen prefix;
+                // `ptr::copy` handles the overlap).
+                ptr::copy(right_kv.0.offset(count as isize),
+                          right_kv.0,
+                          new_right_len);
+                ptr::copy(right_kv.1.offset(count as isize),
+                          right_kv.1,
+                          new_right_len);
+            }
+
+            left_node.reborrow_mut().as_leaf_mut().len += count as u16;
+            right_node.reborrow_mut().as_leaf_mut().len -= count as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+                    move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
+
+                    // Fix right indexing.
+                    let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+                    ptr::copy(right_edges.offset(count as isize),
+                              right_edges,
+                              new_right_len + 1);
+                    right.correct_childrens_parent_links(0, new_right_len + 1);
+                },
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
+                // Siblings always have the same height, so mixed leaf/internal
+                // combinations cannot occur.
+                _ => { unreachable!(); }
+            }
+        }
+    }
+}
+
+/// Copies `count` keys and `count` values starting at `source_offset` in `source`
+/// to `dest_offset` in `dest`.
+///
+/// Unsafe: the offsets and `count` must be in bounds for both buffers, and the
+/// source and destination ranges must not overlap (`copy_nonoverlapping`).
+unsafe fn move_kv<K, V>(
+    source: (*mut K, *mut V), source_offset: usize,
+    dest: (*mut K, *mut V), dest_offset: usize,
+    count: usize)
+{
+    ptr::copy_nonoverlapping(source.0.offset(source_offset as isize),
+                             dest.0.offset(dest_offset as isize),
+                             count);
+    ptr::copy_nonoverlapping(source.1.offset(source_offset as isize),
+                             dest.1.offset(dest_offset as isize),
+                             count);
+}
+
+// Source and destination must have the same height.
+/// Copies `count` edges from `source` (starting at `source_offset`) into `dest`
+/// (starting at `dest_offset`) and re-parents the moved children to `dest`.
+///
+/// Unsafe: offsets and `count` must be in bounds and the ranges must not overlap.
+unsafe fn move_edges<K, V>(
+    mut source: NodeRef<marker::Mut, K, V, marker::Internal>, source_offset: usize,
+    mut dest: NodeRef<marker::Mut, K, V, marker::Internal>, dest_offset: usize,
+    count: usize)
+{
+    let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
+    let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
+    ptr::copy_nonoverlapping(source_ptr.offset(source_offset as isize),
+                             dest_ptr.offset(dest_offset as isize),
+                             count);
+    // The moved children still point at `source` as their parent; fix that up.
+    dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
+}
+
+impl<BorrowType, K, V, HandleType>
+        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
+
+    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node,
+    /// carrying the handle's position (`idx`) over unchanged into the result.
+    pub fn force(self) -> ForceResult<
+        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
+        Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
+    > {
+        let Handle { node, idx, .. } = self;
+        // Force the node itself, then rebuild a handle of the matching flavor
+        // around the forced node.
+        match node.force() {
+            ForceResult::Leaf(node) =>
+                ForceResult::Leaf(Handle { node, idx, _marker: PhantomData }),
+            ForceResult::Internal(node) =>
+                ForceResult::Internal(Handle { node, idx, _marker: PhantomData }),
+        }
+    }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+    /// Move the suffix after `self` from one node to another one. `right` must be empty.
+    /// The first edge of `right` remains unchanged.
+    pub fn move_suffix(&mut self,
+            right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>) {
+        unsafe {
+            // Everything at index `self.idx` and beyond moves; the left node keeps
+            // exactly `self.idx` pairs.
+            let left_new_len = self.idx;
+            let mut left_node = self.reborrow_mut().into_node();
+
+            let right_new_len = left_node.len() - left_new_len;
+            let mut right_node = right.reborrow_mut();
+
+            debug_assert!(right_node.len() == 0);
+            debug_assert!(left_node.height == right_node.height);
+
+            let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+            let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+
+
+            move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
+
+            left_node.reborrow_mut().as_leaf_mut().len = left_new_len as u16;
+            right_node.reborrow_mut().as_leaf_mut().len = right_new_len as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(right)) => {
+                    // Edges start at destination index 1 so that `right`'s first edge
+                    // stays in place, as documented above.
+                    move_edges(left, left_new_len + 1, right, 1, right_new_len);
+                },
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
+                // Both nodes have the same height (asserted above), so mixed
+                // combinations cannot occur.
+                _ => { unreachable!(); }
+            }
+        }
+    }
+}
+
+/// The result of `force()`-ing a `LeafOrInternal` node or handle down to a
+/// statically-known node type.
+pub enum ForceResult<Leaf, Internal> {
+    Leaf(Leaf),
+    Internal(Internal)
+}
+
+/// The result of an insertion into a node: either the pair fit (yielding a
+/// handle to it), or the node split, yielding the separating key/value pair
+/// and the new right-hand tree that the caller must attach to the parent.
+pub enum InsertResult<'a, K, V, Type> {
+    Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
+    Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>)
+}
+
+/// Zero-sized type-level tags used as `NodeRef`/`Handle` type parameters.
+/// The uninhabited `enum`s can never be instantiated; they exist purely to
+/// select impls at compile time.
+pub mod marker {
+    use core::marker::PhantomData;
+
+    // Node-type tags.
+    pub enum Leaf { }
+    pub enum Internal { }
+    pub enum LeafOrInternal { }
+
+    // Borrow-type tags; `Immut`/`Mut` carry lifetimes via `PhantomData`.
+    pub enum Owned { }
+    pub struct Immut<'a>(PhantomData<&'a ()>);
+    pub struct Mut<'a>(PhantomData<&'a mut ()>);
+
+    // Handle-type tags: a handle points either at a key/value pair or an edge.
+    pub enum KV { }
+    pub enum Edge { }
+}
+
+/// Inserts `val` at `idx`, shifting `slice[idx..]` one slot to the right.
+///
+/// Note the shift copies `slice.len() - idx` elements to `[idx + 1, len + 1)`,
+/// i.e. it writes one element past the end of `slice`. The caller must guarantee
+/// valid spare capacity directly after the slice — presumably the fixed-size
+/// node arrays provide it; confirm at each call site.
+unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
+    ptr::copy(
+        slice.as_ptr().offset(idx as isize),
+        slice.as_mut_ptr().offset(idx as isize + 1),
+        slice.len() - idx
+    );
+    // The slot at `idx` was copied out above; overwrite without dropping.
+    ptr::write(slice.get_unchecked_mut(idx), val);
+}
+
+/// Reads the element at `idx` out of `slice` and shifts `slice[idx + 1..]` one
+/// slot to the left to close the gap.
+///
+/// After the call the last slot holds a bitwise duplicate of the former last
+/// element; the caller must treat it as logically uninitialized (no double drop).
+unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
+    let ret = ptr::read(slice.get_unchecked(idx));
+    ptr::copy(
+        slice.as_ptr().offset(idx as isize + 1),
+        slice.as_mut_ptr().offset(idx as isize),
+        slice.len() - idx - 1
+    );
+    ret
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use core::cmp::Ordering;
+
+use borrow::Borrow;
+
+use super::node::{Handle, NodeRef, marker};
+
+use super::node::ForceResult::*;
+use self::SearchResult::*;
+
+/// The outcome of a key search: either the key was found at a key/value handle,
+/// or the search should continue (or, at a leaf, stop) through an edge handle.
+pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
+    Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
+    GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>)
+}
+
+/// Searches a whole tree for `key`, starting from `node` (normally the root)
+/// and descending one level per iteration. Ends either at the key/value pair
+/// holding `key`, or at the leaf edge where `key` would have to be inserted.
+pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
+    mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+    key: &Q
+) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
+    where Q: Ord, K: Borrow<Q> {
+
+    loop {
+        // Search the current node; a hit ends the search immediately.
+        let edge = match search_node(node, key) {
+            Found(handle) => return Found(handle),
+            GoDown(handle) => handle,
+        };
+        // A leaf edge means the key is absent; an internal edge is descended.
+        node = match edge.force() {
+            Leaf(leaf) => return GoDown(leaf),
+            Internal(internal) => internal.descend(),
+        };
+    }
+}
+
+/// Searches a single node for `key`: `Found` with a KV handle on an exact
+/// match, otherwise `GoDown` with the edge handle through which the search
+/// must continue.
+pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
+    node: NodeRef<BorrowType, K, V, Type>,
+    key: &Q
+) -> SearchResult<BorrowType, K, V, Type, Type>
+    where Q: Ord, K: Borrow<Q> {
+
+    match search_linear(&node, key) {
+        // Exact match at position `idx`.
+        (idx, true) => Found(
+            Handle::new_kv(node, idx)
+        ),
+        // Not in this node; the key would belong in the subtree behind edge `idx`.
+        // (Use the glob-imported variant name, consistent with `Found` above.)
+        (idx, false) => GoDown(
+            Handle::new_edge(node, idx)
+        )
+    }
+}
+
+/// Linearly scans the node's sorted keys for `key`. Returns `(idx, true)` on an
+/// exact match at `idx`, or `(idx, false)` where `idx` is the edge the search
+/// should continue through (the position of the first key greater than `key`,
+/// or `len` if every key is smaller).
+pub fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
+    node: &NodeRef<BorrowType, K, V, Type>,
+    key: &Q
+) -> (usize, bool)
+    where Q: Ord, K: Borrow<Q> {
+
+    let keys = node.keys();
+    for (i, k) in keys.iter().enumerate() {
+        // Keys are sorted, so stop at the first key that is not smaller.
+        let ord = key.cmp(k.borrow());
+        if ord == Ordering::Equal {
+            return (i, true);
+        }
+        if ord == Ordering::Less {
+            return (i, false);
+        }
+    }
+    // Every key was smaller: continue through the last edge.
+    (keys.len(), false)
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
+// to TreeMap
+
+use core::cmp::Ordering::{self, Less, Greater, Equal};
+use core::cmp::{min, max};
+use core::fmt::Debug;
+use core::fmt;
+use core::iter::{Peekable, FromIterator, FusedIterator};
+use core::ops::{BitOr, BitAnd, BitXor, Sub, RangeBounds};
+
+use borrow::Borrow;
+use collections::btree_map::{self, BTreeMap, Keys};
+use super::Recover;
+
+// FIXME(conventions): implement bounded iterators
+
+/// A set based on a B-Tree.
+///
+/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
+/// benefits and drawbacks.
+///
+/// It is a logic error for an item to be modified in such a way that the item's ordering relative
+/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`Ord`]: ../../std/cmp/trait.Ord.html
+/// [`Cell`]: ../../std/cell/struct.Cell.html
+/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeSet;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BTreeSet<&str>` in this example).
+/// let mut books = BTreeSet::new();
+///
+/// // Add some books.
+/// books.insert("A Dance With Dragons");
+/// books.insert("To Kill a Mockingbird");
+/// books.insert("The Odyssey");
+/// books.insert("The Great Gatsby");
+///
+/// // Check for a specific one.
+/// if !books.contains("The Winds of Winter") {
+///     println!("We have {} books, but The Winds of Winter ain't one.",
+///              books.len());
+/// }
+///
+/// // Remove a book.
+/// books.remove("The Odyssey");
+///
+/// // Iterate over everything.
+/// for book in &books {
+///     println!("{}", book);
+/// }
+/// ```
+#[derive(Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BTreeSet<T> {
+    // A thin wrapper over a map from elements to `()`.
+    map: BTreeMap<T, ()>,
+}
+
+/// An iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`iter`]: struct.BTreeSet.html#method.iter
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+    // The underlying map's key iterator; the `()` values are never yielded.
+    iter: Keys<'a, T, ()>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // NOTE(review): the `.clone()` yields an owned value for `.field`;
+        // `&self.iter` may suffice if `Keys: Debug` — verify before changing.
+        f.debug_tuple("Iter")
+         .field(&self.iter.clone())
+         .finish()
+    }
+}
+
+/// An owning iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`][`BTreeSet`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`into_iter`]: struct.BTreeSet.html#method.into_iter
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct IntoIter<T> {
+    // Owning iterator over the map's `(T, ())` entries.
+    iter: btree_map::IntoIter<T, ()>,
+}
+
+/// An iterator over a sub-range of items in a `BTreeSet`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`range`]: struct.BTreeSet.html#method.range
+#[derive(Debug)]
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, T: 'a> {
+    // The underlying map's range iterator over `(T, ())` entries.
+    iter: btree_map::Range<'a, T, ()>,
+}
+
+/// A lazy iterator producing elements in the difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`difference`]: struct.BTreeSet.html#method.difference
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Difference<'a, T: 'a> {
+    // The two sets' iterators; `Peekable` lets the merge-style traversal
+    // compare heads without consuming them.
+    a: Peekable<Iter<'a, T>>,
+    b: Peekable<Iter<'a, T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Difference<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("Difference")
+         .field(&self.a)
+         .field(&self.b)
+         .finish()
+    }
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`BTreeSet`]. See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`symmetric_difference`]: struct.BTreeSet.html#method.symmetric_difference
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct SymmetricDifference<'a, T: 'a> {
+    // See `Difference` for the role of the peekable pair.
+    a: Peekable<Iter<'a, T>>,
+    b: Peekable<Iter<'a, T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for SymmetricDifference<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("SymmetricDifference")
+         .field(&self.a)
+         .field(&self.b)
+         .finish()
+    }
+}
+
+/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`intersection`]: struct.BTreeSet.html#method.intersection
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Intersection<'a, T: 'a> {
+    // See `Difference` for the role of the peekable pair.
+    a: Peekable<Iter<'a, T>>,
+    b: Peekable<Iter<'a, T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Intersection<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("Intersection")
+         .field(&self.a)
+         .field(&self.b)
+         .finish()
+    }
+}
+
+/// A lazy iterator producing elements in the union of `BTreeSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`union`]: struct.BTreeSet.html#method.union
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Union<'a, T: 'a> {
+    // See `Difference` for the role of the peekable pair.
+    a: Peekable<Iter<'a, T>>,
+    b: Peekable<Iter<'a, T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Union<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("Union")
+         .field(&self.a)
+         .field(&self.b)
+         .finish()
+    }
+}
+
+impl<T: Ord> BTreeSet<T> {
+    /// Makes a new `BTreeSet` with a reasonable choice of B.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(unused_mut)]
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set: BTreeSet<i32> = BTreeSet::new();
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new() -> BTreeSet<T> {
+        // An empty set is an empty map from elements to `()`.
+        BTreeSet { map: BTreeMap::new() }
+    }
+
+    /// Constructs a double-ended iterator over a sub-range of elements in the set.
+    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+    /// yield elements from min (inclusive) to max (exclusive).
+    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+    /// range from 4 to 10.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    /// use std::ops::Bound::Included;
+    ///
+    /// let mut set = BTreeSet::new();
+    /// set.insert(3);
+    /// set.insert(5);
+    /// set.insert(8);
+    /// for &elem in set.range((Included(&4), Included(&8))) {
+    ///     println!("{}", elem);
+    /// }
+    /// assert_eq!(Some(&5), set.range(4..).next());
+    /// ```
+    #[stable(feature = "btree_range", since = "1.17.0")]
+    pub fn range<K: ?Sized, R>(&self, range: R) -> Range<T>
+        where K: Ord, T: Borrow<K>, R: RangeBounds<K>
+    {
+        // Delegate to the map's range iterator; only the keys are exposed.
+        Range { iter: self.map.range(range) }
+    }
+
+    /// Visits the values representing the difference,
+    /// i.e. the values that are in `self` but not in `other`,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    /// b.insert(3);
+    ///
+    /// let diff: Vec<_> = a.difference(&b).cloned().collect();
+    /// assert_eq!(diff, [1]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
+        // Lazy: pairs up the two sorted iterators; the adapter does the merge.
+        Difference {
+            a: self.iter().peekable(),
+            b: other.iter().peekable(),
+        }
+    }
+
+    /// Visits the values representing the symmetric difference,
+    /// i.e. the values that are in `self` or in `other` but not in both,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    /// b.insert(3);
+    ///
+    /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
+    /// assert_eq!(sym_diff, [1, 3]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn symmetric_difference<'a>(&'a self,
+                                    other: &'a BTreeSet<T>)
+                                    -> SymmetricDifference<'a, T> {
+        // Lazy: pairs up the two sorted iterators; the adapter does the merge.
+        SymmetricDifference {
+            a: self.iter().peekable(),
+            b: other.iter().peekable(),
+        }
+    }
+
+    /// Visits the values representing the intersection,
+    /// i.e. the values that are both in `self` and `other`,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    /// b.insert(3);
+    ///
+    /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
+    /// assert_eq!(intersection, [2]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T> {
+        // Lazy: pairs up the two sorted iterators; the adapter does the merge.
+        Intersection {
+            a: self.iter().peekable(),
+            b: other.iter().peekable(),
+        }
+    }
+
+    /// Visits the values representing the union,
+    /// i.e. all the values in `self` or `other`, without duplicates,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    ///
+    /// let union: Vec<_> = a.union(&b).cloned().collect();
+    /// assert_eq!(union, [1, 2]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T> {
+        // Lazy: pairs up the two sorted iterators; the adapter does the merge.
+        Union {
+            a: self.iter().peekable(),
+            b: other.iter().peekable(),
+        }
+    }
+
+    /// Clears the set, removing all values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut v = BTreeSet::new();
+    /// v.insert(1);
+    /// v.clear();
+    /// assert!(v.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        // Delegate to the underlying map.
+        self.map.clear()
+    }
+
+    /// Returns `true` if the set contains a value.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.contains(&1), true);
+    /// assert_eq!(set.contains(&4), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+        where T: Borrow<Q>,
+              Q: Ord
+    {
+        // Elements are the map's keys.
+        self.map.contains_key(value)
+    }
+
+    /// Returns a reference to the value in the set, if any, that is equal to the given value.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.get(&2), Some(&2));
+    /// assert_eq!(set.get(&4), None);
+    /// ```
+    #[stable(feature = "set_recovery", since = "1.9.0")]
+    pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+        where T: Borrow<Q>,
+              Q: Ord
+    {
+        // `Recover` gives access to the stored key itself rather than the `()` value.
+        Recover::get(&self.map, value)
+    }
+
+    /// Returns `true` if `self` has no elements in common with `other`.
+    /// This is equivalent to checking for an empty intersection.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let mut b = BTreeSet::new();
+    ///
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(4);
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(1);
+    /// assert_eq!(a.is_disjoint(&b), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool {
+        // Disjoint iff the lazy intersection yields nothing; this short-circuits
+        // on the first common element.
+        self.intersection(other).next().is_none()
+    }
+
+    /// Returns `true` if the set is a subset of another,
+    /// i.e. `other` contains at least all the values in `self`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let sup: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let mut set = BTreeSet::new();
+    ///
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(2);
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(4);
+    /// assert_eq!(set.is_subset(&sup), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_subset(&self, other: &BTreeSet<T>) -> bool {
+        // Merge-walk both sorted iterators: every element of `self` must be
+        // matched by an equal element of `other`.
+        let mut candidates = other.iter();
+        'outer: for item in self.iter() {
+            // Advance `candidates` until it meets or passes `item`.
+            for candidate in &mut candidates {
+                match candidate.cmp(item) {
+                    Less => {}
+                    Equal => continue 'outer,
+                    Greater => return false,
+                }
+            }
+            // `other` ran out while `item` was still unmatched.
+            return false;
+        }
+        true
+    }
+
+    /// Returns `true` if the set is a superset of another,
+    /// i.e. `self` contains at least all the values in `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let sub: BTreeSet<_> = [1, 2].iter().cloned().collect();
+    /// let mut set = BTreeSet::new();
+    ///
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(0);
+    /// set.insert(1);
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.is_superset(&sub), true);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_superset(&self, other: &BTreeSet<T>) -> bool {
+        // Superset is subset with the roles reversed.
+        other.is_subset(self)
+    }
+
+    /// Adds a value to the set.
+    ///
+    /// If the set did not have this value present, `true` is returned.
+    ///
+    /// If the set did have this value present, `false` is returned, and the
+    /// entry is not updated. See the [module-level documentation] for more.
+    ///
+    /// [module-level documentation]: index.html#insert-and-complex-keys
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    ///
+    /// assert_eq!(set.insert(2), true);
+    /// assert_eq!(set.insert(2), false);
+    /// assert_eq!(set.len(), 1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(&mut self, value: T) -> bool {
+        // `map.insert` returns the previous value; `None` means the element is new.
+        self.map.insert(value, ()).is_none()
+    }
+
+    /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
+    /// one. Returns the replaced value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    /// set.insert(Vec::<i32>::new());
+    ///
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+    /// set.replace(Vec::with_capacity(10));
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+    /// ```
+    #[stable(feature = "set_recovery", since = "1.9.0")]
+    pub fn replace(&mut self, value: T) -> Option<T> {
+        // `Recover` swaps the stored key itself, returning the old one.
+        Recover::replace(&mut self.map, value)
+    }
+
+    /// Removes a value from the set. Returns `true` if the value was
+    /// present in the set.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.remove(&2), true);
+    /// assert_eq!(set.remove(&2), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+        where T: Borrow<Q>,
+              Q: Ord
+    {
+        // `map.remove` returns the removed value; `Some` means the element existed.
+        self.map.remove(value).is_some()
+    }
+
+    /// Removes and returns the value in the set, if any, that is equal to the given one.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.take(&2), Some(2));
+    /// assert_eq!(set.take(&2), None);
+    /// ```
+    #[stable(feature = "set_recovery", since = "1.9.0")]
+    pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+        where T: Borrow<Q>,
+              Q: Ord
+    {
+        // `Recover` hands back the stored key itself rather than the `()` value.
+        Recover::take(&mut self.map, value)
+    }
+
+    /// Moves all elements from `other` into `Self`, leaving `other` empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    /// a.insert(3);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(3);
+    /// b.insert(4);
+    /// b.insert(5);
+    ///
+    /// a.append(&mut b);
+    ///
+    /// assert_eq!(a.len(), 5);
+    /// assert_eq!(b.len(), 0);
+    ///
+    /// assert!(a.contains(&1));
+    /// assert!(a.contains(&2));
+    /// assert!(a.contains(&3));
+    /// assert!(a.contains(&4));
+    /// assert!(a.contains(&5));
+    /// ```
+    #[stable(feature = "btree_append", since = "1.11.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        // Delegate to the map-level bulk append.
+        self.map.append(&mut other.map);
+    }
+
+    /// Splits the collection into two at the given key. Returns everything after the given key,
+    /// including the key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    /// a.insert(3);
+    /// a.insert(17);
+    /// a.insert(41);
+    ///
+    /// let b = a.split_off(&3);
+    ///
+    /// assert_eq!(a.len(), 2);
+    /// assert_eq!(b.len(), 3);
+    ///
+    /// assert!(a.contains(&1));
+    /// assert!(a.contains(&2));
+    ///
+    /// assert!(b.contains(&3));
+    /// assert!(b.contains(&17));
+    /// assert!(b.contains(&41));
+    /// ```
+    #[stable(feature = "btree_split_off", since = "1.11.0")]
+    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self where T: Borrow<Q> {
+        // Wrap the split-off map half back into a set.
+        BTreeSet { map: self.map.split_off(key) }
+    }
+}
+
+// These methods need no `T: Ord` bound, so they live in a separate impl block.
+impl<T> BTreeSet<T> {
+    /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
+    /// let mut set_iter = set.iter();
+    /// assert_eq!(set_iter.next(), Some(&1));
+    /// assert_eq!(set_iter.next(), Some(&2));
+    /// assert_eq!(set_iter.next(), Some(&3));
+    /// assert_eq!(set_iter.next(), None);
+    /// ```
+    ///
+    /// Values returned by the iterator are returned in ascending order:
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
+    /// let mut set_iter = set.iter();
+    /// assert_eq!(set_iter.next(), Some(&1));
+    /// assert_eq!(set_iter.next(), Some(&2));
+    /// assert_eq!(set_iter.next(), Some(&3));
+    /// assert_eq!(set_iter.next(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter(&self) -> Iter<T> {
+        // Iterate the map's keys; the `()` values are hidden.
+        Iter { iter: self.map.keys() }
+    }
+
+    /// Returns the number of elements in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut v = BTreeSet::new();
+    /// assert_eq!(v.len(), 0);
+    /// v.insert(1);
+    /// assert_eq!(v.len(), 1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        // Delegate to the underlying map.
+        self.map.len()
+    }
+
+    /// Returns `true` if the set contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut v = BTreeSet::new();
+    /// assert!(v.is_empty());
+    /// v.insert(1);
+    /// assert!(!v.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> FromIterator<T> for BTreeSet<T> {
+    /// Builds a `BTreeSet` by inserting every item yielded by `iter`.
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
+        let mut result = BTreeSet::new();
+        result.extend(iter);
+        result
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for BTreeSet<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Gets an iterator for moving out the `BTreeSet`'s contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect();
+    ///
+    /// let v: Vec<_> = set.into_iter().collect();
+    /// assert_eq!(v, [1, 2, 3, 4]);
+    /// ```
+    fn into_iter(self) -> IntoIter<T> {
+        // Consume the backing map; the wrapper yields only the keys.
+        let map_iter = self.map.into_iter();
+        IntoIter { iter: map_iter }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a BTreeSet<T> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    /// Iterating a shared reference yields shared references, in ascending order.
+    fn into_iter(self) -> Iter<'a, T> {
+        BTreeSet::iter(self)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Extend<T> for BTreeSet<T> {
+    /// Inserts every item from `iter`; a duplicate simply replaces the
+    /// existing equal element.
+    #[inline]
+    fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
+        iter.into_iter().for_each(|elem| {
+            self.insert(elem);
+        });
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
+    /// Copies each referenced value into the set (requires `T: Copy`).
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        for &elem in iter {
+            self.insert(elem);
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Default for BTreeSet<T> {
+    /// Makes an empty `BTreeSet<T>` with a reasonable choice of B.
+    fn default() -> BTreeSet<T> {
+        // Identical to `BTreeSet::new()`; allocates nothing up front.
+        BTreeSet::new()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, 'b, T: Ord + Clone> Sub<&'b BTreeSet<T>> for &'a BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let result = &a - &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [1, 2]);
+    /// ```
+    fn sub(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        // Clone only the elements of `self` that `rhs` lacks.
+        let only_in_self = self.difference(rhs);
+        only_in_self.cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, 'b, T: Ord + Clone> BitXor<&'b BTreeSet<T>> for &'a BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+    ///
+    /// let result = &a ^ &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [1, 4]);
+    /// ```
+    fn bitxor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        // Clone only the elements appearing in exactly one of the two sets.
+        let one_side_only = self.symmetric_difference(rhs);
+        one_side_only.cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, 'b, T: Ord + Clone> BitAnd<&'b BTreeSet<T>> for &'a BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+    ///
+    /// let result = &a & &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [2, 3]);
+    /// ```
+    fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        // Clone only the elements common to both sets.
+        let shared = self.intersection(rhs);
+        shared.cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, 'b, T: Ord + Clone> BitOr<&'b BTreeSet<T>> for &'a BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let result = &a | &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
+    /// ```
+    fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        // Clone every distinct element from either set (duplicates appear once).
+        let combined = self.union(rhs);
+        combined.cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Debug> Debug for BTreeSet<T> {
+    // Renders as `{a, b, c}` in ascending order via the set-formatting helper.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_set().entries(self.iter()).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for Iter<'a, T> {
+    // A cloned iterator resumes independently from the same position.
+    fn clone(&self) -> Iter<'a, T> {
+        Iter { iter: self.iter.clone() }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    // Pure delegation to the underlying map-keys iterator (ascending order).
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+    // Walks the tree from the largest element downwards.
+    fn next_back(&mut self) -> Option<&'a T> {
+        self.iter.next_back()
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> ExactSizeIterator for Iter<'a, T> {
+    // Exact remaining count, forwarded from the keys iterator.
+    fn len(&self) -> usize { self.iter.len() }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once exhausted, `next()` keeps returning `None`.
+impl<'a, T> FusedIterator for Iter<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    // Yield the map's keys by value; the per-entry payload is discarded.
+    fn next(&mut self) -> Option<T> {
+        self.iter.next().map(|(k, _)| k)
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+    // Same as `next`, but draining from the large end.
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back().map(|(k, _)| k)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+    // Exact remaining count, forwarded from the map iterator.
+    fn len(&self) -> usize { self.iter.len() }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once exhausted, `next()` keeps returning `None`.
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, T> Clone for Range<'a, T> {
+    // A cloned range iterator resumes independently from the same position.
+    fn clone(&self) -> Range<'a, T> {
+        Range { iter: self.iter.clone() }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, T> Iterator for Range<'a, T> {
+    type Item = &'a T;
+
+    // Yield only the key of each map entry within the range.
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next().map(|(k, _)| k)
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, T> DoubleEndedIterator for Range<'a, T> {
+    // Walks the range from its upper bound downwards.
+    fn next_back(&mut self) -> Option<&'a T> {
+        self.iter.next_back().map(|(k, _)| k)
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once exhausted, `next()` keeps returning `None`.
+impl<'a, T> FusedIterator for Range<'a, T> {}
+
+/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None
+fn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering) -> Ordering {
+    match x {
+        // Left side exhausted (regardless of the right): the `short` outcome.
+        None => short,
+        Some(x1) => match y {
+            // Left still has items but the right side is exhausted.
+            None => long,
+            // Both present: ordinary comparison.
+            Some(y1) => x1.cmp(y1),
+        },
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for Difference<'a, T> {
+    // Clones both peekable cursors so the copy iterates independently.
+    fn clone(&self) -> Difference<'a, T> {
+        Difference {
+            a: self.a.clone(),
+            b: self.b.clone(),
+        }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for Difference<'a, T> {
+    type Item = &'a T;
+
+    // Merge-walk of two sorted streams: yield elements of `a` absent from `b`.
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            match cmp_opt(self.a.peek(), self.b.peek(), Less, Less) {
+                // `a`'s head is smaller (or `a` is exhausted): it cannot be in `b`.
+                Less => return self.a.next(),
+                // Present in both sets: skip it on both sides.
+                Equal => {
+                    self.a.next();
+                    self.b.next();
+                }
+                // `b`'s head is smaller: advance `b` to catch up.
+                Greater => {
+                    self.b.next();
+                }
+            }
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // Each element of `b` can cancel at most one of `a`; none may cancel any.
+        let a_len = self.a.len();
+        let b_len = self.b.len();
+        (a_len.saturating_sub(b_len), Some(a_len))
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once both underlying iterators are drained, `next()` stays `None`.
+impl<'a, T: Ord> FusedIterator for Difference<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for SymmetricDifference<'a, T> {
+    // Clones both peekable cursors so the copy iterates independently.
+    fn clone(&self) -> SymmetricDifference<'a, T> {
+        SymmetricDifference {
+            a: self.a.clone(),
+            b: self.b.clone(),
+        }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
+    type Item = &'a T;
+
+    // Merge-walk: yield elements found in exactly one of the two streams.
+    // `short = Greater` routes an exhausted `a` to the `b` branch;
+    // `long = Less` routes an exhausted `b` to the `a` branch.
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
+                Less => return self.a.next(),
+                // Element in both sets: drop it from both.
+                Equal => {
+                    self.a.next();
+                    self.b.next();
+                }
+                Greater => return self.b.next(),
+            }
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // All pairs may cancel (lower bound 0); none may (upper bound a+b).
+        (0, Some(self.a.len() + self.b.len()))
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once both underlying iterators are drained, `next()` stays `None`.
+impl<'a, T: Ord> FusedIterator for SymmetricDifference<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for Intersection<'a, T> {
+    // Clones both peekable cursors so the copy iterates independently.
+    fn clone(&self) -> Intersection<'a, T> {
+        Intersection {
+            a: self.a.clone(),
+            b: self.b.clone(),
+        }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for Intersection<'a, T> {
+    type Item = &'a T;
+
+    // Merge-walk: yield only elements present in both streams.
+    // The `?` on either `peek()` ends iteration as soon as one side runs out.
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            match Ord::cmp(self.a.peek()?, self.b.peek()?) {
+                // `a`'s head is not in `b`: discard it.
+                Less => {
+                    self.a.next();
+                }
+                // Shared element: consume both, yield `a`'s copy.
+                Equal => {
+                    self.b.next();
+                    return self.a.next();
+                }
+                // `b`'s head is not in `a`: discard it.
+                Greater => {
+                    self.b.next();
+                }
+            }
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // Intersection can never exceed the smaller input.
+        (0, Some(min(self.a.len(), self.b.len())))
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once either underlying iterator is drained, `next()` stays `None`.
+impl<'a, T: Ord> FusedIterator for Intersection<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for Union<'a, T> {
+    // Clones both peekable cursors so the copy iterates independently.
+    fn clone(&self) -> Union<'a, T> {
+        Union {
+            a: self.a.clone(),
+            b: self.b.clone(),
+        }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for Union<'a, T> {
+    type Item = &'a T;
+
+    // Merge-walk of both sorted streams, emitting each distinct value once.
+    fn next(&mut self) -> Option<&'a T> {
+        match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
+            Less => self.a.next(),
+            // Same value on both sides: discard `b`'s copy, yield `a`'s.
+            Equal => {
+                self.b.next();
+                self.a.next()
+            }
+            Greater => self.b.next(),
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // At least the larger input (total overlap); at most the sum (disjoint).
+        let a_len = self.a.len();
+        let b_len = self.b.len();
+        (max(a_len, b_len), Some(a_len + b_len))
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once both underlying iterators are drained, `next()` stays `None`.
+impl<'a, T: Ord> FusedIterator for Union<'a, T> {}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A doubly-linked list with owned nodes.
+//!
+//! The `LinkedList` allows pushing and popping elements at either end
+//! in constant time.
+//!
+//! Almost always it is better to use `Vec` or [`VecDeque`] instead of
+//! [`LinkedList`]. In general, array-based containers are faster,
+//! more memory efficient and make better use of CPU cache.
+//!
+//! [`LinkedList`]: ../linked_list/struct.LinkedList.html
+//! [`VecDeque`]: ../vec_deque/struct.VecDeque.html
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::Ordering;
+use core::fmt;
+use core::hash::{Hasher, Hash};
+use core::iter::{FromIterator, FusedIterator};
+use core::marker::PhantomData;
+use core::mem;
+use core::ptr::NonNull;
+
+use boxed::Box;
+use super::SpecExtend;
+
+/// A doubly-linked list with owned nodes.
+///
+/// The `LinkedList` allows pushing and popping elements at either end
+/// in constant time.
+///
+/// Almost always it is better to use `Vec` or `VecDeque` instead of
+/// `LinkedList`. In general, array-based containers are faster,
+/// more memory efficient and make better use of CPU cache.
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct LinkedList<T> {
+    // First node, or `None` when the list is empty.
+    head: Option<NonNull<Node<T>>>,
+    // Last node; `None` exactly when `head` is `None`.
+    tail: Option<NonNull<Node<T>>>,
+    // Node count, maintained by every push/pop/splice operation.
+    len: usize,
+    // Tells dropck that this list logically owns `Box<Node<T>>` allocations.
+    marker: PhantomData<Box<Node<T>>>,
+}
+
+// A single heap-allocated list node; links are raw non-null pointers
+// because each node is referenced from two directions.
+struct Node<T> {
+    next: Option<NonNull<Node<T>>>,
+    prev: Option<NonNull<Node<T>>>,
+    element: T,
+}
+
+/// An iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`iter`] method on [`LinkedList`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.LinkedList.html#method.iter
+/// [`LinkedList`]: struct.LinkedList.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+    // Next node to yield from the front; converges with `tail` as `len` hits 0.
+    head: Option<NonNull<Node<T>>>,
+    // Next node to yield from the back.
+    tail: Option<NonNull<Node<T>>>,
+    // Remaining element count; the sole termination condition.
+    len: usize,
+    // Borrow of the list's nodes, pinning the `'a` lifetime.
+    marker: PhantomData<&'a Node<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
+    // Shows only the remaining length; the node pointers are not meaningful output.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("Iter")
+            .field(&self.len)
+            .finish()
+    }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for Iter<'a, T> {
+    // All fields are `Copy`-like pointers and a counter, so a field-wise
+    // copy is a correct clone.
+    fn clone(&self) -> Self {
+        Iter { ..*self }
+    }
+}
+
+/// A mutable iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`LinkedList`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.LinkedList.html#method.iter_mut
+/// [`LinkedList`]: struct.LinkedList.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IterMut<'a, T: 'a> {
+    // Exclusive borrow of the whole list, used by `insert_next`-style helpers.
+    list: &'a mut LinkedList<T>,
+    // Next node to yield from the front.
+    head: Option<NonNull<Node<T>>>,
+    // Next node to yield from the back.
+    tail: Option<NonNull<Node<T>>>,
+    // Remaining element count; the sole termination condition.
+    len: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
+    // Shows the borrowed list plus the remaining length.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("IterMut")
+            .field(&self.list)
+            .field(&self.len)
+            .finish()
+    }
+}
+
+/// An owning iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`LinkedList`][`LinkedList`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.LinkedList.html#method.into_iter
+/// [`LinkedList`]: struct.LinkedList.html
+#[derive(Clone)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+    // Owns the list outright; iteration simply pops from the front/back.
+    list: LinkedList<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+    // Delegates to the owned list's `Debug` output.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("IntoIter")
+            .field(&self.list)
+            .finish()
+    }
+}
+
+impl<T> Node<T> {
+    // Creates an unlinked node; the caller wires `next`/`prev` afterwards.
+    fn new(element: T) -> Self {
+        Node {
+            next: None,
+            prev: None,
+            element,
+        }
+    }
+
+    // Consumes the boxed node, freeing the allocation and returning the payload.
+    fn into_element(self: Box<Self>) -> T {
+        self.element
+    }
+}
+
+// private methods
+impl<T> LinkedList<T> {
+    /// Adds the given node to the front of the list.
+    #[inline]
+    fn push_front_node(&mut self, mut node: Box<Node<T>>) {
+        unsafe {
+            // Wire the new node's links before publishing it as the head.
+            node.next = self.head;
+            node.prev = None;
+            let node = Some(Box::into_raw_non_null(node));
+
+            match self.head {
+                // Empty list: the new node is also the tail.
+                None => self.tail = node,
+                Some(mut head) => head.as_mut().prev = node,
+            }
+
+            self.head = node;
+            self.len += 1;
+        }
+    }
+
+    /// Removes and returns the node at the front of the list.
+    #[inline]
+    fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
+        // Reclaim ownership of the head allocation, then repair the links.
+        self.head.map(|node| unsafe {
+            let node = Box::from_raw(node.as_ptr());
+            self.head = node.next;
+
+            match self.head {
+                // The list became empty.
+                None => self.tail = None,
+                Some(mut head) => head.as_mut().prev = None,
+            }
+
+            self.len -= 1;
+            node
+        })
+    }
+
+    /// Adds the given node to the back of the list.
+    #[inline]
+    fn push_back_node(&mut self, mut node: Box<Node<T>>) {
+        unsafe {
+            // Mirror image of `push_front_node`.
+            node.next = None;
+            node.prev = self.tail;
+            let node = Some(Box::into_raw_non_null(node));
+
+            match self.tail {
+                None => self.head = node,
+                Some(mut tail) => tail.as_mut().next = node,
+            }
+
+            self.tail = node;
+            self.len += 1;
+        }
+    }
+
+    /// Removes and returns the node at the back of the list.
+    #[inline]
+    fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
+        // Mirror image of `pop_front_node`.
+        self.tail.map(|node| unsafe {
+            let node = Box::from_raw(node.as_ptr());
+            self.tail = node.prev;
+
+            match self.tail {
+                None => self.head = None,
+                Some(mut tail) => tail.as_mut().next = None,
+            }
+
+            self.len -= 1;
+            node
+        })
+    }
+
+    /// Unlinks the specified node from the current list.
+    ///
+    /// Warning: this will not check that the provided node belongs to the current list.
+    #[inline]
+    unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
+        let node = node.as_mut();
+
+        match node.prev {
+            Some(mut prev) => prev.as_mut().next = node.next.clone(),
+            // this node is the head node
+            None => self.head = node.next.clone(),
+        };
+
+        match node.next {
+            Some(mut next) => next.as_mut().prev = node.prev.clone(),
+            // this node is the tail node
+            None => self.tail = node.prev.clone(),
+        };
+
+        // The node's allocation is NOT freed here; the caller now owns it.
+        self.len -= 1;
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for LinkedList<T> {
+    /// Creates an empty `LinkedList<T>`.
+    #[inline]
+    fn default() -> Self {
+        // Identical to `LinkedList::new()`; allocates nothing.
+        Self::new()
+    }
+}
+
+impl<T> LinkedList<T> {
+    /// Creates an empty `LinkedList`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let list: LinkedList<u32> = LinkedList::new();
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new() -> Self {
+        // No allocation happens until the first push.
+        LinkedList {
+            head: None,
+            tail: None,
+            len: 0,
+            marker: PhantomData,
+        }
+    }
+
+    /// Moves all elements from `other` to the end of the list.
+    ///
+    /// This reuses all the nodes from `other` and moves them into `self`. After
+    /// this operation, `other` becomes empty.
+    ///
+    /// This operation should compute in O(1) time and O(1) memory.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list1 = LinkedList::new();
+    /// list1.push_back('a');
+    ///
+    /// let mut list2 = LinkedList::new();
+    /// list2.push_back('b');
+    /// list2.push_back('c');
+    ///
+    /// list1.append(&mut list2);
+    ///
+    /// let mut iter = list1.iter();
+    /// assert_eq!(iter.next(), Some(&'a'));
+    /// assert_eq!(iter.next(), Some(&'b'));
+    /// assert_eq!(iter.next(), Some(&'c'));
+    /// assert!(iter.next().is_none());
+    ///
+    /// assert!(list2.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        match self.tail {
+            // `self` is empty: just exchange the two lists wholesale.
+            None => mem::swap(self, other),
+            Some(mut tail) => {
+                // `take` leaves `other.head` as `None`, emptying `other`.
+                if let Some(mut other_head) = other.head.take() {
+                    unsafe {
+                        // Splice: our tail links forward to their head and back again.
+                        tail.as_mut().next = Some(other_head);
+                        other_head.as_mut().prev = Some(tail);
+                    }
+
+                    self.tail = other.tail.take();
+                    // Transfer the length; `other.len` is reset to 0.
+                    self.len += mem::replace(&mut other.len, 0);
+                }
+            }
+        }
+    }
+
+    /// Provides a forward iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<u32> = LinkedList::new();
+    ///
+    /// list.push_back(0);
+    /// list.push_back(1);
+    /// list.push_back(2);
+    ///
+    /// let mut iter = list.iter();
+    /// assert_eq!(iter.next(), Some(&0));
+    /// assert_eq!(iter.next(), Some(&1));
+    /// assert_eq!(iter.next(), Some(&2));
+    /// assert_eq!(iter.next(), None);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter(&self) -> Iter<T> {
+        // Snapshot both end pointers and the length; the iterator walks inwards.
+        Iter {
+            head: self.head,
+            tail: self.tail,
+            len: self.len,
+            marker: PhantomData,
+        }
+    }
+
+    /// Provides a forward iterator with mutable references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<u32> = LinkedList::new();
+    ///
+    /// list.push_back(0);
+    /// list.push_back(1);
+    /// list.push_back(2);
+    ///
+    /// for element in list.iter_mut() {
+    ///     *element += 10;
+    /// }
+    ///
+    /// let mut iter = list.iter();
+    /// assert_eq!(iter.next(), Some(&10));
+    /// assert_eq!(iter.next(), Some(&11));
+    /// assert_eq!(iter.next(), Some(&12));
+    /// assert_eq!(iter.next(), None);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter_mut(&mut self) -> IterMut<T> {
+        // Copy the end pointers first, then hand the exclusive borrow to `list`.
+        IterMut {
+            head: self.head,
+            tail: self.tail,
+            len: self.len,
+            list: self,
+        }
+    }
+
+    /// Returns `true` if the `LinkedList` is empty.
+    ///
+    /// This operation should compute in O(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert!(dl.is_empty());
+    ///
+    /// dl.push_front("foo");
+    /// assert!(!dl.is_empty());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        // `head` is `None` exactly when the list holds no nodes.
+        self.head.is_none()
+    }
+
+    /// Returns the length of the `LinkedList`.
+    ///
+    /// This operation should compute in O(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    ///
+    /// dl.push_front(2);
+    /// assert_eq!(dl.len(), 1);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.len(), 2);
+    ///
+    /// dl.push_back(3);
+    /// assert_eq!(dl.len(), 3);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        // Maintained eagerly by every mutation, so no traversal is needed.
+        self.len
+    }
+
+    /// Removes all elements from the `LinkedList`.
+    ///
+    /// This operation should compute in O(n) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    ///
+    /// dl.push_front(2);
+    /// dl.push_front(1);
+    /// assert_eq!(dl.len(), 2);
+    /// assert_eq!(dl.front(), Some(&1));
+    ///
+    /// dl.clear();
+    /// assert_eq!(dl.len(), 0);
+    /// assert_eq!(dl.front(), None);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        // Replacing `self` drops the old list, whose `Drop` frees every node.
+        *self = Self::new();
+    }
+
+    /// Returns `true` if the `LinkedList` contains an element equal to the
+    /// given value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<u32> = LinkedList::new();
+    ///
+    /// list.push_back(0);
+    /// list.push_back(1);
+    /// list.push_back(2);
+    ///
+    /// assert_eq!(list.contains(&0), true);
+    /// assert_eq!(list.contains(&10), false);
+    /// ```
+    #[stable(feature = "linked_list_contains", since = "1.12.0")]
+    pub fn contains(&self, x: &T) -> bool
+        where T: PartialEq<T>
+    {
+        // Linear front-to-back scan; stops at the first match.
+        for element in self.iter() {
+            if element == x {
+                return true;
+            }
+        }
+        false
+    }
+
+    /// Provides a reference to the front element, or `None` if the list is
+    /// empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.front(), None);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.front(), Some(&1));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn front(&self) -> Option<&T> {
+        unsafe {
+            // The head pointer, when present, always refers to a live node.
+            self.head.as_ref().map(|node| &node.as_ref().element)
+        }
+    }
+
+    /// Provides a mutable reference to the front element, or `None` if the list
+    /// is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.front(), None);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.front(), Some(&1));
+    ///
+    /// match dl.front_mut() {
+    ///     None => {},
+    ///     Some(x) => *x = 5,
+    /// }
+    /// assert_eq!(dl.front(), Some(&5));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn front_mut(&mut self) -> Option<&mut T> {
+        unsafe {
+            // Exclusive access to `self` guarantees no aliasing of the head node.
+            self.head.as_mut().map(|node| &mut node.as_mut().element)
+        }
+    }
+
+    /// Provides a reference to the back element, or `None` if the list is
+    /// empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.back(), None);
+    ///
+    /// dl.push_back(1);
+    /// assert_eq!(dl.back(), Some(&1));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn back(&self) -> Option<&T> {
+        unsafe {
+            // The tail pointer, when present, always refers to a live node.
+            self.tail.as_ref().map(|node| &node.as_ref().element)
+        }
+    }
+
+    /// Provides a mutable reference to the back element, or `None` if the list
+    /// is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.back(), None);
+    ///
+    /// dl.push_back(1);
+    /// assert_eq!(dl.back(), Some(&1));
+    ///
+    /// match dl.back_mut() {
+    ///     None => {},
+    ///     Some(x) => *x = 5,
+    /// }
+    /// assert_eq!(dl.back(), Some(&5));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn back_mut(&mut self) -> Option<&mut T> {
+        unsafe {
+            // Exclusive access to `self` guarantees no aliasing of the tail node.
+            self.tail.as_mut().map(|node| &mut node.as_mut().element)
+        }
+    }
+
+    /// Adds an element first in the list.
+    ///
+    /// This operation should compute in O(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    ///
+    /// dl.push_front(2);
+    /// assert_eq!(dl.front().unwrap(), &2);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.front().unwrap(), &1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_front(&mut self, elt: T) {
+        // Allocate the node here; the private helper does the link surgery.
+        self.push_front_node(box Node::new(elt));
+    }
+
+    /// Removes the first element and returns it, or `None` if the list is
+    /// empty.
+    ///
+    /// This operation should compute in O(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    /// assert_eq!(d.pop_front(), None);
+    ///
+    /// d.push_front(1);
+    /// d.push_front(3);
+    /// assert_eq!(d.pop_front(), Some(3));
+    /// assert_eq!(d.pop_front(), Some(1));
+    /// assert_eq!(d.pop_front(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop_front(&mut self) -> Option<T> {
+        // Unlink the node, then unwrap its payload (freeing the allocation).
+        self.pop_front_node().map(Node::into_element)
+    }
+
+    /// Appends an element to the back of a list
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    /// d.push_back(1);
+    /// d.push_back(3);
+    /// assert_eq!(3, *d.back().unwrap());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_back(&mut self, elt: T) {
+        // Allocate the node here; the private helper does the link surgery.
+        self.push_back_node(box Node::new(elt));
+    }
+
+    /// Removes the last element from a list and returns it, or `None` if
+    /// it is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    /// assert_eq!(d.pop_back(), None);
+    /// d.push_back(1);
+    /// d.push_back(3);
+    /// assert_eq!(d.pop_back(), Some(3));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop_back(&mut self) -> Option<T> {
+        // Unlink the node, then unwrap its payload (freeing the allocation).
+        self.pop_back_node().map(Node::into_element)
+    }
+
+    /// Splits the list into two at the given index. Returns everything after the given index,
+    /// including the index.
+    ///
+    /// This operation should compute in O(n) time.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `at > len`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    ///
+    /// d.push_front(1);
+    /// d.push_front(2);
+    /// d.push_front(3);
+    ///
+    /// let mut splitted = d.split_off(2);
+    ///
+    /// assert_eq!(splitted.pop_front(), Some(1));
+    /// assert_eq!(splitted.pop_front(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
+        let len = self.len();
+        assert!(at <= len, "Cannot split off at a nonexistent index");
+        // Trivial splits: take everything or take nothing.
+        if at == 0 {
+            return mem::replace(self, Self::new());
+        } else if at == len {
+            return Self::new();
+        }
+
+        // Below, we iterate towards the `i-1`th node, either from the start or the end,
+        // depending on which would be faster.
+        let split_node = if at - 1 <= len - 1 - (at - 1) {
+            let mut iter = self.iter_mut();
+            // instead of skipping using .skip() (which creates a new struct),
+            // we skip manually so we can access the head field without
+            // depending on implementation details of Skip
+            for _ in 0..at - 1 {
+                iter.next();
+            }
+            iter.head
+        } else {
+            // better off starting from the end
+            let mut iter = self.iter_mut();
+            for _ in 0..len - 1 - (at - 1) {
+                iter.next_back();
+            }
+            iter.tail
+        };
+
+        // The split node is the new tail node of the first part and owns
+        // the head of the second part.
+        let second_part_head;
+
+        unsafe {
+            // `split_node` is `Some` here because 0 < at < len was ensured above.
+            second_part_head = split_node.unwrap().as_mut().next.take();
+            if let Some(mut head) = second_part_head {
+                // Sever the backward link so the second list starts cleanly.
+                head.as_mut().prev = None;
+            }
+        }
+
+        let second_part = LinkedList {
+            head: second_part_head,
+            tail: self.tail,
+            len: len - at,
+            marker: PhantomData,
+        };
+
+        // Fix the tail ptr of the first part
+        self.tail = split_node;
+        self.len = at;
+
+        second_part
+    }
+
+    /// Creates an iterator which uses a closure to determine if an element should be removed.
+    ///
+    /// If the closure returns true, then the element is removed and yielded.
+    /// If the closure returns false, the element will remain in the list and will not be yielded
+    /// by the iterator.
+    ///
+    /// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
+    /// whether you choose to keep or remove it.
+    ///
+    /// # Examples
+    ///
+    /// Splitting a list into evens and odds, reusing the original list:
+    ///
+    /// ```
+    /// #![feature(drain_filter)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut numbers: LinkedList<u32> = LinkedList::new();
+    /// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
+    ///
+    /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
+    /// let odds = numbers;
+    ///
+    /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
+    /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
+    /// ```
+    #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F>
+        where F: FnMut(&mut T) -> bool
+    {
+        // avoid borrow issues.
+        let it = self.head;
+        let old_len = self.len;
+
+        // The drain iterator walks `it` forward, unlinking nodes the
+        // predicate selects; it borrows the list for fix-ups.
+        DrainFilter {
+            list: self,
+            it: it,
+            pred: filter,
+            idx: 0,
+            old_len: old_len,
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
+    // Pop every node so each `Box<Node<T>>` (and its element) is freed.
+    fn drop(&mut self) {
+        while let Some(_) = self.pop_front_node() {}
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a T> {
+        // `len` (not the pointers) decides termination, so the forward and
+        // backward cursors never walk past each other.
+        if self.len == 0 {
+            None
+        } else {
+            self.head.map(|node| unsafe {
+                // Need an unbound lifetime to get 'a
+                let node = &*node.as_ptr();
+                self.len -= 1;
+                self.head = node.next;
+                &node.element
+            })
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // Exact: the remaining count is tracked directly.
+        (self.len, Some(self.len))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a T> {
+        // Mirror of `next`: consume from the tail, guarded by the shared `len`.
+        if self.len == 0 {
+            None
+        } else {
+            self.tail.map(|node| unsafe {
+                // Need an unbound lifetime to get 'a
+                let node = &*node.as_ptr();
+                self.len -= 1;
+                self.tail = node.prev;
+                &node.element
+            })
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// `size_hint` is exact, so the default `len` implementation suffices.
+impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// The `len == 0` guard keeps returning `None` after exhaustion.
+impl<'a, T> FusedIterator for Iter<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for IterMut<'a, T> {
+    type Item = &'a mut T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a mut T> {
+        // Guarded by `len` so each node is handed out mutably at most once.
+        if self.len == 0 {
+            None
+        } else {
+            self.head.map(|node| unsafe {
+                // Need an unbound lifetime to get 'a
+                let node = &mut *node.as_ptr();
+                self.len -= 1;
+                self.head = node.next;
+                &mut node.element
+            })
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // Exact: the remaining count is tracked directly.
+        (self.len, Some(self.len))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a mut T> {
+ if self.len == 0 {
+ None
+ } else {
+ self.tail.map(|node| unsafe {
+ // Need an unbound lifetime to get 'a
+ let node = &mut *node.as_ptr();
+ self.len -= 1;
+ self.tail = node.prev;
+ &mut node.element
+ })
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<'a, T> FusedIterator for IterMut<'a, T> {}
+
+impl<'a, T> IterMut<'a, T> {
+ /// Inserts the given element just after the element most recently returned by `.next()`.
+ /// The inserted element does not appear in the iteration.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(linked_list_extras)]
+ ///
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list: LinkedList<_> = vec![1, 3, 4].into_iter().collect();
+ ///
+ /// {
+ /// let mut it = list.iter_mut();
+ /// assert_eq!(it.next().unwrap(), &1);
+ /// // insert `2` after `1`
+ /// it.insert_next(2);
+ /// }
+ /// {
+ /// let vec: Vec<_> = list.into_iter().collect();
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// }
+ /// ```
+ #[inline]
+ #[unstable(feature = "linked_list_extras",
+ reason = "this is probably better handled by a cursor type -- we'll see",
+ issue = "27794")]
+ pub fn insert_next(&mut self, element: T) {
+ match self.head {
+ None => self.list.push_back(element),
+ Some(mut head) => unsafe {
+ let mut prev = match head.as_ref().prev {
+ None => return self.list.push_front(element),
+ Some(prev) => prev,
+ };
+
+ let node = Some(Box::into_raw_non_null(box Node {
+ next: Some(head),
+ prev: Some(prev),
+ element,
+ }));
+
+ prev.as_mut().next = node;
+ head.as_mut().prev = node;
+
+ self.list.len += 1;
+ },
+ }
+ }
+
+ /// Provides a reference to the next element, without changing the iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(linked_list_extras)]
+ ///
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
+ ///
+ /// let mut it = list.iter_mut();
+ /// assert_eq!(it.next().unwrap(), &1);
+ /// assert_eq!(it.peek_next().unwrap(), &2);
+ /// // We just peeked at 2, so it was not consumed from the iterator.
+ /// assert_eq!(it.next().unwrap(), &2);
+ /// ```
+ #[inline]
+ #[unstable(feature = "linked_list_extras",
+ reason = "this is probably better handled by a cursor type -- we'll see",
+ issue = "27794")]
+ pub fn peek_next(&mut self) -> Option<&mut T> {
+ if self.len == 0 {
+ None
+ } else {
+ unsafe {
+ self.head.as_mut().map(|node| &mut node.as_mut().element)
+ }
+ }
+ }
+}
+
+/// An iterator produced by calling `drain_filter` on LinkedList.
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+pub struct DrainFilter<'a, T: 'a, F: 'a>
+ where F: FnMut(&mut T) -> bool,
+{
+ list: &'a mut LinkedList<T>,
+ it: Option<NonNull<Node<T>>>,
+ pred: F,
+ idx: usize,
+ old_len: usize,
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<'a, T, F> Iterator for DrainFilter<'a, T, F>
+ where F: FnMut(&mut T) -> bool,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ while let Some(mut node) = self.it {
+ unsafe {
+ self.it = node.as_ref().next;
+ self.idx += 1;
+
+ if (self.pred)(&mut node.as_mut().element) {
+ self.list.unlink_node(node);
+ return Some(Box::from_raw(node.as_ptr()).element);
+ }
+ }
+ }
+
+ None
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, Some(self.old_len - self.idx))
+ }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<'a, T, F> Drop for DrainFilter<'a, T, F>
+ where F: FnMut(&mut T) -> bool,
+{
+ fn drop(&mut self) {
+ self.for_each(drop);
+ }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<'a, T: 'a + fmt::Debug, F> fmt::Debug for DrainFilter<'a, T, F>
+ where F: FnMut(&mut T) -> bool
+{
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("DrainFilter")
+ .field(&self.list)
+ .finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.list.pop_front()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.list.len, Some(self.list.len))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.list.pop_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> FromIterator<T> for LinkedList<T> {
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+ let mut list = Self::new();
+ list.extend(iter);
+ list
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for LinkedList<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ /// Consumes the list into an iterator yielding elements by value.
+ #[inline]
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter { list: self }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a LinkedList<T> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
+ type Item = &'a mut T;
+ type IntoIter = IterMut<'a, T>;
+
+ fn into_iter(self) -> IterMut<'a, T> {
+ self.iter_mut()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Extend<T> for LinkedList<T> {
+ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ <Self as SpecExtend<I>>::spec_extend(self, iter);
+ }
+}
+
+impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
+ default fn spec_extend(&mut self, iter: I) {
+ for elt in iter {
+ self.push_back(elt);
+ }
+ }
+}
+
+impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
+ fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
+ self.append(other);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().cloned());
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialEq> PartialEq for LinkedList<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.iter().eq(other)
+ }
+
+ fn ne(&self, other: &Self) -> bool {
+ self.len() != other.len() || self.iter().ne(other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Eq> Eq for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialOrd> PartialOrd for LinkedList<T> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.iter().partial_cmp(other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Ord for LinkedList<T> {
+ #[inline]
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.iter().cmp(other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for LinkedList<T> {
+ fn clone(&self) -> Self {
+ self.iter().cloned().collect()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_list().entries(self).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Hash> Hash for LinkedList<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.len().hash(state);
+ for elt in self {
+ elt.hash(state);
+ }
+ }
+}
+
+// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
+ x
+ }
+ fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
+ x
+ }
+ fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
+ x
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Send> Send for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<'a, T: Send> Send for IterMut<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {}
+
+#[cfg(test)]
+mod tests {
+ use std::thread;
+ use std::vec::Vec;
+
+ use rand::{thread_rng, Rng};
+
+ use super::{LinkedList, Node};
+
+ #[cfg(test)]
+ fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
+ v.iter().cloned().collect()
+ }
+
+ pub fn check_links<T>(list: &LinkedList<T>) {
+ unsafe {
+ let mut len = 0;
+ let mut last_ptr: Option<&Node<T>> = None;
+ let mut node_ptr: &Node<T>;
+ match list.head {
+ None => {
+ // tail node should also be None.
+ assert!(list.tail.is_none());
+ assert_eq!(0, list.len);
+ return;
+ }
+ Some(node) => node_ptr = &*node.as_ptr(),
+ }
+ loop {
+ match (last_ptr, node_ptr.prev) {
+ (None, None) => {}
+ (None, _) => panic!("prev link for head"),
+ (Some(p), Some(pptr)) => {
+ assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
+ }
+ _ => panic!("prev link is none, not good"),
+ }
+ match node_ptr.next {
+ Some(next) => {
+ last_ptr = Some(node_ptr);
+ node_ptr = &*next.as_ptr();
+ len += 1;
+ }
+ None => {
+ len += 1;
+ break;
+ }
+ }
+ }
+
+ // verify that the tail node points to the last node.
+ let tail = list.tail.as_ref().expect("some tail node").as_ref();
+ assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
+ // check that len matches interior links.
+ assert_eq!(len, list.len);
+ }
+ }
+
+ #[test]
+ fn test_append() {
+ // Empty to empty
+ {
+ let mut m = LinkedList::<i32>::new();
+ let mut n = LinkedList::new();
+ m.append(&mut n);
+ check_links(&m);
+ assert_eq!(m.len(), 0);
+ assert_eq!(n.len(), 0);
+ }
+ // Non-empty to empty
+ {
+ let mut m = LinkedList::new();
+ let mut n = LinkedList::new();
+ n.push_back(2);
+ m.append(&mut n);
+ check_links(&m);
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.pop_back(), Some(2));
+ assert_eq!(n.len(), 0);
+ check_links(&m);
+ }
+ // Empty to non-empty
+ {
+ let mut m = LinkedList::new();
+ let mut n = LinkedList::new();
+ m.push_back(2);
+ m.append(&mut n);
+ check_links(&m);
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.pop_back(), Some(2));
+ check_links(&m);
+ }
+
+ // Non-empty to non-empty
+ let v = vec![1, 2, 3, 4, 5];
+ let u = vec![9, 8, 1, 2, 3, 4, 5];
+ let mut m = list_from(&v);
+ let mut n = list_from(&u);
+ m.append(&mut n);
+ check_links(&m);
+ let mut sum = v;
+ sum.extend_from_slice(&u);
+ assert_eq!(sum.len(), m.len());
+ for elt in sum {
+ assert_eq!(m.pop_front(), Some(elt))
+ }
+ assert_eq!(n.len(), 0);
+ // let's make sure it's working properly, since we
+ // did some direct changes to private members
+ n.push_back(3);
+ assert_eq!(n.len(), 1);
+ assert_eq!(n.pop_front(), Some(3));
+ check_links(&n);
+ }
+
+ #[test]
+ fn test_insert_prev() {
+ let mut m = list_from(&[0, 2, 4, 6, 8]);
+ let len = m.len();
+ {
+ let mut it = m.iter_mut();
+ it.insert_next(-2);
+ loop {
+ match it.next() {
+ None => break,
+ Some(elt) => {
+ it.insert_next(*elt + 1);
+ match it.peek_next() {
+ Some(x) => assert_eq!(*x, *elt + 2),
+ None => assert_eq!(8, *elt),
+ }
+ }
+ }
+ }
+ it.insert_next(0);
+ it.insert_next(1);
+ }
+ check_links(&m);
+ assert_eq!(m.len(), 3 + len * 2);
+ assert_eq!(m.into_iter().collect::<Vec<_>>(),
+ [-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]);
+ }
+
+ #[test]
+ #[cfg_attr(target_os = "emscripten", ignore)]
+ fn test_send() {
+ let n = list_from(&[1, 2, 3]);
+ thread::spawn(move || {
+ check_links(&n);
+ let a: &[_] = &[&1, &2, &3];
+ assert_eq!(a, &*n.iter().collect::<Vec<_>>());
+ })
+ .join()
+ .ok()
+ .unwrap();
+ }
+
+ #[test]
+ fn test_fuzz() {
+ for _ in 0..25 {
+ fuzz_test(3);
+ fuzz_test(16);
+ fuzz_test(189);
+ }
+ }
+
+ #[test]
+ fn test_26021() {
+ // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
+ // This caused the RHS's dtor to walk up into the LHS at drop and delete all of
+ // its nodes.
+ //
+ // https://github.com/rust-lang/rust/issues/26021
+ let mut v1 = LinkedList::new();
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+ let _ = v1.split_off(3); // Dropping this now should not cause laundry consumption
+ assert_eq!(v1.len(), 3);
+
+ assert_eq!(v1.iter().len(), 3);
+ assert_eq!(v1.iter().collect::<Vec<_>>().len(), 3);
+ }
+
+ #[test]
+ fn test_split_off() {
+ let mut v1 = LinkedList::new();
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+
+ // test all splits
+ for ix in 0..1 + v1.len() {
+ let mut a = v1.clone();
+ let b = a.split_off(ix);
+ check_links(&a);
+ check_links(&b);
+ a.extend(b);
+ assert_eq!(v1, a);
+ }
+ }
+
+ #[cfg(test)]
+ fn fuzz_test(sz: i32) {
+ let mut m: LinkedList<_> = LinkedList::new();
+ let mut v = vec![];
+ for i in 0..sz {
+ check_links(&m);
+ let r: u8 = thread_rng().next_u32() as u8;
+ match r % 6 {
+ 0 => {
+ m.pop_back();
+ v.pop();
+ }
+ 1 => {
+ if !v.is_empty() {
+ m.pop_front();
+ v.remove(0);
+ }
+ }
+ 2 | 4 => {
+ m.push_front(-i);
+ v.insert(0, -i);
+ }
+ 3 | 5 | _ => {
+ m.push_back(i);
+ v.push(i);
+ }
+ }
+ }
+
+ check_links(&m);
+
+ let mut i = 0;
+ for (a, &b) in m.into_iter().zip(&v) {
+ i += 1;
+ assert_eq!(a, b);
+ }
+ assert_eq!(i, v.len());
+ }
+
+ #[test]
+ fn drain_filter_test() {
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();
+
+ check_links(&m);
+
+ assert_eq!(deleted, &[1, 2, 3]);
+ assert_eq!(m.into_iter().collect::<Vec<_>>(), &[4, 5, 6]);
+ }
+
+ #[test]
+ fn drain_to_empty_test() {
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();
+
+ check_links(&m);
+
+ assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]);
+ assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
+ }
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Collection types.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+pub mod binary_heap;
+mod btree;
+pub mod linked_list;
+pub mod vec_deque;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub mod btree_map {
+ //! A map based on a B-Tree.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub use super::btree::map::*;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub mod btree_set {
+ //! A set based on a B-Tree.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub use super::btree::set::*;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use self::binary_heap::BinaryHeap;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use self::btree_map::BTreeMap;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use self::btree_set::BTreeSet;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use self::linked_list::LinkedList;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use self::vec_deque::VecDeque;
+
+use alloc::{AllocErr, LayoutErr};
+
+/// Augments `AllocErr` with a CapacityOverflow variant.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+pub enum CollectionAllocErr {
+ /// Error due to the computed capacity exceeding the collection's maximum
+ /// (usually `isize::MAX` bytes).
+ CapacityOverflow,
+ /// Error due to the allocator (see the `AllocErr` type's docs).
+ AllocErr,
+}
+
+#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+impl From<AllocErr> for CollectionAllocErr {
+ #[inline]
+ fn from(AllocErr: AllocErr) -> Self {
+ CollectionAllocErr::AllocErr
+ }
+}
+
+#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+impl From<LayoutErr> for CollectionAllocErr {
+ #[inline]
+ fn from(_: LayoutErr) -> Self {
+ CollectionAllocErr::CapacityOverflow
+ }
+}
+
+/// An intermediate trait for specialization of `Extend`.
+#[doc(hidden)]
+trait SpecExtend<I: IntoIterator> {
+ /// Extends `self` with the contents of the given iterator.
+ fn spec_extend(&mut self, iter: I);
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A double-ended queue implemented with a growable ring buffer.
+//!
+//! This queue has `O(1)` amortized inserts and removals from both ends of the
+//! container. It also has `O(1)` indexing like a vector. The contained elements
+//! are not required to be copyable, and the queue will be sendable if the
+//! contained type is sendable.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::Ordering;
+use core::fmt;
+use core::iter::{repeat, FromIterator, FusedIterator};
+use core::mem;
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{Index, IndexMut, RangeBounds};
+use core::ptr;
+use core::ptr::NonNull;
+use core::slice;
+
+use core::hash::{Hash, Hasher};
+use core::cmp;
+
+use collections::CollectionAllocErr;
+use raw_vec::RawVec;
+use vec::Vec;
+
+const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
+const MINIMUM_CAPACITY: usize = 1; // 2 - 1
+#[cfg(target_pointer_width = "32")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (32 - 1); // Largest possible power of two
+#[cfg(target_pointer_width = "64")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (64 - 1); // Largest possible power of two
+
+/// A double-ended queue implemented with a growable ring buffer.
+///
+/// The "default" usage of this type as a queue is to use [`push_back`] to add to
+/// the queue, and [`pop_front`] to remove from the queue. [`extend`] and [`append`]
+/// push onto the back in this manner, and iterating over `VecDeque` goes front
+/// to back.
+///
+/// [`push_back`]: #method.push_back
+/// [`pop_front`]: #method.pop_front
+/// [`extend`]: #method.extend
+/// [`append`]: #method.append
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct VecDeque<T> {
+ // tail and head are pointers into the buffer. Tail always points
+ // to the first element that could be read, Head always points
+ // to where data should be written.
+ // If tail == head the buffer is empty. The length of the ringbuffer
+ // is defined as the distance between the two.
+ tail: usize,
+ head: usize,
+ buf: RawVec<T>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for VecDeque<T> {
+ fn clone(&self) -> VecDeque<T> {
+ self.iter().cloned().collect()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for VecDeque<T> {
+ fn drop(&mut self) {
+ let (front, back) = self.as_mut_slices();
+ unsafe {
+ // use drop for [T]
+ ptr::drop_in_place(front);
+ ptr::drop_in_place(back);
+ }
+ // RawVec handles deallocation
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for VecDeque<T> {
+ /// Creates an empty `VecDeque<T>`.
+ #[inline]
+ fn default() -> VecDeque<T> {
+ VecDeque::new()
+ }
+}
+
+impl<T> VecDeque<T> {
+ /// Marginally more convenient
+ #[inline]
+ fn ptr(&self) -> *mut T {
+ self.buf.ptr()
+ }
+
+ /// Marginally more convenient
+ #[inline]
+ fn cap(&self) -> usize {
+ if mem::size_of::<T>() == 0 {
+ // For zero sized types, we are always at maximum capacity
+ MAXIMUM_ZST_CAPACITY
+ } else {
+ self.buf.cap()
+ }
+ }
+
+ /// Turn ptr into a slice
+ #[inline]
+ unsafe fn buffer_as_slice(&self) -> &[T] {
+ slice::from_raw_parts(self.ptr(), self.cap())
+ }
+
+ /// Turn ptr into a mut slice
+ #[inline]
+ unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
+ slice::from_raw_parts_mut(self.ptr(), self.cap())
+ }
+
+ /// Moves an element out of the buffer
+ #[inline]
+ unsafe fn buffer_read(&mut self, off: usize) -> T {
+ ptr::read(self.ptr().offset(off as isize))
+ }
+
+ /// Writes an element into the buffer, moving it.
+ #[inline]
+ unsafe fn buffer_write(&mut self, off: usize, value: T) {
+ ptr::write(self.ptr().offset(off as isize), value);
+ }
+
+ /// Returns `true` if and only if the buffer is at full capacity.
+ #[inline]
+ fn is_full(&self) -> bool {
+ self.cap() - self.len() == 1
+ }
+
+ /// Returns the index in the underlying buffer for a given logical element
+ /// index.
+ #[inline]
+ fn wrap_index(&self, idx: usize) -> usize {
+ wrap_index(idx, self.cap())
+ }
+
+ /// Returns the index in the underlying buffer for a given logical element
+ /// index + addend.
+ #[inline]
+ fn wrap_add(&self, idx: usize, addend: usize) -> usize {
+ wrap_index(idx.wrapping_add(addend), self.cap())
+ }
+
+ /// Returns the index in the underlying buffer for a given logical element
+ /// index - subtrahend.
+ #[inline]
+ fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize {
+ wrap_index(idx.wrapping_sub(subtrahend), self.cap())
+ }
+
+ /// Copies a contiguous block of memory len long from src to dst
+ #[inline]
+ unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
+ debug_assert!(dst + len <= self.cap(),
+ "cpy dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap());
+ debug_assert!(src + len <= self.cap(),
+ "cpy dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap());
+ ptr::copy(self.ptr().offset(src as isize),
+ self.ptr().offset(dst as isize),
+ len);
+ }
+
+ /// Copies a contiguous block of memory len long from src to dst
+ #[inline]
+ unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
+ debug_assert!(dst + len <= self.cap(),
+ "cno dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap());
+ debug_assert!(src + len <= self.cap(),
+ "cno dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap());
+ ptr::copy_nonoverlapping(self.ptr().offset(src as isize),
+ self.ptr().offset(dst as isize),
+ len);
+ }
+
+ /// Copies a potentially wrapping block of memory len long from src to dst.
+ /// (abs(dst - src) + len) must be no larger than cap() (There must be at
+ /// most one continuous overlapping region between src and dst).
+ unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
+ #[allow(dead_code)]
+ fn diff(a: usize, b: usize) -> usize {
+ if a <= b { b - a } else { a - b }
+ }
+ debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
+ "wrc dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap());
+
+ if src == dst || len == 0 {
+ return;
+ }
+
+ let dst_after_src = self.wrap_sub(dst, src) < len;
+
+ let src_pre_wrap_len = self.cap() - src;
+ let dst_pre_wrap_len = self.cap() - dst;
+ let src_wraps = src_pre_wrap_len < len;
+ let dst_wraps = dst_pre_wrap_len < len;
+
+ match (dst_after_src, src_wraps, dst_wraps) {
+ (_, false, false) => {
+ // src doesn't wrap, dst doesn't wrap
+ //
+ // S . . .
+ // 1 [_ _ A A B B C C _]
+ // 2 [_ _ A A A A B B _]
+ // D . . .
+ //
+ self.copy(dst, src, len);
+ }
+ (false, false, true) => {
+ // dst before src, src doesn't wrap, dst wraps
+ //
+ // S . . .
+ // 1 [A A B B _ _ _ C C]
+ // 2 [A A B B _ _ _ A A]
+ // 3 [B B B B _ _ _ A A]
+ // . . D .
+ //
+ self.copy(dst, src, dst_pre_wrap_len);
+ self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+ }
+ (true, false, true) => {
+ // src before dst, src doesn't wrap, dst wraps
+ //
+ // S . . .
+ // 1 [C C _ _ _ A A B B]
+ // 2 [B B _ _ _ A A B B]
+ // 3 [B B _ _ _ A A A A]
+ // . . D .
+ //
+ self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+ self.copy(dst, src, dst_pre_wrap_len);
+ }
+ (false, true, false) => {
+ // dst before src, src wraps, dst doesn't wrap
+ //
+ // . . S .
+ // 1 [C C _ _ _ A A B B]
+ // 2 [C C _ _ _ B B B B]
+ // 3 [C C _ _ _ B B C C]
+ // D . . .
+ //
+ self.copy(dst, src, src_pre_wrap_len);
+ self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+ }
+ (true, true, false) => {
+ // src before dst, src wraps, dst doesn't wrap
+ //
+ // . . S .
+ // 1 [A A B B _ _ _ C C]
+ // 2 [A A A A _ _ _ C C]
+ // 3 [C C A A _ _ _ C C]
+ // D . . .
+ //
+ self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+ self.copy(dst, src, src_pre_wrap_len);
+ }
+ (false, true, true) => {
+ // dst before src, src wraps, dst wraps
+ //
+ // . . . S .
+ // 1 [A B C D _ E F G H]
+ // 2 [A B C D _ E G H H]
+ // 3 [A B C D _ E G H A]
+ // 4 [B C C D _ E G H A]
+ // . . D . .
+ //
+ debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
+ let delta = dst_pre_wrap_len - src_pre_wrap_len;
+ self.copy(dst, src, src_pre_wrap_len);
+ self.copy(dst + src_pre_wrap_len, 0, delta);
+ self.copy(0, delta, len - dst_pre_wrap_len);
+ }
+ (true, true, true) => {
+ // src before dst, src wraps, dst wraps
+ //
+ // . . S . .
+ // 1 [A B C D _ E F G H]
+ // 2 [A A B D _ E F G H]
+ // 3 [H A B D _ E F G H]
+ // 4 [H A B D _ E F F G]
+ // . . . D .
+ //
+ debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
+ let delta = src_pre_wrap_len - dst_pre_wrap_len;
+ self.copy(delta, 0, len - src_pre_wrap_len);
+ self.copy(0, self.cap() - delta, delta);
+ self.copy(dst, src, dst_pre_wrap_len);
+ }
+ }
+ }
+
+ /// Frobs the head and tail sections around to handle the fact that we
+ /// just reallocated. Unsafe because it trusts old_cap.
+ #[inline]
+ unsafe fn handle_cap_increase(&mut self, old_cap: usize) {
+ let new_cap = self.cap();
+
+ // Move the shortest contiguous section of the ring buffer
+ // T H
+ // [o o o o o o o . ]
+ // T H
+ // A [o o o o o o o . . . . . . . . . ]
+ // H T
+ // [o o . o o o o o ]
+ // T H
+ // B [. . . o o o o o o o . . . . . . ]
+ // H T
+ // [o o o o o . o o ]
+ // H T
+ // C [o o o o o . . . . . . . . . o o ]
+
+ if self.tail <= self.head {
+ // A
+ // Nop
+ } else if self.head < old_cap - self.tail {
+ // B
+ self.copy_nonoverlapping(old_cap, 0, self.head);
+ self.head += old_cap;
+ debug_assert!(self.head > self.tail);
+ } else {
+ // C
+ let new_tail = new_cap - (old_cap - self.tail);
+ self.copy_nonoverlapping(new_tail, self.tail, old_cap - self.tail);
+ self.tail = new_tail;
+ debug_assert!(self.head < self.tail);
+ }
+ debug_assert!(self.head < self.cap());
+ debug_assert!(self.tail < self.cap());
+ debug_assert!(self.cap().count_ones() == 1);
+ }
+}
+
+impl<T> VecDeque<T> {
+ /// Creates an empty `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let vector: VecDeque<u32> = VecDeque::new();
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn new() -> VecDeque<T> {
+ VecDeque::with_capacity(INITIAL_CAPACITY)
+ }
+
+ /// Creates an empty `VecDeque` with space for at least `n` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let vector: VecDeque<u32> = VecDeque::with_capacity(10);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn with_capacity(n: usize) -> VecDeque<T> {
+ // +1 since the ringbuffer always leaves one space empty
+ let cap = cmp::max(n + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
+ assert!(cap > n, "capacity overflow");
+
+ VecDeque {
+ tail: 0,
+ head: 0,
+ buf: RawVec::with_capacity(cap),
+ }
+ }
+
+ /// Retrieves an element in the `VecDeque` by index.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// buf.push_back(5);
+ /// assert_eq!(buf.get(1), Some(&4));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get(&self, index: usize) -> Option<&T> {
+ if index < self.len() {
+ let idx = self.wrap_add(self.tail, index);
+ unsafe { Some(&*self.ptr().offset(idx as isize)) }
+ } else {
+ None
+ }
+ }
+
+ /// Retrieves an element in the `VecDeque` mutably by index.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// buf.push_back(5);
+ /// if let Some(elem) = buf.get_mut(1) {
+ /// *elem = 7;
+ /// }
+ ///
+ /// assert_eq!(buf[1], 7);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
+ if index < self.len() {
+ let idx = self.wrap_add(self.tail, index);
+ unsafe { Some(&mut *self.ptr().offset(idx as isize)) }
+ } else {
+ None
+ }
+ }
+
+ /// Swaps elements at indices `i` and `j`.
+ ///
+ /// `i` and `j` may be equal.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Panics
+ ///
+ /// Panics if either index is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// buf.push_back(5);
+ /// assert_eq!(buf, [3, 4, 5]);
+ /// buf.swap(0, 2);
+ /// assert_eq!(buf, [5, 4, 3]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn swap(&mut self, i: usize, j: usize) {
+ assert!(i < self.len());
+ assert!(j < self.len());
+ let ri = self.wrap_add(self.tail, i);
+ let rj = self.wrap_add(self.tail, j);
+ unsafe {
+ ptr::swap(self.ptr().offset(ri as isize),
+ self.ptr().offset(rj as isize))
+ }
+ }
+
+ /// Returns the number of elements the `VecDeque` can hold without
+ /// reallocating.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let buf: VecDeque<i32> = VecDeque::with_capacity(10);
+ /// assert!(buf.capacity() >= 10);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn capacity(&self) -> usize {
+ self.cap() - 1
+ }
+
+ /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
+ /// given `VecDeque`. Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it requests. Therefore
+ /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
+ /// insertions are expected.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
+ /// buf.reserve_exact(10);
+ /// assert!(buf.capacity() >= 11);
+ /// ```
+ ///
+ /// [`reserve`]: #method.reserve
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve_exact(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+
+ /// Reserves capacity for at least `additional` more elements to be inserted in the given
+ /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
+ /// buf.reserve(10);
+ /// assert!(buf.capacity() >= 11);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve(&mut self, additional: usize) {
+ let old_cap = self.cap();
+ // One extra slot is needed because the ring buffer never fills
+ // completely; len + 1 can't overflow for a well-formed deque.
+ let used_cap = self.len() + 1;
+ let required = used_cap.checked_add(additional).expect("capacity overflow");
+ // Capacity stays a power of two so index wrapping remains a mask.
+ let new_cap = required.checked_next_power_of_two().expect("capacity overflow");
+
+ if new_cap > old_cap {
+ self.buf.reserve_exact(used_cap, new_cap - used_cap);
+ unsafe {
+ self.handle_cap_increase(old_cap);
+ }
+ }
+ }
+
+ /// Tries to reserve the minimum capacity for exactly `additional` more elements to
+ /// be inserted in the given `VecDeque<T>`. After calling `reserve_exact`,
+ /// capacity will be greater than or equal to `self.len() + additional`.
+ /// Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it
+ /// requests. Therefore capacity can not be relied upon to be precisely
+ /// minimal. Prefer `reserve` if future insertions are expected.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::CollectionAllocErr;
+ /// use std::collections::VecDeque;
+ ///
+ /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, CollectionAllocErr> {
+ /// let mut output = VecDeque::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve_exact(data.len())?;
+ ///
+ /// // Now we know this can't OOM in the middle of our complex work
+ /// output.extend(data.iter().map(|&val| {
+ /// val * 2 + 5 // very complicated
+ /// }));
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+ pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+ // Capacity is always a power of two, so "exact" can't beat the
+ // amortized path; delegate (mirrors `reserve_exact`/`reserve`).
+ self.try_reserve(additional)
+ }
+
+ /// Tries to reserve capacity for at least `additional` more elements to be inserted
+ /// in the given `VecDeque<T>`. The collection may reserve more space to avoid
+ /// frequent reallocations. After calling `reserve`, capacity will be
+ /// greater than or equal to `self.len() + additional`. Does nothing if
+ /// capacity is already sufficient.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::CollectionAllocErr;
+ /// use std::collections::VecDeque;
+ ///
+ /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, CollectionAllocErr> {
+ /// let mut output = VecDeque::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve(data.len())?;
+ ///
+ /// // Now we know this can't OOM in the middle of our complex work
+ /// output.extend(data.iter().map(|&val| {
+ /// val * 2 + 5 // very complicated
+ /// }));
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+ let old_cap = self.cap();
+ // The ring buffer always keeps one slot empty, hence len + 1.
+ let used_cap = self.len() + 1;
+ // Round the requirement up to a power of two; report overflow as an error.
+ let new_cap = match used_cap
+ .checked_add(additional)
+ .and_then(usize::checked_next_power_of_two)
+ {
+ Some(cap) => cap,
+ None => return Err(CollectionAllocErr::CapacityOverflow),
+ };
+
+ if new_cap > old_cap {
+ self.buf.try_reserve_exact(used_cap, new_cap - used_cap)?;
+ unsafe {
+ self.handle_cap_increase(old_cap);
+ }
+ }
+ Ok(())
+ }
+
+ /// Shrinks the capacity of the `VecDeque` as much as possible.
+ ///
+ /// It will drop down as close as possible to the length but the allocator may still inform the
+ /// `VecDeque` that there is space for a few more elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::with_capacity(15);
+ /// buf.extend(0..4);
+ /// assert_eq!(buf.capacity(), 15);
+ /// buf.shrink_to_fit();
+ /// assert!(buf.capacity() >= 4);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn shrink_to_fit(&mut self) {
+ // A lower bound of zero shrinks as far as the ring-buffer invariants
+ // (power-of-two capacity, one always-empty slot) permit.
+ self.shrink_to(0);
+ }
+
+ /// Shrinks the capacity of the `VecDeque` with a lower bound.
+ ///
+ /// The capacity will remain at least as large as both the length
+ /// and the supplied value.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the current capacity is smaller than the supplied
+ /// minimum capacity.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(shrink_to)]
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::with_capacity(15);
+ /// buf.extend(0..4);
+ /// assert_eq!(buf.capacity(), 15);
+ /// buf.shrink_to(6);
+ /// assert!(buf.capacity() >= 6);
+ /// buf.shrink_to(0);
+ /// assert!(buf.capacity() >= 4);
+ /// ```
+ #[unstable(feature = "shrink_to", reason = "new API", issue="0")]
+ pub fn shrink_to(&mut self, min_capacity: usize) {
+ assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity");
+
+ // NOTE(review): the target is rounded up to a power of two and never
+ // drops below MINIMUM_CAPACITY + 1, so the resulting capacity can
+ // exceed `min_capacity`.
+ // +1 since the ringbuffer always leaves one space empty
+ // len + 1 can't overflow for an existing, well-formed ringbuffer.
+ let target_cap = cmp::max(
+ cmp::max(min_capacity, self.len()) + 1,
+ MINIMUM_CAPACITY + 1
+ ).next_power_of_two();
+
+ if target_cap < self.cap() {
+ // There are three cases of interest:
+ // All elements are out of desired bounds
+ // Elements are contiguous, and head is out of desired bounds
+ // Elements are discontiguous, and tail is out of desired bounds
+ //
+ // At all other times, element positions are unaffected.
+ //
+ // Indicates that elements at the head should be moved.
+ let head_outside = self.head == 0 || self.head >= target_cap;
+ // Move elements from out of desired bounds (positions after target_cap)
+ if self.tail >= target_cap && head_outside {
+ // T H
+ // [. . . . . . . . o o o o o o o . ]
+ // T H
+ // [o o o o o o o . ]
+ unsafe {
+ self.copy_nonoverlapping(0, self.tail, self.len());
+ }
+ self.head = self.len();
+ self.tail = 0;
+ } else if self.tail != 0 && self.tail < target_cap && head_outside {
+ // T H
+ // [. . . o o o o o o o . . . . . . ]
+ // H T
+ // [o o . o o o o o ]
+ let len = self.wrap_sub(self.head, target_cap);
+ unsafe {
+ self.copy_nonoverlapping(0, target_cap, len);
+ }
+ self.head = len;
+ debug_assert!(self.head < self.tail);
+ } else if self.tail >= target_cap {
+ // H T
+ // [o o o o o . . . . . . . . . o o ]
+ // H T
+ // [o o o o o . o o ]
+ debug_assert!(self.wrap_sub(self.head, 1) < target_cap);
+ let len = self.cap() - self.tail;
+ let new_tail = target_cap - len;
+ unsafe {
+ self.copy_nonoverlapping(new_tail, self.tail, len);
+ }
+ self.tail = new_tail;
+ debug_assert!(self.head < self.tail);
+ }
+
+ self.buf.shrink_to_fit(target_cap);
+
+ debug_assert!(self.head < self.cap());
+ debug_assert!(self.tail < self.cap());
+ debug_assert!(self.cap().count_ones() == 1);
+ }
+ }
+
+ /// Shortens the `VecDeque`, dropping excess elements from the back.
+ ///
+ /// If `len` is greater than the `VecDeque`'s current length, this has no
+ /// effect.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(10);
+ /// buf.push_back(15);
+ /// assert_eq!(buf, [5, 10, 15]);
+ /// buf.truncate(1);
+ /// assert_eq!(buf, [5]);
+ /// ```
+ #[stable(feature = "deque_extras", since = "1.16.0")]
+ pub fn truncate(&mut self, len: usize) {
+ // Pop from the back until the target length is reached; a target at
+ // or above the current length leaves the deque untouched.
+ while self.len() > len {
+ self.pop_back();
+ }
+ }
+
+ /// Returns a front-to-back iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// let b: &[_] = &[&5, &3, &4];
+ /// let c: Vec<&i32> = buf.iter().collect();
+ /// assert_eq!(&c[..], b);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter(&self) -> Iter<T> {
+ // Snapshot the ring indices; the iterator walks tail -> head.
+ let tail = self.tail;
+ let head = self.head;
+ Iter {
+ ring: unsafe { self.buffer_as_slice() },
+ tail,
+ head,
+ }
+ }
+
+ /// Returns a front-to-back iterator that returns mutable references.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// for num in buf.iter_mut() {
+ /// *num = *num - 2;
+ /// }
+ /// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
+ /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter_mut(&mut self) -> IterMut<T> {
+ // Copy the indices out before taking the mutable borrow of the buffer.
+ let tail = self.tail;
+ let head = self.head;
+ IterMut {
+ ring: unsafe { self.buffer_as_mut_slice() },
+ tail,
+ head,
+ }
+ }
+
+ /// Returns a pair of slices which contain, in order, the contents of the
+ /// `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vector = VecDeque::new();
+ ///
+ /// vector.push_back(0);
+ /// vector.push_back(1);
+ /// vector.push_back(2);
+ ///
+ /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..]));
+ ///
+ /// vector.push_front(10);
+ /// vector.push_front(9);
+ ///
+ /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..]));
+ /// ```
+ #[inline]
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn as_slices(&self) -> (&[T], &[T]) {
+ let (head, tail) = (self.head, self.tail);
+ // Split the raw buffer into the (possibly wrapped) occupied regions.
+ unsafe { RingSlices::ring_slices(self.buffer_as_slice(), head, tail) }
+ }
+
+ /// Returns a pair of slices which contain, in order, the contents of the
+ /// `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vector = VecDeque::new();
+ ///
+ /// vector.push_back(0);
+ /// vector.push_back(1);
+ ///
+ /// vector.push_front(10);
+ /// vector.push_front(9);
+ ///
+ /// vector.as_mut_slices().0[0] = 42;
+ /// vector.as_mut_slices().1[0] = 24;
+ /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..]));
+ /// ```
+ #[inline]
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
+ // Read the indices before the mutable borrow of the buffer begins.
+ let (head, tail) = (self.head, self.tail);
+ unsafe { RingSlices::ring_slices(self.buffer_as_mut_slice(), head, tail) }
+ }
+
+ /// Returns the number of elements in the `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v = VecDeque::new();
+ /// assert_eq!(v.len(), 0);
+ /// v.push_back(1);
+ /// assert_eq!(v.len(), 1);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn len(&self) -> usize {
+ // Distance from tail to head, modulo the (power-of-two) capacity.
+ let (tail, head) = (self.tail, self.head);
+ count(tail, head, self.cap())
+ }
+
+ /// Returns `true` if the `VecDeque` is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v = VecDeque::new();
+ /// assert!(v.is_empty());
+ /// v.push_front(1);
+ /// assert!(!v.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_empty(&self) -> bool {
+ // The deque holds no elements exactly when head and tail coincide.
+ self.head == self.tail
+ }
+
+ /// Create a draining iterator that removes the specified range in the
+ /// `VecDeque` and yields the removed items.
+ ///
+ /// Note 1: The element range is removed even if the iterator is not
+ /// consumed until the end.
+ ///
+ /// Note 2: It is unspecified how many elements are removed from the deque,
+ /// if the `Drain` value is not dropped, but the borrow it holds expires
+ /// (eg. due to mem::forget).
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+ /// let drained = v.drain(2..).collect::<VecDeque<_>>();
+ /// assert_eq!(drained, [3]);
+ /// assert_eq!(v, [1, 2]);
+ ///
+ /// // A full range clears all contents
+ /// v.drain(..);
+ /// assert!(v.is_empty());
+ /// ```
+ #[inline]
+ #[stable(feature = "drain", since = "1.6.0")]
+ pub fn drain<R>(&mut self, range: R) -> Drain<T>
+ where R: RangeBounds<usize>
+ {
+ // Memory safety
+ //
+ // When the Drain is first created, the source deque is shortened to
+ // make sure no uninitialized or moved-from elements are accessible at
+ // all if the Drain's destructor never gets to run.
+ //
+ // Drain will ptr::read out the values to remove.
+ // When finished, the remaining data will be copied back to cover the hole,
+ // and the head/tail values will be restored correctly.
+ //
+ let len = self.len();
+ // Normalize the generic bound pair into half-open [start, end) indices.
+ let start = match range.start_bound() {
+ Included(&n) => n,
+ Excluded(&n) => n + 1,
+ Unbounded => 0,
+ };
+ let end = match range.end_bound() {
+ Included(&n) => n + 1,
+ Excluded(&n) => n,
+ Unbounded => len,
+ };
+ assert!(start <= end, "drain lower bound was too large");
+ assert!(end <= len, "drain upper bound was too large");
+
+ // The deque's elements are parted into three segments:
+ // * self.tail -> drain_tail
+ // * drain_tail -> drain_head
+ // * drain_head -> self.head
+ //
+ // T = self.tail; H = self.head; t = drain_tail; h = drain_head
+ //
+ // We store drain_tail as self.head, and drain_head and self.head as
+ // after_tail and after_head respectively on the Drain. This also
+ // truncates the effective array such that if the Drain is leaked, we
+ // have forgotten about the potentially moved values after the start of
+ // the drain.
+ //
+ // T t h H
+ // [. . . o o x x o o . . .]
+ //
+ let drain_tail = self.wrap_add(self.tail, start);
+ let drain_head = self.wrap_add(self.tail, end);
+ let head = self.head;
+
+ // "forget" about the values after the start of the drain until after
+ // the drain is complete and the Drain destructor is run.
+ self.head = drain_tail;
+
+ Drain {
+ deque: NonNull::from(&mut *self),
+ after_tail: drain_head,
+ after_head: head,
+ iter: Iter {
+ tail: drain_tail,
+ head: drain_head,
+ ring: unsafe { self.buffer_as_mut_slice() },
+ },
+ }
+ }
+
+ /// Clears the `VecDeque`, removing all values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v = VecDeque::new();
+ /// v.push_back(1);
+ /// v.clear();
+ /// assert!(v.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[inline]
+ pub fn clear(&mut self) {
+ // Dropping the `Drain` iterator drops every element and resets the
+ // head/tail indices; the allocation itself is kept.
+ self.drain(..);
+ }
+
+ /// Returns `true` if the `VecDeque` contains an element equal to the
+ /// given value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vector: VecDeque<u32> = VecDeque::new();
+ ///
+ /// vector.push_back(0);
+ /// vector.push_back(1);
+ ///
+ /// assert_eq!(vector.contains(&1), true);
+ /// assert_eq!(vector.contains(&10), false);
+ /// ```
+ #[stable(feature = "vec_deque_contains", since = "1.12.0")]
+ pub fn contains(&self, x: &T) -> bool
+ where T: PartialEq<T>
+ {
+ // Linear scan from front to back, short-circuiting on the first match.
+ self.iter().any(|elem| elem == x)
+ }
+
+ /// Provides a reference to the front element, or `None` if the `VecDeque` is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.front(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// assert_eq!(d.front(), Some(&1));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn front(&self) -> Option<&T> {
+ // Guard clause: indexing an empty deque would panic.
+ if self.is_empty() {
+ return None;
+ }
+ Some(&self[0])
+ }
+
+ /// Provides a mutable reference to the front element, or `None` if the
+ /// `VecDeque` is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.front_mut(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// match d.front_mut() {
+ /// Some(x) => *x = 9,
+ /// None => (),
+ /// }
+ /// assert_eq!(d.front(), Some(&9));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn front_mut(&mut self) -> Option<&mut T> {
+ // Guard clause: indexing an empty deque would panic.
+ if self.is_empty() {
+ return None;
+ }
+ Some(&mut self[0])
+ }
+
+ /// Provides a reference to the back element, or `None` if the `VecDeque` is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.back(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// assert_eq!(d.back(), Some(&2));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn back(&self) -> Option<&T> {
+ // The last element lives at logical index len - 1.
+ match self.len() {
+ 0 => None,
+ len => Some(&self[len - 1]),
+ }
+ }
+
+ /// Provides a mutable reference to the back element, or `None` if the
+ /// `VecDeque` is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.back(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// match d.back_mut() {
+ /// Some(x) => *x = 9,
+ /// None => (),
+ /// }
+ /// assert_eq!(d.back(), Some(&9));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn back_mut(&mut self) -> Option<&mut T> {
+ // The last element lives at logical index len - 1.
+ match self.len() {
+ 0 => None,
+ len => Some(&mut self[len - 1]),
+ }
+ }
+
+ /// Removes the first element and returns it, or `None` if the `VecDeque` is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// d.push_back(1);
+ /// d.push_back(2);
+ ///
+ /// assert_eq!(d.pop_front(), Some(1));
+ /// assert_eq!(d.pop_front(), Some(2));
+ /// assert_eq!(d.pop_front(), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop_front(&mut self) -> Option<T> {
+ if self.is_empty() {
+ return None;
+ }
+ // Advance the tail first, then read the element that was there; the
+ // slot at `old_tail` is logically vacated by the index update.
+ let old_tail = self.tail;
+ self.tail = self.wrap_add(old_tail, 1);
+ unsafe { Some(self.buffer_read(old_tail)) }
+ }
+
+ /// Prepends an element to the `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// d.push_front(1);
+ /// d.push_front(2);
+ /// assert_eq!(d.front(), Some(&2));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push_front(&mut self, value: T) {
+ // Guarantee a free slot, then step the tail back and write into it.
+ self.grow_if_necessary();
+
+ let new_tail = self.wrap_sub(self.tail, 1);
+ self.tail = new_tail;
+ unsafe {
+ self.buffer_write(new_tail, value);
+ }
+ }
+
+ /// Appends an element to the back of the `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(1);
+ /// buf.push_back(3);
+ /// assert_eq!(3, *buf.back().unwrap());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push_back(&mut self, value: T) {
+ // Guarantee a free slot, then write at the old head and advance it.
+ self.grow_if_necessary();
+
+ let old_head = self.head;
+ self.head = self.wrap_add(old_head, 1);
+ unsafe { self.buffer_write(old_head, value) }
+ }
+
+ /// Removes the last element from the `VecDeque` and returns it, or `None` if
+ /// it is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// assert_eq!(buf.pop_back(), None);
+ /// buf.push_back(1);
+ /// buf.push_back(3);
+ /// assert_eq!(buf.pop_back(), Some(3));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop_back(&mut self) -> Option<T> {
+ if self.is_empty() {
+ return None;
+ }
+ // Step the head back first; the element it now points at is the one
+ // being removed.
+ let new_head = self.wrap_sub(self.head, 1);
+ self.head = new_head;
+ unsafe { Some(self.buffer_read(new_head)) }
+ }
+
+ #[inline]
+ fn is_contiguous(&self) -> bool {
+ // The occupied region is contiguous when it does not wrap around the
+ // end of the buffer.
+ self.head >= self.tail
+ }
+
+ /// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the
+ /// last element.
+ ///
+ /// This does not preserve ordering, but is O(1).
+ ///
+ /// Returns `None` if `index` is out of bounds.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// assert_eq!(buf.swap_remove_back(0), None);
+ /// buf.push_back(1);
+ /// buf.push_back(2);
+ /// buf.push_back(3);
+ /// assert_eq!(buf, [1, 2, 3]);
+ ///
+ /// assert_eq!(buf.swap_remove_back(0), Some(1));
+ /// assert_eq!(buf, [3, 2]);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn swap_remove_back(&mut self, index: usize) -> Option<T> {
+ let length = self.len();
+ // Out-of-bounds (including the empty deque) yields None.
+ if index >= length {
+ return None;
+ }
+ // Bring the target to the back unless it already is the back element.
+ if index < length - 1 {
+ self.swap(index, length - 1);
+ }
+ self.pop_back()
+ }
+
+ /// Removes an element from anywhere in the `VecDeque` and returns it,
+ /// replacing it with the first element.
+ ///
+ /// This does not preserve ordering, but is O(1).
+ ///
+ /// Returns `None` if `index` is out of bounds.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// assert_eq!(buf.swap_remove_front(0), None);
+ /// buf.push_back(1);
+ /// buf.push_back(2);
+ /// buf.push_back(3);
+ /// assert_eq!(buf, [1, 2, 3]);
+ ///
+ /// assert_eq!(buf.swap_remove_front(2), Some(3));
+ /// assert_eq!(buf, [2, 1]);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn swap_remove_front(&mut self, index: usize) -> Option<T> {
+ let length = self.len();
+ // Out-of-bounds (including the empty deque) yields None.
+ if index >= length {
+ return None;
+ }
+ // Bring the target to the front unless it already is the front element.
+ if index != 0 {
+ self.swap(index, 0);
+ }
+ self.pop_front()
+ }
+
+ /// Inserts an element at `index` within the `VecDeque`, shifting all elements with indices
+ /// greater than or equal to `index` towards the back.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is greater than `VecDeque`'s length
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vec_deque = VecDeque::new();
+ /// vec_deque.push_back('a');
+ /// vec_deque.push_back('b');
+ /// vec_deque.push_back('c');
+ /// assert_eq!(vec_deque, &['a', 'b', 'c']);
+ ///
+ /// vec_deque.insert(1, 'd');
+ /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn insert(&mut self, index: usize, value: T) {
+ assert!(index <= self.len(), "index out of bounds");
+ // Guarantee a free slot before any positions are computed; growing
+ // later would invalidate them.
+ self.grow_if_necessary();
+
+ // Move the least number of elements in the ring buffer and insert
+ // the given object
+ //
+ // At most len/2 - 1 elements will be moved. O(min(n, n-i))
+ //
+ // There are three main cases:
+ // Elements are contiguous
+ // - special case when tail is 0
+ // Elements are discontiguous and the insert is in the tail section
+ // Elements are discontiguous and the insert is in the head section
+ //
+ // For each of those there are two more cases:
+ // Insert is closer to tail
+ // Insert is closer to head
+ //
+ // Key: H - self.head
+ // T - self.tail
+ // o - Valid element
+ // I - Insertion element
+ // A - The element that should be after the insertion point
+ // M - Indicates element was moved
+
+ let idx = self.wrap_add(self.tail, index);
+
+ let distance_to_tail = index;
+ let distance_to_head = self.len() - index;
+
+ let contiguous = self.is_contiguous();
+
+ match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
+ (true, true, _) if index == 0 => {
+ // push_front
+ //
+ // T
+ // I H
+ // [A o o o o o o . . . . . . . . .]
+ //
+ // H T
+ // [A o o o o o o o . . . . . I]
+ //
+
+ self.tail = self.wrap_sub(self.tail, 1);
+ }
+ (true, true, _) => {
+ unsafe {
+ // contiguous, insert closer to tail:
+ //
+ // T I H
+ // [. . . o o A o o o o . . . . . .]
+ //
+ // T H
+ // [. . o o I A o o o o . . . . . .]
+ // M M
+ //
+ // contiguous, insert closer to tail and tail is 0:
+ //
+ //
+ // T I H
+ // [o o A o o o o . . . . . . . . .]
+ //
+ // H T
+ // [o I A o o o o o . . . . . . . o]
+ // M M
+
+ let new_tail = self.wrap_sub(self.tail, 1);
+
+ self.copy(new_tail, self.tail, 1);
+ // Already moved the tail, so we only copy `index - 1` elements.
+ self.copy(self.tail, self.tail + 1, index - 1);
+
+ self.tail = new_tail;
+ }
+ }
+ (true, false, _) => {
+ unsafe {
+ // contiguous, insert closer to head:
+ //
+ // T I H
+ // [. . . o o o o A o o . . . . . .]
+ //
+ // T H
+ // [. . . o o o o I A o o . . . . .]
+ // M M M
+
+ self.copy(idx + 1, idx, self.head - idx);
+ self.head = self.wrap_add(self.head, 1);
+ }
+ }
+ (false, true, true) => {
+ unsafe {
+ // discontiguous, insert closer to tail, tail section:
+ //
+ // H T I
+ // [o o o o o o . . . . . o o A o o]
+ //
+ // H T
+ // [o o o o o o . . . . o o I A o o]
+ // M M
+
+ self.copy(self.tail - 1, self.tail, index);
+ self.tail -= 1;
+ }
+ }
+ (false, false, true) => {
+ unsafe {
+ // discontiguous, insert closer to head, tail section:
+ //
+ // H T I
+ // [o o . . . . . . . o o o o o A o]
+ //
+ // H T
+ // [o o o . . . . . . o o o o o I A]
+ // M M M M
+
+ // copy elements up to new head
+ self.copy(1, 0, self.head);
+
+ // copy last element into empty spot at bottom of buffer
+ self.copy(0, self.cap() - 1, 1);
+
+ // move elements from idx to end forward not including ^ element
+ self.copy(idx + 1, idx, self.cap() - 1 - idx);
+
+ self.head += 1;
+ }
+ }
+ (false, true, false) if idx == 0 => {
+ unsafe {
+ // discontiguous, insert is closer to tail, head section,
+ // and is at index zero in the internal buffer:
+ //
+ // I H T
+ // [A o o o o o o o o o . . . o o o]
+ //
+ // H T
+ // [A o o o o o o o o o . . o o o I]
+ // M M M
+
+ // copy elements up to new tail
+ self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
+
+ // copy last element into empty spot at bottom of buffer
+ self.copy(self.cap() - 1, 0, 1);
+
+ self.tail -= 1;
+ }
+ }
+ (false, true, false) => {
+ unsafe {
+ // discontiguous, insert closer to tail, head section:
+ //
+ // I H T
+ // [o o o A o o o o o o . . . o o o]
+ //
+ // H T
+ // [o o I A o o o o o o . . o o o o]
+ // M M M M M M
+
+ // copy elements up to new tail
+ self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
+
+ // copy last element into empty spot at bottom of buffer
+ self.copy(self.cap() - 1, 0, 1);
+
+ // move elements from idx-1 to end forward not including ^ element
+ self.copy(0, 1, idx - 1);
+
+ self.tail -= 1;
+ }
+ }
+ (false, false, false) => {
+ unsafe {
+ // discontiguous, insert closer to head, head section:
+ //
+ // I H T
+ // [o o o o A o o . . . . . . o o o]
+ //
+ // H T
+ // [o o o o I A o o . . . . . o o o]
+ // M M M
+
+ self.copy(idx + 1, idx, self.head - idx);
+ self.head += 1;
+ }
+ }
+ }
+
+ // tail might've been changed so we need to recalculate
+ let new_idx = self.wrap_add(self.tail, index);
+ unsafe {
+ self.buffer_write(new_idx, value);
+ }
+ }
+
+ /// Removes and returns the element at `index` from the `VecDeque`.
+ /// Whichever end is closer to the removal point will be moved to make
+ /// room, and all the affected elements will be moved to new positions.
+ /// Returns `None` if `index` is out of bounds.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(1);
+ /// buf.push_back(2);
+ /// buf.push_back(3);
+ /// assert_eq!(buf, [1, 2, 3]);
+ ///
+ /// assert_eq!(buf.remove(1), Some(2));
+ /// assert_eq!(buf, [1, 3]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove(&mut self, index: usize) -> Option<T> {
+ if self.is_empty() || self.len() <= index {
+ return None;
+ }
+
+ // There are three main cases:
+ // Elements are contiguous
+ // Elements are discontiguous and the removal is in the tail section
+ // Elements are discontiguous and the removal is in the head section
+ // - special case when elements are technically contiguous,
+ // but self.head = 0
+ //
+ // For each of those there are two more cases:
+ // Insert is closer to tail
+ // Insert is closer to head
+ //
+ // Key: H - self.head
+ // T - self.tail
+ // o - Valid element
+ // x - Element marked for removal
+ // R - Indicates element that is being removed
+ // M - Indicates element was moved
+
+ let idx = self.wrap_add(self.tail, index);
+
+ // Read the element out first; the copies below overwrite its slot.
+ let elem = unsafe { Some(self.buffer_read(idx)) };
+
+ let distance_to_tail = index;
+ let distance_to_head = self.len() - index;
+
+ let contiguous = self.is_contiguous();
+
+ match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
+ (true, true, _) => {
+ unsafe {
+ // contiguous, remove closer to tail:
+ //
+ // T R H
+ // [. . . o o x o o o o . . . . . .]
+ //
+ // T H
+ // [. . . . o o o o o o . . . . . .]
+ // M M
+
+ self.copy(self.tail + 1, self.tail, index);
+ self.tail += 1;
+ }
+ }
+ (true, false, _) => {
+ unsafe {
+ // contiguous, remove closer to head:
+ //
+ // T R H
+ // [. . . o o o o x o o . . . . . .]
+ //
+ // T H
+ // [. . . o o o o o o . . . . . . .]
+ // M M
+
+ self.copy(idx, idx + 1, self.head - idx - 1);
+ self.head -= 1;
+ }
+ }
+ (false, true, true) => {
+ unsafe {
+ // discontiguous, remove closer to tail, tail section:
+ //
+ // H T R
+ // [o o o o o o . . . . . o o x o o]
+ //
+ // H T
+ // [o o o o o o . . . . . . o o o o]
+ // M M
+
+ self.copy(self.tail + 1, self.tail, index);
+ self.tail = self.wrap_add(self.tail, 1);
+ }
+ }
+ (false, false, false) => {
+ unsafe {
+ // discontiguous, remove closer to head, head section:
+ //
+ // R H T
+ // [o o o o x o o . . . . . . o o o]
+ //
+ // H T
+ // [o o o o o o . . . . . . . o o o]
+ // M M
+
+ self.copy(idx, idx + 1, self.head - idx - 1);
+ self.head -= 1;
+ }
+ }
+ (false, false, true) => {
+ unsafe {
+ // discontiguous, remove closer to head, tail section:
+ //
+ // H T R
+ // [o o o . . . . . . o o o o o x o]
+ //
+ // H T
+ // [o o . . . . . . . o o o o o o o]
+ // M M M M
+ //
+ // or quasi-discontiguous, remove next to head, tail section:
+ //
+ // H T R
+ // [. . . . . . . . . o o o o o x o]
+ //
+ // T H
+ // [. . . . . . . . . o o o o o o .]
+ // M
+
+ // draw in elements in the tail section
+ self.copy(idx, idx + 1, self.cap() - idx - 1);
+
+ // Prevents underflow.
+ if self.head != 0 {
+ // copy first element into empty spot
+ self.copy(self.cap() - 1, 0, 1);
+
+ // move elements in the head section backwards
+ self.copy(0, 1, self.head - 1);
+ }
+
+ self.head = self.wrap_sub(self.head, 1);
+ }
+ }
+ (false, true, false) => {
+ unsafe {
+ // discontiguous, remove closer to tail, head section:
+ //
+ // R H T
+ // [o o x o o o o o o o . . . o o o]
+ //
+ // H T
+ // [o o o o o o o o o o . . . . o o]
+ // M M M M M
+
+ // draw in elements up to idx
+ self.copy(1, 0, idx);
+
+ // copy last element into empty spot
+ self.copy(0, self.cap() - 1, 1);
+
+ // move elements from tail to end forward, excluding the last one
+ self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1);
+
+ self.tail = self.wrap_add(self.tail, 1);
+ }
+ }
+ }
+
+ return elem;
+ }
+
+ /// Splits the `VecDeque` into two at the given index.
+ ///
+ /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`,
+ /// and the returned `VecDeque` contains elements `[at, len)`.
+ ///
+ /// Note that the capacity of `self` does not change.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `at > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<_> = vec![1,2,3].into_iter().collect();
+ /// let buf2 = buf.split_off(1);
+ /// assert_eq!(buf, [1]);
+ /// assert_eq!(buf2, [2, 3]);
+ /// ```
+ #[inline]
+ #[stable(feature = "split_off", since = "1.4.0")]
+ pub fn split_off(&mut self, at: usize) -> Self {
+ let len = self.len();
+ assert!(at <= len, "`at` out of bounds");
+
+ let other_len = len - at;
+ let mut other = VecDeque::with_capacity(other_len);
+
+ // Ownership note: the raw copies below duplicate `other_len` elements
+ // into `other`; shrinking `self.head` afterwards makes `self` forget
+ // its copies, so each element ends up owned exactly once.
+ unsafe {
+ let (first_half, second_half) = self.as_slices();
+
+ let first_len = first_half.len();
+ let second_len = second_half.len();
+ if at < first_len {
+ // `at` lies in the first half.
+ let amount_in_first = first_len - at;
+
+ ptr::copy_nonoverlapping(first_half.as_ptr().offset(at as isize),
+ other.ptr(),
+ amount_in_first);
+
+ // just take all of the second half.
+ ptr::copy_nonoverlapping(second_half.as_ptr(),
+ other.ptr().offset(amount_in_first as isize),
+ second_len);
+ } else {
+ // `at` lies in the second half, need to factor in the elements we skipped
+ // in the first half.
+ let offset = at - first_len;
+ let amount_in_second = second_len - offset;
+ ptr::copy_nonoverlapping(second_half.as_ptr().offset(offset as isize),
+ other.ptr(),
+ amount_in_second);
+ }
+ }
+
+ // Cleanup where the ends of the buffers are
+ self.head = self.wrap_sub(self.head, other_len);
+ other.head = other.wrap_index(other_len);
+
+ other
+ }
+
+ /// Moves all the elements of `other` into `Self`, leaving `other` empty.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new number of elements in self overflows a `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect();
+ /// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect();
+ /// buf.append(&mut buf2);
+ /// assert_eq!(buf, [1, 2, 3, 4]);
+ /// assert_eq!(buf2, []);
+ /// ```
+ #[inline]
+ #[stable(feature = "append", since = "1.4.0")]
+ pub fn append(&mut self, other: &mut Self) {
+ // naive impl
+ // `drain(..)` empties `other` while yielding its elements front-to-back,
+ // and `extend` pushes each one onto the back of `self`.
+ self.extend(other.drain(..));
+ }
+
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all elements `e` such that `f(&e)` returns false.
+ /// This method operates in place and preserves the order of the retained
+ /// elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.extend(1..5);
+ /// buf.retain(|&x| x%2 == 0);
+ /// assert_eq!(buf, [2, 4]);
+ /// ```
+ #[stable(feature = "vec_deque_retain", since = "1.4.0")]
+ pub fn retain<F>(&mut self, mut f: F)
+ where F: FnMut(&T) -> bool
+ {
+ let len = self.len();
+ // Number of elements rejected so far; each retained element is
+ // swapped `del` slots toward the front to close the gap.
+ let mut del = 0;
+ for i in 0..len {
+ if !f(&self[i]) {
+ del += 1;
+ } else if del > 0 {
+ self.swap(i - del, i);
+ }
+ }
+ if del > 0 {
+ // The last `del` slots now hold only rejected elements; drop them.
+ self.truncate(len - del);
+ }
+ }
+
+ // This may panic or abort
+ // Ensures at least one free slot remains in the ring buffer, doubling the
+ // backing allocation when the deque is full.
+ #[inline]
+ fn grow_if_necessary(&mut self) {
+ if self.is_full() {
+ let old_cap = self.cap();
+ self.buf.double();
+ unsafe {
+ // Re-establish the ring layout for the new, larger capacity
+ // (see `handle_cap_increase`, defined earlier in this file).
+ self.handle_cap_increase(old_cap);
+ }
+ debug_assert!(!self.is_full());
+ }
+ }
+}
+
+impl<T: Clone> VecDeque<T> {
+ /// Modifies the `VecDeque` in-place so that `len()` is equal to new_len,
+ /// either by removing excess elements from the back or by appending clones of `value`
+ /// to the back.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(10);
+ /// buf.push_back(15);
+ /// assert_eq!(buf, [5, 10, 15]);
+ ///
+ /// buf.resize(2, 0);
+ /// assert_eq!(buf, [5, 10]);
+ ///
+ /// buf.resize(5, 20);
+ /// assert_eq!(buf, [5, 10, 20, 20, 20]);
+ /// ```
+ #[stable(feature = "deque_extras", since = "1.16.0")]
+ pub fn resize(&mut self, new_len: usize, value: T) {
+ let len = self.len();
+
+ if new_len > len {
+ // Grow: append `new_len - len` clones of `value` to the back.
+ self.extend(repeat(value).take(new_len - len))
+ } else {
+ // Shrink: drop the excess elements from the back.
+ self.truncate(new_len);
+ }
+ }
+}
+
+/// Returns the index in the underlying buffer for a given logical element index.
+#[inline]
+fn wrap_index(index: usize, size: usize) -> usize {
+ // size is always a power of 2
+ debug_assert!(size.is_power_of_two());
+ // Masking by `size - 1` is equivalent to `index % size` for powers of two.
+ index & (size - 1)
+}
+
+/// Returns the two slices that cover the `VecDeque`'s valid range
+trait RingSlices: Sized {
+ fn slice(self, from: usize, to: usize) -> Self;
+ fn split_at(self, i: usize) -> (Self, Self);
+
+ fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) {
+ // When the ring does not wrap (`tail <= head`), all live elements sit
+ // in `buf[tail..head]` and the second returned slice is empty.
+ let contiguous = tail <= head;
+ if contiguous {
+ let (empty, buf) = buf.split_at(0);
+ (buf.slice(tail, head), empty)
+ } else {
+ // Wrapped: the front run is `buf[tail..]`, the back run `buf[..head]`.
+ let (mid, right) = buf.split_at(tail);
+ let (left, _) = mid.split_at(head);
+ (right, left)
+ }
+ }
+}
+
+impl<'a, T> RingSlices for &'a [T] {
+ fn slice(self, from: usize, to: usize) -> Self {
+ &self[from..to]
+ }
+ fn split_at(self, i: usize) -> (Self, Self) {
+ (*self).split_at(i)
+ }
+}
+
+impl<'a, T> RingSlices for &'a mut [T] {
+ fn slice(self, from: usize, to: usize) -> Self {
+ &mut self[from..to]
+ }
+ fn split_at(self, i: usize) -> (Self, Self) {
+ (*self).split_at_mut(i)
+ }
+}
+
+/// Calculate the number of elements left to be read in the buffer
+#[inline]
+fn count(tail: usize, head: usize, size: usize) -> usize {
+ // size is always a power of 2
+ // The mask makes the subtraction come out right even when `head` has
+ // wrapped around past `tail`.
+ (head.wrapping_sub(tail)) & (size - 1)
+}
+
+/// An iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`iter`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.VecDeque.html#method.iter
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+ ring: &'a [T],
+ tail: usize,
+ head: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("Iter")
+ .field(&self.ring)
+ .field(&self.tail)
+ .field(&self.head)
+ .finish()
+ }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Clone for Iter<'a, T> {
+ fn clone(&self) -> Iter<'a, T> {
+ Iter {
+ ring: self.ring,
+ tail: self.tail,
+ head: self.head,
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ // `tail == head` marks the iterator as exhausted.
+ if self.tail == self.head {
+ return None;
+ }
+ let tail = self.tail;
+ self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
+ // SAFETY note: `tail` was checked to be inside the live region above,
+ // and `wrap_index` keeps every index below `ring.len()`.
+ unsafe { Some(self.ring.get_unchecked(tail)) }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = count(self.tail, self.head, self.ring.len());
+ (len, Some(len))
+ }
+
+ fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where F: FnMut(Acc, Self::Item) -> Acc
+ {
+ // Fold over the (at most two) contiguous slices instead of stepping
+ // one wrapped index at a time.
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = front.iter().fold(accum, &mut f);
+ back.iter().fold(accum, &mut f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a T> {
+ if self.tail == self.head {
+ return None;
+ }
+ // `head` points one past the last element, so step it back first.
+ self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
+ unsafe { Some(self.ring.get_unchecked(self.head)) }
+ }
+
+ fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where F: FnMut(Acc, Self::Item) -> Acc
+ {
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = back.iter().rfold(accum, &mut f);
+ front.iter().rfold(accum, &mut f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> ExactSizeIterator for Iter<'a, T> {
+ fn is_empty(&self) -> bool {
+ self.head == self.tail
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<'a, T> FusedIterator for Iter<'a, T> {}
+
+
+/// A mutable iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.VecDeque.html#method.iter_mut
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IterMut<'a, T: 'a> {
+ ring: &'a mut [T],
+ tail: usize,
+ head: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("IterMut")
+ .field(&self.ring)
+ .field(&self.tail)
+ .field(&self.head)
+ .finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for IterMut<'a, T> {
+ type Item = &'a mut T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a mut T> {
+ if self.tail == self.head {
+ return None;
+ }
+ let tail = self.tail;
+ self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
+
+ unsafe {
+ // The raw-pointer round-trip detaches the borrow from `self` so
+ // the returned reference can live for `'a`. Each buffer index is
+ // yielded at most once (`tail` only advances toward `head`), so
+ // no two returned `&mut T` alias.
+ let elem = self.ring.get_unchecked_mut(tail);
+ Some(&mut *(elem as *mut _))
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = count(self.tail, self.head, self.ring.len());
+ (len, Some(len))
+ }
+
+ fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where F: FnMut(Acc, Self::Item) -> Acc
+ {
+ // Fold over the (at most two) contiguous slices instead of stepping
+ // one wrapped index at a time.
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = front.iter_mut().fold(accum, &mut f);
+ back.iter_mut().fold(accum, &mut f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a mut T> {
+ if self.tail == self.head {
+ return None;
+ }
+ // `head` points one past the last element, so step it back first.
+ self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
+
+ unsafe {
+ // Same aliasing argument as in `next`: `head` only retreats toward
+ // `tail`, so each index is handed out at most once.
+ let elem = self.ring.get_unchecked_mut(self.head);
+ Some(&mut *(elem as *mut _))
+ }
+ }
+
+ fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where F: FnMut(Acc, Self::Item) -> Acc
+ {
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = back.iter_mut().rfold(accum, &mut f);
+ front.iter_mut().rfold(accum, &mut f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
+ fn is_empty(&self) -> bool {
+ self.head == self.tail
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<'a, T> FusedIterator for IterMut<'a, T> {}
+
+/// An owning iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`VecDeque`][`VecDeque`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.VecDeque.html#method.into_iter
+/// [`VecDeque`]: struct.VecDeque.html
+#[derive(Clone)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+ // Iteration simply delegates to the wrapped deque, popping it empty;
+ // dropping `IntoIter` drops any elements not yet yielded.
+ inner: VecDeque<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("IntoIter")
+ .field(&self.inner)
+ .finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ // Front-to-back iteration by value is just repeated `pop_front`.
+ self.inner.pop_front()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = self.inner.len();
+ (len, Some(len))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.inner.pop_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+ fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+/// A draining iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.VecDeque.html#method.drain
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a, T: 'a> {
+ // Judging by the `Drop` impl below: `after_tail` is the first index past
+ // the drained range and `after_head` is the deque's original head.
+ after_tail: usize,
+ after_head: usize,
+ iter: Iter<'a, T>,
+ deque: NonNull<VecDeque<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("Drain")
+ .field(&self.after_tail)
+ .field(&self.after_head)
+ .field(&self.iter)
+ .finish()
+ }
+}
+
+// NOTE(review): `Drain` only exposes `T` values, so it can inherit `T`'s
+// thread-safety directly.
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<'a, T: 'a> Drop for Drain<'a, T> {
+ fn drop(&mut self) {
+ // Exhaust the iterator first so every undrained element is dropped.
+ self.for_each(drop);
+
+ let source_deque = unsafe { self.deque.as_mut() };
+
+ // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
+ //
+ // T t h H
+ // [. . . o o x x o o . . .]
+ //
+ let orig_tail = source_deque.tail;
+ let drain_tail = source_deque.head;
+ let drain_head = self.after_tail;
+ let orig_head = self.after_head;
+
+ let tail_len = count(orig_tail, drain_tail, source_deque.cap());
+ let head_len = count(drain_head, orig_head, source_deque.cap());
+
+ // Restore the original head value
+ source_deque.head = orig_head;
+
+ // Close the gap left by the drained range, choosing the cheaper side
+ // to move.
+ match (tail_len, head_len) {
+ (0, 0) => {
+ // Nothing left on either side: reset to the canonical empty state.
+ source_deque.head = 0;
+ source_deque.tail = 0;
+ }
+ (0, _) => {
+ // Nothing before the drained range: the deque now starts after it.
+ source_deque.tail = drain_head;
+ }
+ (_, 0) => {
+ // Nothing after the drained range: the deque now ends before it.
+ source_deque.head = drain_tail;
+ }
+ _ => unsafe {
+ // Elements on both sides: shift the shorter run across the gap.
+ if tail_len <= head_len {
+ source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
+ source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
+ } else {
+ source_deque.head = source_deque.wrap_add(drain_tail, head_len);
+ source_deque.wrap_copy(drain_tail, drain_head, head_len);
+ }
+ },
+ }
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<'a, T: 'a> Iterator for Drain<'a, T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ // `ptr::read` moves the value out; the slot is treated as logically
+ // uninitialized until `Drop` compacts the ring.
+ self.iter.next().map(|elt| unsafe { ptr::read(elt) })
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: PartialEq> PartialEq for VecDeque<A> {
+ fn eq(&self, other: &VecDeque<A>) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+ // Both deques expose their contents as (up to) two slices; compare
+ // them piecewise since the split points may differ.
+ let (sa, sb) = self.as_slices();
+ let (oa, ob) = other.as_slices();
+ if sa.len() == oa.len() {
+ sa == oa && sb == ob
+ } else if sa.len() < oa.len() {
+ // Always divisible in three sections, for example:
+ // self: [a b c|d e f]
+ // other: [0 1 2 3|4 5]
+ // front = 3, mid = 1,
+ // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5]
+ let front = sa.len();
+ let mid = oa.len() - front;
+
+ let (oa_front, oa_mid) = oa.split_at(front);
+ let (sb_mid, sb_back) = sb.split_at(mid);
+ debug_assert_eq!(sa.len(), oa_front.len());
+ debug_assert_eq!(sb_mid.len(), oa_mid.len());
+ debug_assert_eq!(sb_back.len(), ob.len());
+ sa == oa_front && sb_mid == oa_mid && sb_back == ob
+ } else {
+ // Mirror image of the case above with the roles of `self` and
+ // `other` swapped.
+ let front = oa.len();
+ let mid = sa.len() - front;
+
+ let (sa_front, sa_mid) = sa.split_at(front);
+ let (ob_mid, ob_back) = ob.split_at(mid);
+ debug_assert_eq!(sa_front.len(), oa.len());
+ debug_assert_eq!(sa_mid.len(), ob_mid.len());
+ debug_assert_eq!(sb.len(), ob_back.len());
+ sa_front == oa && sa_mid == ob_mid && sb == ob_back
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Eq> Eq for VecDeque<A> {}
+
+// Generates `PartialEq<$Rhs> for $Lhs` by comparing the deque's two halves
+// against the matching split of the slice-like right-hand side. The first
+// arm is a shorthand that defaults the element bound to `Sized`.
+macro_rules! __impl_slice_eq1 {
+ ($Lhs: ty, $Rhs: ty) => {
+ __impl_slice_eq1! { $Lhs, $Rhs, Sized }
+ };
+ ($Lhs: ty, $Rhs: ty, $Bound: ident) => {
+ #[stable(feature = "vec-deque-partial-eq-slice", since = "1.17.0")]
+ impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq<B> {
+ fn eq(&self, other: &$Rhs) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+ let (sa, sb) = self.as_slices();
+ let (oa, ob) = other[..].split_at(sa.len());
+ sa == oa && sb == ob
+ }
+ }
+ }
+}
+
+__impl_slice_eq1! { VecDeque<A>, Vec<B> }
+__impl_slice_eq1! { VecDeque<A>, &'b [B] }
+__impl_slice_eq1! { VecDeque<A>, &'b mut [B] }
+
+// One impl per fixed array length (0..=32), matching the standard library's
+// array trait coverage of the time.
+macro_rules! array_impls {
+ ($($N: expr)+) => {
+ $(
+ __impl_slice_eq1! { VecDeque<A>, [B; $N] }
+ __impl_slice_eq1! { VecDeque<A>, &'b [B; $N] }
+ __impl_slice_eq1! { VecDeque<A>, &'b mut [B; $N] }
+ )+
+ }
+}
+
+array_impls! {
+ 0 1 2 3 4 5 6 7 8 9
+ 10 11 12 13 14 15 16 17 18 19
+ 20 21 22 23 24 25 26 27 28 29
+ 30 31 32
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: PartialOrd> PartialOrd for VecDeque<A> {
+ fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> {
+ // Lexicographic comparison, front to back, via the iterator adaptors.
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Ord> Ord for VecDeque<A> {
+ #[inline]
+ fn cmp(&self, other: &VecDeque<A>) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Hash> Hash for VecDeque<A> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ // Hash the length, then every element front-to-back. `hash_slice`
+ // over both halves feeds the elements in logical order, so where the
+ // ring happens to be split does not affect the result.
+ self.len().hash(state);
+ let (a, b) = self.as_slices();
+ Hash::hash_slice(a, state);
+ Hash::hash_slice(b, state);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> Index<usize> for VecDeque<A> {
+ type Output = A;
+
+ // Panics with "Out of bounds access" when `index >= len` (see `expect`).
+ #[inline]
+ fn index(&self, index: usize) -> &A {
+ self.get(index).expect("Out of bounds access")
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> IndexMut<usize> for VecDeque<A> {
+ #[inline]
+ fn index_mut(&mut self, index: usize) -> &mut A {
+ self.get_mut(index).expect("Out of bounds access")
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> FromIterator<A> for VecDeque<A> {
+ fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> VecDeque<A> {
+ let iterator = iter.into_iter();
+ // Pre-reserve using the size hint's lower bound to avoid regrowing
+ // while extending.
+ let (lower, _) = iterator.size_hint();
+ let mut deq = VecDeque::with_capacity(lower);
+ deq.extend(iterator);
+ deq
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for VecDeque<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ /// Consumes the `VecDeque` into a front-to-back iterator yielding elements by
+ /// value.
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter { inner: self }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a VecDeque<T> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut VecDeque<T> {
+ type Item = &'a mut T;
+ type IntoIter = IterMut<'a, T>;
+
+ fn into_iter(self) -> IterMut<'a, T> {
+ self.iter_mut()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> Extend<A> for VecDeque<A> {
+ fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
+ // Elements are appended to the back one at a time; `push_back` grows
+ // the buffer as needed.
+ for elt in iter {
+ self.push_back(elt);
+ }
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().cloned());
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for VecDeque<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_list().entries(self).finish()
+ }
+}
+
+#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
+impl<T> From<Vec<T>> for VecDeque<T> {
+ fn from(mut other: Vec<T>) -> Self {
+ unsafe {
+ // Steal the Vec's allocation and rewrap it as a RawVec.
+ let other_buf = other.as_mut_ptr();
+ let mut buf = RawVec::from_raw_parts(other_buf, other.capacity());
+ let len = other.len();
+ // `buf` now owns the allocation; forget `other` so its destructor
+ // doesn't free it a second time.
+ mem::forget(other);
+
+ // We need to extend the buf if it's not a power of two, too small
+ // or doesn't have at least one free space
+ // (the ring representation keeps one slot unused so that
+ // `head == tail` can unambiguously mean "empty").
+ if !buf.cap().is_power_of_two() || (buf.cap() < (MINIMUM_CAPACITY + 1)) ||
+ (buf.cap() == len) {
+ let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
+ buf.reserve_exact(len, cap - len);
+ }
+
+ // The Vec's elements already sit at the start of the buffer, which
+ // is exactly the contiguous tail=0 layout.
+ VecDeque {
+ tail: 0,
+ head: len,
+ buf,
+ }
+ }
+ }
+}
+
+#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
+impl<T> From<VecDeque<T>> for Vec<T> {
+ fn from(other: VecDeque<T>) -> Self {
+ unsafe {
+ let buf = other.buf.ptr();
+ let len = other.len();
+ let tail = other.tail;
+ let head = other.head;
+ let cap = other.cap();
+
+ // Need to move the ring to the front of the buffer, as vec will expect this.
+ if other.is_contiguous() {
+ // Single block: one (possibly overlapping) copy to offset 0.
+ ptr::copy(buf.offset(tail as isize), buf, len);
+ } else {
+ // Not contiguous, so `head < tail`: `tail - head` is the free
+ // gap between the head-section and the tail-section.
+ if (tail - head) >= cmp::min(cap - tail, head) {
+ // There is enough free space in the centre for the shortest block so we can
+ // do this in at most three copy moves.
+ if (cap - tail) > head {
+ // right hand block is the long one; move that enough for the left
+ ptr::copy(buf.offset(tail as isize),
+ buf.offset((tail - head) as isize),
+ cap - tail);
+ // copy left in the end
+ ptr::copy(buf, buf.offset((cap - head) as isize), head);
+ // shift the new thing to the start
+ ptr::copy(buf.offset((tail - head) as isize), buf, len);
+ } else {
+ // left hand block is the long one, we can do it in two!
+ ptr::copy(buf, buf.offset((cap - tail) as isize), head);
+ ptr::copy(buf.offset(tail as isize), buf, cap - tail);
+ }
+ } else {
+ // Need to use N swaps to move the ring
+ // We can use the space at the end of the ring as a temp store
+
+ let mut left_edge: usize = 0;
+ let mut right_edge: usize = tail;
+
+ // The general problem looks like this
+ // GHIJKLM...ABCDEF - before any swaps
+ // ABCDEFM...GHIJKL - after 1 pass of swaps
+ // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
+ // - then restart the algorithm with a new (smaller) store
+ // Sometimes the temp store is reached when the right edge is at the end
+ // of the buffer - this means we've hit the right order with fewer swaps!
+ // E.g
+ // EF..ABCD
+ // ABCDEF.. - after four only swaps we've finished
+
+ while left_edge < len && right_edge != cap {
+ let mut right_offset = 0;
+ for i in left_edge..right_edge {
+ right_offset = (i - left_edge) % (cap - right_edge);
+ let src: isize = (right_edge + right_offset) as isize;
+ ptr::swap(buf.offset(i as isize), buf.offset(src));
+ }
+ let n_ops = right_edge - left_edge;
+ left_edge += n_ops;
+ right_edge += right_offset + 1;
+
+ }
+ }
+
+ }
+ // The elements now start at offset 0, so hand the buffer to Vec and
+ // forget `other` so the allocation isn't freed twice.
+ let out = Vec::from_raw_parts(buf, len, cap);
+ mem::forget(other);
+ out
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ // White-box tests and benches: they set `tail`/`head` directly to cover
+ // every ring alignment in a buffer of known capacity.
+ use test;
+
+ use super::VecDeque;
+
+ #[bench]
+ fn bench_push_back_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::with_capacity(101);
+ b.iter(|| {
+ for i in 0..100 {
+ deq.push_back(i);
+ }
+ // Reset without dropping: the elements are Copy ints.
+ deq.head = 0;
+ deq.tail = 0;
+ })
+ }
+
+ #[bench]
+ fn bench_push_front_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::with_capacity(101);
+ b.iter(|| {
+ for i in 0..100 {
+ deq.push_front(i);
+ }
+ deq.head = 0;
+ deq.tail = 0;
+ })
+ }
+
+ #[bench]
+ fn bench_pop_back_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::<i32>::with_capacity(101);
+
+ b.iter(|| {
+ // Pretend the deque holds 100 (uninitialized i32) elements.
+ deq.head = 100;
+ deq.tail = 0;
+ while !deq.is_empty() {
+ test::black_box(deq.pop_back());
+ }
+ })
+ }
+
+ #[bench]
+ fn bench_pop_front_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::<i32>::with_capacity(101);
+
+ b.iter(|| {
+ deq.head = 100;
+ deq.tail = 0;
+ while !deq.is_empty() {
+ test::black_box(deq.pop_front());
+ }
+ })
+ }
+
+ #[test]
+ fn test_swap_front_back_remove() {
+ fn test(back: bool) {
+ // This test checks that every single combination of tail position and length is tested.
+ // Capacity 15 should be large enough to cover every case.
+ let mut tester = VecDeque::with_capacity(15);
+ let usable_cap = tester.capacity();
+ let final_len = usable_cap / 2;
+
+ for len in 0..final_len {
+ let expected: VecDeque<_> = if back {
+ (0..len).collect()
+ } else {
+ (0..len).rev().collect()
+ };
+ for tail_pos in 0..usable_cap {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ if back {
+ for i in 0..len * 2 {
+ tester.push_front(i);
+ }
+ for i in 0..len {
+ assert_eq!(tester.swap_remove_back(i), Some(len * 2 - 1 - i));
+ }
+ } else {
+ for i in 0..len * 2 {
+ tester.push_back(i);
+ }
+ for i in 0..len {
+ let idx = tester.len() - 1 - i;
+ assert_eq!(tester.swap_remove_front(idx), Some(len * 2 - 1 - i));
+ }
+ }
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+ }
+ test(true);
+ test(false);
+ }
+
+ #[test]
+ fn test_insert() {
+ // This test checks that every single combination of tail position, length, and
+ // insertion position is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+
+
+ // len is the length *after* insertion
+ for len in 1..cap {
+ // 0, 1, 2, .., len - 1
+ let expected = (0..).take(len).collect::<VecDeque<_>>();
+ for tail_pos in 0..cap {
+ for to_insert in 0..len {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ for i in 0..len {
+ if i != to_insert {
+ tester.push_back(i);
+ }
+ }
+ tester.insert(to_insert, to_insert);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_remove() {
+ // This test checks that every single combination of tail position, length, and
+ // removal position is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+
+ // len is the length *after* removal
+ for len in 0..cap - 1 {
+ // 0, 1, 2, .., len - 1
+ let expected = (0..).take(len).collect::<VecDeque<_>>();
+ for tail_pos in 0..cap {
+ for to_remove in 0..len + 1 {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ for i in 0..len {
+ if i == to_remove {
+ // Plant a sentinel at the removal position.
+ tester.push_back(1234);
+ }
+ tester.push_back(i);
+ }
+ if to_remove == len {
+ tester.push_back(1234);
+ }
+ tester.remove(to_remove);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_drain() {
+ let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
+
+ let cap = tester.capacity();
+ for len in 0..cap + 1 {
+ for tail in 0..cap + 1 {
+ for drain_start in 0..len + 1 {
+ for drain_end in drain_start..len + 1 {
+ tester.tail = tail;
+ tester.head = tail;
+ for i in 0..len {
+ tester.push_back(i);
+ }
+
+ // Check that we drain the correct values
+ let drained: VecDeque<_> = tester.drain(drain_start..drain_end).collect();
+ let drained_expected: VecDeque<_> = (drain_start..drain_end).collect();
+ assert_eq!(drained, drained_expected);
+
+ // We shouldn't have changed the capacity or made the
+ // head or tail out of bounds
+ assert_eq!(tester.capacity(), cap);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+
+ // We should see the correct values in the VecDeque
+ let expected: VecDeque<_> = (0..drain_start)
+ .chain(drain_end..len)
+ .collect();
+ assert_eq!(expected, tester);
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_shrink_to_fit() {
+ // This test checks that every single combination of head and tail position,
+ // is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+ tester.reserve(63);
+ let max_cap = tester.capacity();
+
+ for len in 0..cap + 1 {
+ // 0, 1, 2, .., len - 1
+ let expected = (0..).take(len).collect::<VecDeque<_>>();
+ for tail_pos in 0..max_cap + 1 {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ tester.reserve(63);
+ for i in 0..len {
+ tester.push_back(i);
+ }
+ tester.shrink_to_fit();
+ assert!(tester.capacity() <= cap);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+ }
+
+ #[test]
+ fn test_split_off() {
+ // This test checks that every single combination of tail position, length, and
+ // split position is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+
+ // len is the length *before* splitting
+ for len in 0..cap {
+ // index to split at
+ for at in 0..len + 1 {
+ // 0, 1, 2, .., at - 1 (may be empty)
+ let expected_self = (0..).take(at).collect::<VecDeque<_>>();
+ // at, at + 1, .., len - 1 (may be empty)
+ let expected_other = (at..).take(len - at).collect::<VecDeque<_>>();
+
+ for tail_pos in 0..cap {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ for i in 0..len {
+ tester.push_back(i);
+ }
+ let result = tester.split_off(at);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert!(result.tail < result.cap());
+ assert!(result.head < result.cap());
+ assert_eq!(tester, expected_self);
+ assert_eq!(result, expected_other);
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_from_vec() {
+ use vec::Vec;
+ for cap in 0..35 {
+ for len in 0..cap + 1 {
+ let mut vec = Vec::with_capacity(cap);
+ vec.extend(0..len);
+
+ let vd = VecDeque::from(vec.clone());
+ assert!(vd.cap().is_power_of_two());
+ assert_eq!(vd.len(), vec.len());
+ assert!(vd.into_iter().eq(vec));
+ }
+ }
+ }
+
+ #[test]
+ fn test_vec_from_vecdeque() {
+ use vec::Vec;
+
+ fn create_vec_and_test_convert(cap: usize, offset: usize, len: usize) {
+ let mut vd = VecDeque::with_capacity(cap);
+ // Rotate the ring's start to `offset` before filling it.
+ for _ in 0..offset {
+ vd.push_back(0);
+ vd.pop_front();
+ }
+ vd.extend(0..len);
+
+ let vec: Vec<_> = Vec::from(vd.clone());
+ assert_eq!(vec.len(), vd.len());
+ assert!(vec.into_iter().eq(vd));
+ }
+
+ for cap_pwr in 0..7 {
+ // Make capacity as a (2^x)-1, so that the ring size is 2^x
+ let cap = (2i32.pow(cap_pwr) - 1) as usize;
+
+ // In these cases there is enough free space to solve it with copies
+ for len in 0..((cap + 1) / 2) {
+ // Test contiguous cases
+ for offset in 0..(cap - len) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at end of buffer is bigger than block at start
+ for offset in (cap - len)..(cap - (len / 2)) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at start of buffer is bigger than block at end
+ for offset in (cap - (len / 2))..cap {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+ }
+
+ // Now there's not (necessarily) space to straighten the ring with simple copies,
+ // the ring will use swapping when:
+ // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len))
+ // right block size > free space && left block size > free space
+ for len in ((cap + 1) / 2)..cap {
+ // Test contiguous cases
+ for offset in 0..(cap - len) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at end of buffer is bigger than block at start
+ for offset in (cap - len)..(cap - (len / 2)) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at start of buffer is bigger than block at end
+ for offset in (cap - (len / 2))..cap {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+ }
+ }
+ }
+
+}
//! This library provides smart pointers and collections for managing
//! heap-allocated values.
//!
-//! This library, like libcore, is not intended for general usage, but rather as
-//! a building block of other libraries. The types and interfaces in this
-//! library are re-exported through the [standard library](../std/index.html),
-//! and should not be used through this library.
+//! This library, like libcore, normally doesn’t need to be used directly
+//! since its contents are re-exported in the [`std` crate](../std/index.html).
+//! Crates that use the `#![no_std]` attribute however will typically
+//! not depend on `std`, so they’d use this crate instead.
//!
//! ## Boxed values
//!
//!
//! ## Atomically reference counted pointers
//!
-//! The [`Arc`](arc/index.html) type is the threadsafe equivalent of the `Rc`
+//! The [`Arc`](sync/index.html) type is the threadsafe equivalent of the `Rc`
//! type. It provides all the same functionality of `Rc`, except it requires
//! that the contained type `T` is shareable. Additionally, `Arc<T>` is itself
//! sendable while `Rc<T>` is not.
#[macro_use]
mod macros;
-#[rustc_deprecated(since = "1.27.0", reason = "use the heap module in core, alloc, or std instead")]
-#[unstable(feature = "allocator_api", issue = "32838")]
-/// Use the `alloc` module instead.
-pub mod allocator {
- pub use alloc::*;
-}
-
// Heaps provided for low-level allocation strategies
pub mod alloc;
}
#[cfg(test)]
mod boxed_test;
+pub mod collections;
#[cfg(target_has_atomic = "ptr")]
-pub mod arc;
+pub mod sync;
pub mod rc;
pub mod raw_vec;
-// collections modules
-pub mod binary_heap;
-mod btree;
pub mod borrow;
pub mod fmt;
-pub mod linked_list;
pub mod slice;
pub mod str;
pub mod string;
pub mod vec;
-pub mod vec_deque;
-
-#[stable(feature = "rust1", since = "1.0.0")]
-pub mod btree_map {
- //! A map based on a B-Tree.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub use btree::map::*;
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-pub mod btree_set {
- //! A set based on a B-Tree.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub use btree::set::*;
-}
#[cfg(not(test))]
mod std {
pub use core::ops; // RangeFull
}
-
-/// An intermediate trait for specialization of `Extend`.
-#[doc(hidden)]
-trait SpecExtend<I: IntoIterator> {
- /// Extends `self` with the contents of the given iterator.
- fn spec_extend(&mut self, iter: I);
-}
-
-#[doc(no_inline)]
-pub use binary_heap::BinaryHeap;
-#[doc(no_inline)]
-pub use btree_map::BTreeMap;
-#[doc(no_inline)]
-pub use btree_set::BTreeSet;
-#[doc(no_inline)]
-pub use linked_list::LinkedList;
-#[doc(no_inline)]
-pub use vec_deque::VecDeque;
-#[doc(no_inline)]
-pub use string::String;
-#[doc(no_inline)]
-pub use vec::Vec;
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A doubly-linked list with owned nodes.
-//!
-//! The `LinkedList` allows pushing and popping elements at either end
-//! in constant time.
-//!
-//! Almost always it is better to use `Vec` or [`VecDeque`] instead of
-//! [`LinkedList`]. In general, array-based containers are faster,
-//! more memory efficient and make better use of CPU cache.
-//!
-//! [`LinkedList`]: ../linked_list/struct.LinkedList.html
-//! [`VecDeque`]: ../vec_deque/struct.VecDeque.html
-
-#![stable(feature = "rust1", since = "1.0.0")]
-
-use core::cmp::Ordering;
-use core::fmt;
-use core::hash::{Hasher, Hash};
-use core::iter::{FromIterator, FusedIterator};
-use core::marker::PhantomData;
-use core::mem;
-use core::ptr::NonNull;
-
-use boxed::Box;
-use super::SpecExtend;
-
-/// A doubly-linked list with owned nodes.
-///
-/// The `LinkedList` allows pushing and popping elements at either end
-/// in constant time.
-///
-/// Almost always it is better to use `Vec` or `VecDeque` instead of
-/// `LinkedList`. In general, array-based containers are faster,
-/// more memory efficient and make better use of CPU cache.
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct LinkedList<T> {
- head: Option<NonNull<Node<T>>>,
- tail: Option<NonNull<Node<T>>>,
- len: usize,
- marker: PhantomData<Box<Node<T>>>,
-}
-
-struct Node<T> {
- next: Option<NonNull<Node<T>>>,
- prev: Option<NonNull<Node<T>>>,
- element: T,
-}
-
-/// An iterator over the elements of a `LinkedList`.
-///
-/// This `struct` is created by the [`iter`] method on [`LinkedList`]. See its
-/// documentation for more.
-///
-/// [`iter`]: struct.LinkedList.html#method.iter
-/// [`LinkedList`]: struct.LinkedList.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Iter<'a, T: 'a> {
- head: Option<NonNull<Node<T>>>,
- tail: Option<NonNull<Node<T>>>,
- len: usize,
- marker: PhantomData<&'a Node<T>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Iter")
- .field(&self.len)
- .finish()
- }
-}
-
-// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Iter<'a, T> {
- fn clone(&self) -> Self {
- Iter { ..*self }
- }
-}
-
-/// A mutable iterator over the elements of a `LinkedList`.
-///
-/// This `struct` is created by the [`iter_mut`] method on [`LinkedList`]. See its
-/// documentation for more.
-///
-/// [`iter_mut`]: struct.LinkedList.html#method.iter_mut
-/// [`LinkedList`]: struct.LinkedList.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct IterMut<'a, T: 'a> {
- list: &'a mut LinkedList<T>,
- head: Option<NonNull<Node<T>>>,
- tail: Option<NonNull<Node<T>>>,
- len: usize,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("IterMut")
- .field(&self.list)
- .field(&self.len)
- .finish()
- }
-}
-
-/// An owning iterator over the elements of a `LinkedList`.
-///
-/// This `struct` is created by the [`into_iter`] method on [`LinkedList`][`LinkedList`]
-/// (provided by the `IntoIterator` trait). See its documentation for more.
-///
-/// [`into_iter`]: struct.LinkedList.html#method.into_iter
-/// [`LinkedList`]: struct.LinkedList.html
-#[derive(Clone)]
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct IntoIter<T> {
- list: LinkedList<T>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("IntoIter")
- .field(&self.list)
- .finish()
- }
-}
-
-impl<T> Node<T> {
- fn new(element: T) -> Self {
- Node {
- next: None,
- prev: None,
- element,
- }
- }
-
- fn into_element(self: Box<Self>) -> T {
- self.element
- }
-}
-
-// private methods
-impl<T> LinkedList<T> {
- /// Adds the given node to the front of the list.
- #[inline]
- fn push_front_node(&mut self, mut node: Box<Node<T>>) {
- unsafe {
- node.next = self.head;
- node.prev = None;
- let node = Some(Box::into_raw_non_null(node));
-
- match self.head {
- None => self.tail = node,
- Some(mut head) => head.as_mut().prev = node,
- }
-
- self.head = node;
- self.len += 1;
- }
- }
-
- /// Removes and returns the node at the front of the list.
- #[inline]
- fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
- self.head.map(|node| unsafe {
- let node = Box::from_raw(node.as_ptr());
- self.head = node.next;
-
- match self.head {
- None => self.tail = None,
- Some(mut head) => head.as_mut().prev = None,
- }
-
- self.len -= 1;
- node
- })
- }
-
- /// Adds the given node to the back of the list.
- #[inline]
- fn push_back_node(&mut self, mut node: Box<Node<T>>) {
- unsafe {
- node.next = None;
- node.prev = self.tail;
- let node = Some(Box::into_raw_non_null(node));
-
- match self.tail {
- None => self.head = node,
- Some(mut tail) => tail.as_mut().next = node,
- }
-
- self.tail = node;
- self.len += 1;
- }
- }
-
- /// Removes and returns the node at the back of the list.
- #[inline]
- fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
- self.tail.map(|node| unsafe {
- let node = Box::from_raw(node.as_ptr());
- self.tail = node.prev;
-
- match self.tail {
- None => self.head = None,
- Some(mut tail) => tail.as_mut().next = None,
- }
-
- self.len -= 1;
- node
- })
- }
-
- /// Unlinks the specified node from the current list.
- ///
- /// Warning: this will not check that the provided node belongs to the current list.
- #[inline]
- unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
- let node = node.as_mut();
-
- match node.prev {
- Some(mut prev) => prev.as_mut().next = node.next.clone(),
- // this node is the head node
- None => self.head = node.next.clone(),
- };
-
- match node.next {
- Some(mut next) => next.as_mut().prev = node.prev.clone(),
- // this node is the tail node
- None => self.tail = node.prev.clone(),
- };
-
- self.len -= 1;
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Default for LinkedList<T> {
- /// Creates an empty `LinkedList<T>`.
- #[inline]
- fn default() -> Self {
- Self::new()
- }
-}
-
-impl<T> LinkedList<T> {
- /// Creates an empty `LinkedList`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let list: LinkedList<u32> = LinkedList::new();
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn new() -> Self {
- LinkedList {
- head: None,
- tail: None,
- len: 0,
- marker: PhantomData,
- }
- }
-
- /// Moves all elements from `other` to the end of the list.
- ///
- /// This reuses all the nodes from `other` and moves them into `self`. After
- /// this operation, `other` becomes empty.
- ///
- /// This operation should compute in O(1) time and O(1) memory.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut list1 = LinkedList::new();
- /// list1.push_back('a');
- ///
- /// let mut list2 = LinkedList::new();
- /// list2.push_back('b');
- /// list2.push_back('c');
- ///
- /// list1.append(&mut list2);
- ///
- /// let mut iter = list1.iter();
- /// assert_eq!(iter.next(), Some(&'a'));
- /// assert_eq!(iter.next(), Some(&'b'));
- /// assert_eq!(iter.next(), Some(&'c'));
- /// assert!(iter.next().is_none());
- ///
- /// assert!(list2.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn append(&mut self, other: &mut Self) {
- match self.tail {
- None => mem::swap(self, other),
- Some(mut tail) => {
- if let Some(mut other_head) = other.head.take() {
- unsafe {
- tail.as_mut().next = Some(other_head);
- other_head.as_mut().prev = Some(tail);
- }
-
- self.tail = other.tail.take();
- self.len += mem::replace(&mut other.len, 0);
- }
- }
- }
- }
-
- /// Provides a forward iterator.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut list: LinkedList<u32> = LinkedList::new();
- ///
- /// list.push_back(0);
- /// list.push_back(1);
- /// list.push_back(2);
- ///
- /// let mut iter = list.iter();
- /// assert_eq!(iter.next(), Some(&0));
- /// assert_eq!(iter.next(), Some(&1));
- /// assert_eq!(iter.next(), Some(&2));
- /// assert_eq!(iter.next(), None);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter(&self) -> Iter<T> {
- Iter {
- head: self.head,
- tail: self.tail,
- len: self.len,
- marker: PhantomData,
- }
- }
-
- /// Provides a forward iterator with mutable references.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut list: LinkedList<u32> = LinkedList::new();
- ///
- /// list.push_back(0);
- /// list.push_back(1);
- /// list.push_back(2);
- ///
- /// for element in list.iter_mut() {
- /// *element += 10;
- /// }
- ///
- /// let mut iter = list.iter();
- /// assert_eq!(iter.next(), Some(&10));
- /// assert_eq!(iter.next(), Some(&11));
- /// assert_eq!(iter.next(), Some(&12));
- /// assert_eq!(iter.next(), None);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter_mut(&mut self) -> IterMut<T> {
- IterMut {
- head: self.head,
- tail: self.tail,
- len: self.len,
- list: self,
- }
- }
-
- /// Returns `true` if the `LinkedList` is empty.
- ///
- /// This operation should compute in O(1) time.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- /// assert!(dl.is_empty());
- ///
- /// dl.push_front("foo");
- /// assert!(!dl.is_empty());
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_empty(&self) -> bool {
- self.head.is_none()
- }
-
- /// Returns the length of the `LinkedList`.
- ///
- /// This operation should compute in O(1) time.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- ///
- /// dl.push_front(2);
- /// assert_eq!(dl.len(), 1);
- ///
- /// dl.push_front(1);
- /// assert_eq!(dl.len(), 2);
- ///
- /// dl.push_back(3);
- /// assert_eq!(dl.len(), 3);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn len(&self) -> usize {
- self.len
- }
-
- /// Removes all elements from the `LinkedList`.
- ///
- /// This operation should compute in O(n) time.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- ///
- /// dl.push_front(2);
- /// dl.push_front(1);
- /// assert_eq!(dl.len(), 2);
- /// assert_eq!(dl.front(), Some(&1));
- ///
- /// dl.clear();
- /// assert_eq!(dl.len(), 0);
- /// assert_eq!(dl.front(), None);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn clear(&mut self) {
- *self = Self::new();
- }
-
- /// Returns `true` if the `LinkedList` contains an element equal to the
- /// given value.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut list: LinkedList<u32> = LinkedList::new();
- ///
- /// list.push_back(0);
- /// list.push_back(1);
- /// list.push_back(2);
- ///
- /// assert_eq!(list.contains(&0), true);
- /// assert_eq!(list.contains(&10), false);
- /// ```
- #[stable(feature = "linked_list_contains", since = "1.12.0")]
- pub fn contains(&self, x: &T) -> bool
- where T: PartialEq<T>
- {
- self.iter().any(|e| e == x)
- }
-
- /// Provides a reference to the front element, or `None` if the list is
- /// empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- /// assert_eq!(dl.front(), None);
- ///
- /// dl.push_front(1);
- /// assert_eq!(dl.front(), Some(&1));
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn front(&self) -> Option<&T> {
- unsafe {
- self.head.as_ref().map(|node| &node.as_ref().element)
- }
- }
-
- /// Provides a mutable reference to the front element, or `None` if the list
- /// is empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- /// assert_eq!(dl.front(), None);
- ///
- /// dl.push_front(1);
- /// assert_eq!(dl.front(), Some(&1));
- ///
- /// match dl.front_mut() {
- /// None => {},
- /// Some(x) => *x = 5,
- /// }
- /// assert_eq!(dl.front(), Some(&5));
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn front_mut(&mut self) -> Option<&mut T> {
- unsafe {
- self.head.as_mut().map(|node| &mut node.as_mut().element)
- }
- }
-
- /// Provides a reference to the back element, or `None` if the list is
- /// empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- /// assert_eq!(dl.back(), None);
- ///
- /// dl.push_back(1);
- /// assert_eq!(dl.back(), Some(&1));
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn back(&self) -> Option<&T> {
- unsafe {
- self.tail.as_ref().map(|node| &node.as_ref().element)
- }
- }
-
- /// Provides a mutable reference to the back element, or `None` if the list
- /// is empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- /// assert_eq!(dl.back(), None);
- ///
- /// dl.push_back(1);
- /// assert_eq!(dl.back(), Some(&1));
- ///
- /// match dl.back_mut() {
- /// None => {},
- /// Some(x) => *x = 5,
- /// }
- /// assert_eq!(dl.back(), Some(&5));
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn back_mut(&mut self) -> Option<&mut T> {
- unsafe {
- self.tail.as_mut().map(|node| &mut node.as_mut().element)
- }
- }
-
- /// Adds an element first in the list.
- ///
- /// This operation should compute in O(1) time.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut dl = LinkedList::new();
- ///
- /// dl.push_front(2);
- /// assert_eq!(dl.front().unwrap(), &2);
- ///
- /// dl.push_front(1);
- /// assert_eq!(dl.front().unwrap(), &1);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn push_front(&mut self, elt: T) {
- self.push_front_node(box Node::new(elt));
- }
-
- /// Removes the first element and returns it, or `None` if the list is
- /// empty.
- ///
- /// This operation should compute in O(1) time.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut d = LinkedList::new();
- /// assert_eq!(d.pop_front(), None);
- ///
- /// d.push_front(1);
- /// d.push_front(3);
- /// assert_eq!(d.pop_front(), Some(3));
- /// assert_eq!(d.pop_front(), Some(1));
- /// assert_eq!(d.pop_front(), None);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn pop_front(&mut self) -> Option<T> {
- self.pop_front_node().map(Node::into_element)
- }
-
- /// Appends an element to the back of a list
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut d = LinkedList::new();
- /// d.push_back(1);
- /// d.push_back(3);
- /// assert_eq!(3, *d.back().unwrap());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn push_back(&mut self, elt: T) {
- self.push_back_node(box Node::new(elt));
- }
-
- /// Removes the last element from a list and returns it, or `None` if
- /// it is empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut d = LinkedList::new();
- /// assert_eq!(d.pop_back(), None);
- /// d.push_back(1);
- /// d.push_back(3);
- /// assert_eq!(d.pop_back(), Some(3));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn pop_back(&mut self) -> Option<T> {
- self.pop_back_node().map(Node::into_element)
- }
-
- /// Splits the list into two at the given index. Returns everything after the given index,
- /// including the index.
- ///
- /// This operation should compute in O(n) time.
- ///
- /// # Panics
- ///
- /// Panics if `at > len`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::LinkedList;
- ///
- /// let mut d = LinkedList::new();
- ///
- /// d.push_front(1);
- /// d.push_front(2);
- /// d.push_front(3);
- ///
- /// let mut splitted = d.split_off(2);
- ///
- /// assert_eq!(splitted.pop_front(), Some(1));
- /// assert_eq!(splitted.pop_front(), None);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
- let len = self.len();
- assert!(at <= len, "Cannot split off at a nonexistent index");
- if at == 0 {
- return mem::replace(self, Self::new());
- } else if at == len {
- return Self::new();
- }
-
- // Below, we iterate towards the `i-1`th node, either from the start or the end,
- // depending on which would be faster.
- let split_node = if at - 1 <= len - 1 - (at - 1) {
- let mut iter = self.iter_mut();
- // instead of skipping using .skip() (which creates a new struct),
- // we skip manually so we can access the head field without
- // depending on implementation details of Skip
- for _ in 0..at - 1 {
- iter.next();
- }
- iter.head
- } else {
- // better off starting from the end
- let mut iter = self.iter_mut();
- for _ in 0..len - 1 - (at - 1) {
- iter.next_back();
- }
- iter.tail
- };
-
- // The split node is the new tail node of the first part and owns
- // the head of the second part.
- let second_part_head;
-
- unsafe {
- second_part_head = split_node.unwrap().as_mut().next.take();
- if let Some(mut head) = second_part_head {
- head.as_mut().prev = None;
- }
- }
-
- let second_part = LinkedList {
- head: second_part_head,
- tail: self.tail,
- len: len - at,
- marker: PhantomData,
- };
-
- // Fix the tail ptr of the first part
- self.tail = split_node;
- self.len = at;
-
- second_part
- }
-
- /// Creates an iterator which uses a closure to determine if an element should be removed.
- ///
- /// If the closure returns true, then the element is removed and yielded.
- /// If the closure returns false, the element will remain in the list and will not be yielded
- /// by the iterator.
- ///
- /// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
- /// whether you choose to keep or remove it.
- ///
- /// # Examples
- ///
- /// Splitting a list into evens and odds, reusing the original list:
- ///
- /// ```
- /// #![feature(drain_filter)]
- /// use std::collections::LinkedList;
- ///
- /// let mut numbers: LinkedList<u32> = LinkedList::new();
- /// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
- ///
- /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
- /// let odds = numbers;
- ///
- /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
- /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
- /// ```
- #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
- pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F>
- where F: FnMut(&mut T) -> bool
- {
- // avoid borrow issues.
- let it = self.head;
- let old_len = self.len;
-
- DrainFilter {
- list: self,
- it: it,
- pred: filter,
- idx: 0,
- old_len: old_len,
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
- fn drop(&mut self) {
- while let Some(_) = self.pop_front_node() {}
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for Iter<'a, T> {
- type Item = &'a T;
-
- #[inline]
- fn next(&mut self) -> Option<&'a T> {
- if self.len == 0 {
- None
- } else {
- self.head.map(|node| unsafe {
- // Need an unbound lifetime to get 'a
- let node = &*node.as_ptr();
- self.len -= 1;
- self.head = node.next;
- &node.element
- })
- }
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- (self.len, Some(self.len))
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
- #[inline]
- fn next_back(&mut self) -> Option<&'a T> {
- if self.len == 0 {
- None
- } else {
- self.tail.map(|node| unsafe {
- // Need an unbound lifetime to get 'a
- let node = &*node.as_ptr();
- self.len -= 1;
- self.tail = node.prev;
- &node.element
- })
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for Iter<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for IterMut<'a, T> {
- type Item = &'a mut T;
-
- #[inline]
- fn next(&mut self) -> Option<&'a mut T> {
- if self.len == 0 {
- None
- } else {
- self.head.map(|node| unsafe {
- // Need an unbound lifetime to get 'a
- let node = &mut *node.as_ptr();
- self.len -= 1;
- self.head = node.next;
- &mut node.element
- })
- }
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- (self.len, Some(self.len))
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
- #[inline]
- fn next_back(&mut self) -> Option<&'a mut T> {
- if self.len == 0 {
- None
- } else {
- self.tail.map(|node| unsafe {
- // Need an unbound lifetime to get 'a
- let node = &mut *node.as_ptr();
- self.len -= 1;
- self.tail = node.prev;
- &mut node.element
- })
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for IterMut<'a, T> {}
-
-impl<'a, T> IterMut<'a, T> {
- /// Inserts the given element just after the element most recently returned by `.next()`.
- /// The inserted element does not appear in the iteration.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(linked_list_extras)]
- ///
- /// use std::collections::LinkedList;
- ///
- /// let mut list: LinkedList<_> = vec![1, 3, 4].into_iter().collect();
- ///
- /// {
- /// let mut it = list.iter_mut();
- /// assert_eq!(it.next().unwrap(), &1);
- /// // insert `2` after `1`
- /// it.insert_next(2);
- /// }
- /// {
- /// let vec: Vec<_> = list.into_iter().collect();
- /// assert_eq!(vec, [1, 2, 3, 4]);
- /// }
- /// ```
- #[inline]
- #[unstable(feature = "linked_list_extras",
- reason = "this is probably better handled by a cursor type -- we'll see",
- issue = "27794")]
- pub fn insert_next(&mut self, element: T) {
- match self.head {
- None => self.list.push_back(element),
- Some(mut head) => unsafe {
- let mut prev = match head.as_ref().prev {
- None => return self.list.push_front(element),
- Some(prev) => prev,
- };
-
- let node = Some(Box::into_raw_non_null(box Node {
- next: Some(head),
- prev: Some(prev),
- element,
- }));
-
- prev.as_mut().next = node;
- head.as_mut().prev = node;
-
- self.list.len += 1;
- },
- }
- }
-
- /// Provides a reference to the next element, without changing the iterator.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(linked_list_extras)]
- ///
- /// use std::collections::LinkedList;
- ///
- /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
- ///
- /// let mut it = list.iter_mut();
- /// assert_eq!(it.next().unwrap(), &1);
- /// assert_eq!(it.peek_next().unwrap(), &2);
- /// // We just peeked at 2, so it was not consumed from the iterator.
- /// assert_eq!(it.next().unwrap(), &2);
- /// ```
- #[inline]
- #[unstable(feature = "linked_list_extras",
- reason = "this is probably better handled by a cursor type -- we'll see",
- issue = "27794")]
- pub fn peek_next(&mut self) -> Option<&mut T> {
- if self.len == 0 {
- None
- } else {
- unsafe {
- self.head.as_mut().map(|node| &mut node.as_mut().element)
- }
- }
- }
-}
-
-/// An iterator produced by calling `drain_filter` on LinkedList.
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-pub struct DrainFilter<'a, T: 'a, F: 'a>
- where F: FnMut(&mut T) -> bool,
-{
- list: &'a mut LinkedList<T>,
- it: Option<NonNull<Node<T>>>,
- pred: F,
- idx: usize,
- old_len: usize,
-}
-
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<'a, T, F> Iterator for DrainFilter<'a, T, F>
- where F: FnMut(&mut T) -> bool,
-{
- type Item = T;
-
- fn next(&mut self) -> Option<T> {
- while let Some(mut node) = self.it {
- unsafe {
- self.it = node.as_ref().next;
- self.idx += 1;
-
- if (self.pred)(&mut node.as_mut().element) {
- self.list.unlink_node(node);
- return Some(Box::from_raw(node.as_ptr()).element);
- }
- }
- }
-
- None
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (0, Some(self.old_len - self.idx))
- }
-}
-
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<'a, T, F> Drop for DrainFilter<'a, T, F>
- where F: FnMut(&mut T) -> bool,
-{
- fn drop(&mut self) {
- self.for_each(drop);
- }
-}
-
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<'a, T: 'a + fmt::Debug, F> fmt::Debug for DrainFilter<'a, T, F>
- where F: FnMut(&mut T) -> bool
-{
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("DrainFilter")
- .field(&self.list)
- .finish()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Iterator for IntoIter<T> {
- type Item = T;
-
- #[inline]
- fn next(&mut self) -> Option<T> {
- self.list.pop_front()
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- (self.list.len, Some(self.list.len))
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> DoubleEndedIterator for IntoIter<T> {
- #[inline]
- fn next_back(&mut self) -> Option<T> {
- self.list.pop_back()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> ExactSizeIterator for IntoIter<T> {}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for IntoIter<T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> FromIterator<T> for LinkedList<T> {
- fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
- let mut list = Self::new();
- list.extend(iter);
- list
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> IntoIterator for LinkedList<T> {
- type Item = T;
- type IntoIter = IntoIter<T>;
-
- /// Consumes the list into an iterator yielding elements by value.
- #[inline]
- fn into_iter(self) -> IntoIter<T> {
- IntoIter { list: self }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a LinkedList<T> {
- type Item = &'a T;
- type IntoIter = Iter<'a, T>;
-
- fn into_iter(self) -> Iter<'a, T> {
- self.iter()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
- type Item = &'a mut T;
- type IntoIter = IterMut<'a, T>;
-
- fn into_iter(self) -> IterMut<'a, T> {
- self.iter_mut()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Extend<T> for LinkedList<T> {
- fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
- <Self as SpecExtend<I>>::spec_extend(self, iter);
- }
-}
-
-impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
- default fn spec_extend(&mut self, iter: I) {
- for elt in iter {
- self.push_back(elt);
- }
- }
-}
-
-impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
- fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
- self.append(other);
- }
-}
-
-#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
- fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
- self.extend(iter.into_iter().cloned());
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: PartialEq> PartialEq for LinkedList<T> {
- fn eq(&self, other: &Self) -> bool {
- self.len() == other.len() && self.iter().eq(other)
- }
-
- fn ne(&self, other: &Self) -> bool {
- self.len() != other.len() || self.iter().ne(other)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Eq> Eq for LinkedList<T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: PartialOrd> PartialOrd for LinkedList<T> {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- self.iter().partial_cmp(other)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Ord for LinkedList<T> {
- #[inline]
- fn cmp(&self, other: &Self) -> Ordering {
- self.iter().cmp(other)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone> Clone for LinkedList<T> {
- fn clone(&self) -> Self {
- self.iter().cloned().collect()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_list().entries(self).finish()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Hash> Hash for LinkedList<T> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- self.len().hash(state);
- for elt in self {
- elt.hash(state);
- }
- }
-}
-
-// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
-#[allow(dead_code)]
-fn assert_covariance() {
- fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
- x
- }
- fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
- x
- }
- fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
- x
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: Send> Send for LinkedList<T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: Sync> Sync for LinkedList<T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<'a, T: Send> Send for IterMut<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {}
-
-#[cfg(test)]
-mod tests {
- use std::thread;
- use std::vec::Vec;
-
- use rand::{thread_rng, Rng};
-
- use super::{LinkedList, Node};
-
- #[cfg(test)]
- fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
- v.iter().cloned().collect()
- }
-
- pub fn check_links<T>(list: &LinkedList<T>) {
- unsafe {
- let mut len = 0;
- let mut last_ptr: Option<&Node<T>> = None;
- let mut node_ptr: &Node<T>;
- match list.head {
- None => {
- // tail node should also be None.
- assert!(list.tail.is_none());
- assert_eq!(0, list.len);
- return;
- }
- Some(node) => node_ptr = &*node.as_ptr(),
- }
- loop {
- match (last_ptr, node_ptr.prev) {
- (None, None) => {}
- (None, _) => panic!("prev link for head"),
- (Some(p), Some(pptr)) => {
- assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
- }
- _ => panic!("prev link is none, not good"),
- }
- match node_ptr.next {
- Some(next) => {
- last_ptr = Some(node_ptr);
- node_ptr = &*next.as_ptr();
- len += 1;
- }
- None => {
- len += 1;
- break;
- }
- }
- }
-
- // verify that the tail node points to the last node.
- let tail = list.tail.as_ref().expect("some tail node").as_ref();
- assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
- // check that len matches interior links.
- assert_eq!(len, list.len);
- }
- }
-
- #[test]
- fn test_append() {
- // Empty to empty
- {
- let mut m = LinkedList::<i32>::new();
- let mut n = LinkedList::new();
- m.append(&mut n);
- check_links(&m);
- assert_eq!(m.len(), 0);
- assert_eq!(n.len(), 0);
- }
- // Non-empty to empty
- {
- let mut m = LinkedList::new();
- let mut n = LinkedList::new();
- n.push_back(2);
- m.append(&mut n);
- check_links(&m);
- assert_eq!(m.len(), 1);
- assert_eq!(m.pop_back(), Some(2));
- assert_eq!(n.len(), 0);
- check_links(&m);
- }
- // Empty to non-empty
- {
- let mut m = LinkedList::new();
- let mut n = LinkedList::new();
- m.push_back(2);
- m.append(&mut n);
- check_links(&m);
- assert_eq!(m.len(), 1);
- assert_eq!(m.pop_back(), Some(2));
- check_links(&m);
- }
-
- // Non-empty to non-empty
- let v = vec![1, 2, 3, 4, 5];
- let u = vec![9, 8, 1, 2, 3, 4, 5];
- let mut m = list_from(&v);
- let mut n = list_from(&u);
- m.append(&mut n);
- check_links(&m);
- let mut sum = v;
- sum.extend_from_slice(&u);
- assert_eq!(sum.len(), m.len());
- for elt in sum {
- assert_eq!(m.pop_front(), Some(elt))
- }
- assert_eq!(n.len(), 0);
- // let's make sure it's working properly, since we
- // did some direct changes to private members
- n.push_back(3);
- assert_eq!(n.len(), 1);
- assert_eq!(n.pop_front(), Some(3));
- check_links(&n);
- }
-
- #[test]
- fn test_insert_prev() {
- let mut m = list_from(&[0, 2, 4, 6, 8]);
- let len = m.len();
- {
- let mut it = m.iter_mut();
- it.insert_next(-2);
- loop {
- match it.next() {
- None => break,
- Some(elt) => {
- it.insert_next(*elt + 1);
- match it.peek_next() {
- Some(x) => assert_eq!(*x, *elt + 2),
- None => assert_eq!(8, *elt),
- }
- }
- }
- }
- it.insert_next(0);
- it.insert_next(1);
- }
- check_links(&m);
- assert_eq!(m.len(), 3 + len * 2);
- assert_eq!(m.into_iter().collect::<Vec<_>>(),
- [-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]);
- }
-
- #[test]
- #[cfg_attr(target_os = "emscripten", ignore)]
- fn test_send() {
- let n = list_from(&[1, 2, 3]);
- thread::spawn(move || {
- check_links(&n);
- let a: &[_] = &[&1, &2, &3];
- assert_eq!(a, &*n.iter().collect::<Vec<_>>());
- })
- .join()
- .ok()
- .unwrap();
- }
-
- #[test]
- fn test_fuzz() {
- for _ in 0..25 {
- fuzz_test(3);
- fuzz_test(16);
- fuzz_test(189);
- }
- }
-
- #[test]
- fn test_26021() {
- // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
- // This caused the RHS's dtor to walk up into the LHS at drop and delete all of
- // its nodes.
- //
- // https://github.com/rust-lang/rust/issues/26021
- let mut v1 = LinkedList::new();
- v1.push_front(1);
- v1.push_front(1);
- v1.push_front(1);
- v1.push_front(1);
- let _ = v1.split_off(3); // Dropping this now should not cause laundry consumption
- assert_eq!(v1.len(), 3);
-
- assert_eq!(v1.iter().len(), 3);
- assert_eq!(v1.iter().collect::<Vec<_>>().len(), 3);
- }
-
- #[test]
- fn test_split_off() {
- let mut v1 = LinkedList::new();
- v1.push_front(1);
- v1.push_front(1);
- v1.push_front(1);
- v1.push_front(1);
-
- // test all splits
- for ix in 0..1 + v1.len() {
- let mut a = v1.clone();
- let b = a.split_off(ix);
- check_links(&a);
- check_links(&b);
- a.extend(b);
- assert_eq!(v1, a);
- }
- }
-
- #[cfg(test)]
- fn fuzz_test(sz: i32) {
- let mut m: LinkedList<_> = LinkedList::new();
- let mut v = vec![];
- for i in 0..sz {
- check_links(&m);
- let r: u8 = thread_rng().next_u32() as u8;
- match r % 6 {
- 0 => {
- m.pop_back();
- v.pop();
- }
- 1 => {
- if !v.is_empty() {
- m.pop_front();
- v.remove(0);
- }
- }
- 2 | 4 => {
- m.push_front(-i);
- v.insert(0, -i);
- }
- 3 | 5 | _ => {
- m.push_back(i);
- v.push(i);
- }
- }
- }
-
- check_links(&m);
-
- let mut i = 0;
- for (a, &b) in m.into_iter().zip(&v) {
- i += 1;
- assert_eq!(a, b);
- }
- assert_eq!(i, v.len());
- }
-
- #[test]
- fn drain_filter_test() {
- let mut m: LinkedList<u32> = LinkedList::new();
- m.extend(&[1, 2, 3, 4, 5, 6]);
- let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();
-
- check_links(&m);
-
- assert_eq!(deleted, &[1, 2, 3]);
- assert_eq!(m.into_iter().collect::<Vec<_>>(), &[4, 5, 6]);
- }
-
- #[test]
- fn drain_to_empty_test() {
- let mut m: LinkedList<u32> = LinkedList::new();
- m.extend(&[1, 2, 3, 4, 5, 6]);
- let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();
-
- check_links(&m);
-
- assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]);
- assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
- }
-}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "0")]
+#![doc(hidden)]
+
use core::cmp;
use core::mem;
use core::ops::Drop;
use core::slice;
use alloc::{Alloc, Layout, Global, handle_alloc_error};
-use alloc::CollectionAllocErr;
-use alloc::CollectionAllocErr::*;
+use collections::CollectionAllocErr;
+use collections::CollectionAllocErr::*;
use boxed::Box;
/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
/// # Examples
///
/// ```
- /// # #![feature(alloc)]
+ /// # #![feature(alloc, raw_vec_internals)]
/// # extern crate alloc;
/// # use std::ptr;
/// # use alloc::raw_vec::RawVec;
/// # Examples
///
/// ```
- /// # #![feature(alloc)]
+ /// # #![feature(alloc, raw_vec_internals)]
/// # extern crate alloc;
/// # use std::ptr;
/// # use alloc::raw_vec::RawVec;
use slice::{SliceConcatExt, SliceIndex};
use string::String;
use vec::Vec;
-use vec_deque::VecDeque;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{FromStr, Utf8Error};
use core::str::pattern::Pattern;
use core::str::lossy;
-use alloc::CollectionAllocErr;
+use collections::CollectionAllocErr;
use borrow::{Cow, ToOwned};
use boxed::Box;
use str::{self, from_boxed_utf8_unchecked, FromStr, Utf8Error, Chars};
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+//! Thread-safe reference-counting pointers.
+//!
+//! See the [`Arc<T>`][arc] documentation for more details.
+//!
+//! [arc]: struct.Arc.html
+
+use core::any::Any;
+use core::sync::atomic;
+use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
+use core::borrow;
+use core::fmt;
+use core::cmp::Ordering;
+use core::intrinsics::abort;
+use core::mem::{self, align_of_val, size_of_val};
+use core::ops::Deref;
+use core::ops::CoerceUnsized;
+use core::ptr::{self, NonNull};
+use core::marker::{Unsize, PhantomData};
+use core::hash::{Hash, Hasher};
+use core::{isize, usize};
+use core::convert::From;
+
+use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
+use boxed::Box;
+use string::String;
+use vec::Vec;
+
+/// A soft limit on the amount of references that may be made to an `Arc`.
+///
+/// Going above this limit will abort your program (although not
+/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
+/// A sentinel value that is used for the pointer of `Weak::new()`.
+const WEAK_EMPTY: usize = 1;
+
+/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
+/// Reference Counted'.
+///
+/// The type `Arc<T>` provides shared ownership of a value of type `T`,
+/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
+/// a new pointer to the same value in the heap. When the last `Arc`
+/// pointer to a given value is destroyed, the pointed-to value is
+/// also destroyed.
+///
+/// Shared references in Rust disallow mutation by default, and `Arc` is no
+/// exception: you cannot generally obtain a mutable reference to something
+/// inside an `Arc`. If you need to mutate through an `Arc`, use
+/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
+/// types.
+///
+/// ## Thread Safety
+///
+/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
+/// counting. This means that it is thread-safe. The disadvantage is that
+/// atomic operations are more expensive than ordinary memory accesses. If you
+/// are not sharing reference-counted values between threads, consider using
+/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
+/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
+/// However, a library might choose `Arc<T>` in order to give library consumers
+/// more flexibility.
+///
+/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
+/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
+/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
+/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
+/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
+/// data, but it doesn't add thread safety to its data. Consider
+/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
+/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
+/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
+/// non-atomic operations.
+///
+/// In the end, this means that you may need to pair `Arc<T>` with some sort of
+/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
+///
+/// ## Breaking cycles with `Weak`
+///
+/// The [`downgrade`][downgrade] method can be used to create a non-owning
+/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
+/// to an `Arc`, but this will return [`None`] if the value has already been
+/// dropped.
+///
+/// A cycle between `Arc` pointers will never be deallocated. For this reason,
+/// [`Weak`][weak] is used to break cycles. For example, a tree could have
+/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
+/// pointers from children back to their parents.
+///
+/// # Cloning references
+///
+/// Creating a new reference from an existing reference counted pointer is done using the
+/// `Clone` trait implemented for [`Arc<T>`][arc] and [`Weak<T>`][weak].
+///
+/// ```
+/// use std::sync::Arc;
+/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
+/// // The two syntaxes below are equivalent.
+/// let a = foo.clone();
+/// let b = Arc::clone(&foo);
+/// // a and b both point to the same memory location as foo.
+/// ```
+///
+/// The [`Arc::clone(&from)`] syntax is the most idiomatic because it conveys more explicitly
+/// the meaning of the code. In the example above, this syntax makes it easier to see that
+/// this code is creating a new reference rather than copying the whole content of foo.
+///
+/// ## `Deref` behavior
+///
+/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
+/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
+/// clashes with `T`'s methods, the methods of `Arc<T>` itself are [associated
+/// functions][assoc], called using function-like syntax:
+///
+/// ```
+/// use std::sync::Arc;
+/// let my_arc = Arc::new(());
+///
+/// Arc::downgrade(&my_arc);
+/// ```
+///
+/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the value may have
+/// already been destroyed.
+///
+/// [arc]: struct.Arc.html
+/// [weak]: struct.Weak.html
+/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
+/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+/// [mutex]: ../../std/sync/struct.Mutex.html
+/// [rwlock]: ../../std/sync/struct.RwLock.html
+/// [atomic]: ../../std/sync/atomic/index.html
+/// [`Send`]: ../../std/marker/trait.Send.html
+/// [`Sync`]: ../../std/marker/trait.Sync.html
+/// [deref]: ../../std/ops/trait.Deref.html
+/// [downgrade]: struct.Arc.html#method.downgrade
+/// [upgrade]: struct.Weak.html#method.upgrade
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
+/// [assoc]: ../../book/first-edition/method-syntax.html#associated-functions
+/// [`RefCell<T>`]: ../../std/cell/struct.RefCell.html
+/// [`std::sync`]: ../../std/sync/index.html
+/// [`Arc::clone(&from)`]: #method.clone
+///
+/// # Examples
+///
+/// Sharing some immutable data between threads:
+///
+// Note that we **do not** run these tests here. The windows builders get super
+// unhappy if a thread outlives the main thread and then exits at the same time
+// (something deadlocks) so we just avoid this entirely by not running these
+// tests.
+/// ```no_run
+/// use std::sync::Arc;
+/// use std::thread;
+///
+/// let five = Arc::new(5);
+///
+/// for _ in 0..10 {
+///     let five = Arc::clone(&five);
+///
+///     thread::spawn(move || {
+///         println!("{:?}", five);
+///     });
+/// }
+/// ```
+///
+/// Sharing a mutable [`AtomicUsize`]:
+///
+/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html
+///
+/// ```no_run
+/// use std::sync::Arc;
+/// use std::sync::atomic::{AtomicUsize, Ordering};
+/// use std::thread;
+///
+/// let val = Arc::new(AtomicUsize::new(5));
+///
+/// for _ in 0..10 {
+///     let val = Arc::clone(&val);
+///
+///     thread::spawn(move || {
+///         let v = val.fetch_add(1, Ordering::SeqCst);
+///         println!("{:?}", v);
+///     });
+/// }
+/// ```
+///
+/// See the [`rc` documentation][rc_examples] for more examples of reference
+/// counting in general.
+///
+/// [rc_examples]: ../../std/rc/index.html#examples
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Arc<T: ?Sized> {
+ ptr: NonNull<ArcInner<T>>,
+ phantom: PhantomData<T>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
+
+/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
+/// managed value. The value is accessed by calling [`upgrade`] on the `Weak`
+/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
+///
+/// Since a `Weak` reference does not count towards ownership, it will not
+/// prevent the inner value from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present and may return [`None`]
+/// when [`upgrade`]d.
+///
+/// A `Weak` pointer is useful for keeping a temporary reference to the value
+/// within [`Arc`] without extending its lifetime. It is also used to prevent
+/// circular references between [`Arc`] pointers, since mutual owning references
+/// would never allow either [`Arc`] to be dropped. For example, a tree could
+/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
+/// pointers from children back to their parents.
+///
+/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
+///
+/// [`Arc`]: struct.Arc.html
+/// [`Arc::downgrade`]: struct.Arc.html#method.downgrade
+/// [`upgrade`]: struct.Weak.html#method.upgrade
+/// [`Option`]: ../../std/option/enum.Option.html
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
+#[stable(feature = "arc_weak", since = "1.4.0")]
+pub struct Weak<T: ?Sized> {
+ // This is a `NonNull` to allow optimizing the size of this type in enums,
+ // but it is actually not truly "non-null". A `Weak::new()` will set this
+ // to a sentinel value, instead of needing to allocate some space in the
+ // heap.
+ ptr: NonNull<ArcInner<T>>,
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
+#[stable(feature = "arc_weak", since = "1.4.0")]
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "(Weak)")
+ }
+}
+
+struct ArcInner<T: ?Sized> {
+ strong: atomic::AtomicUsize,
+
+ // the value usize::MAX acts as a sentinel for temporarily "locking" the
+ // ability to upgrade weak pointers or downgrade strong ones; this is used
+ // to avoid races in `make_mut` and `get_mut`.
+ weak: atomic::AtomicUsize,
+
+ data: T,
+}
+
+unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
+
+impl<T> Arc<T> {
+ /// Constructs a new `Arc<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn new(data: T) -> Arc<T> {
+ // Start the weak pointer count as 1 which is the weak pointer that's
+ // held by all the strong pointers (kinda), see std/rc.rs for more info
+ let x: Box<_> = box ArcInner {
+ strong: atomic::AtomicUsize::new(1),
+ weak: atomic::AtomicUsize::new(1),
+ data,
+ };
+ Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData }
+ }
+
+ /// Returns the contained value, if the `Arc` has exactly one strong reference.
+ ///
+ /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
+ /// passed in.
+ ///
+ /// This will succeed even if there are outstanding weak references.
+ ///
+ /// [result]: ../../std/result/enum.Result.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new(3);
+ /// assert_eq!(Arc::try_unwrap(x), Ok(3));
+ ///
+ /// let x = Arc::new(4);
+ /// let _y = Arc::clone(&x);
+ /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_unique", since = "1.4.0")]
+ pub fn try_unwrap(this: Self) -> Result<T, Self> {
+ // See `drop` for why all these atomics are like this
+ if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
+ return Err(this);
+ }
+
+ atomic::fence(Acquire);
+
+ unsafe {
+ let elem = ptr::read(&this.ptr.as_ref().data);
+
+ // Make a weak pointer to clean up the implicit strong-weak reference
+ let _weak = Weak { ptr: this.ptr };
+ mem::forget(this);
+
+ Ok(elem)
+ }
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ /// Consumes the `Arc`, returning the wrapped pointer.
+ ///
+ /// To avoid a memory leak the pointer must be converted back to an `Arc` using
+ /// [`Arc::from_raw`][from_raw].
+ ///
+ /// [from_raw]: struct.Arc.html#method.from_raw
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new(10);
+ /// let x_ptr = Arc::into_raw(x);
+ /// assert_eq!(unsafe { *x_ptr }, 10);
+ /// ```
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub fn into_raw(this: Self) -> *const T {
+ let ptr: *const T = &*this;
+ mem::forget(this);
+ ptr
+ }
+
+ /// Constructs an `Arc` from a raw pointer.
+ ///
+ /// The raw pointer must have been previously returned by a call to a
+ /// [`Arc::into_raw`][into_raw].
+ ///
+ /// This function is unsafe because improper use may lead to memory problems. For example, a
+ /// double-free may occur if the function is called twice on the same raw pointer.
+ ///
+ /// [into_raw]: struct.Arc.html#method.into_raw
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new(10);
+ /// let x_ptr = Arc::into_raw(x);
+ ///
+ /// unsafe {
+ /// // Convert back to an `Arc` to prevent leak.
+ /// let x = Arc::from_raw(x_ptr);
+ /// assert_eq!(*x, 10);
+ ///
+ /// // Further calls to `Arc::from_raw(x_ptr)` would be memory unsafe.
+ /// }
+ ///
+ /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+ /// ```
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ // Align the unsized value to the end of the ArcInner.
+ // Because it is ?Sized, it will always be the last field in memory.
+ let align = align_of_val(&*ptr);
+ let layout = Layout::new::<ArcInner<()>>();
+ let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
+
+ // Reverse the offset to find the original ArcInner.
+ let fake_ptr = ptr as *mut ArcInner<T>;
+ let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+
+ Arc {
+ ptr: NonNull::new_unchecked(arc_ptr),
+ phantom: PhantomData,
+ }
+ }
+
+ /// Creates a new [`Weak`][weak] pointer to this value.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// let weak_five = Arc::downgrade(&five);
+ /// ```
+ #[stable(feature = "arc_weak", since = "1.4.0")]
+ pub fn downgrade(this: &Self) -> Weak<T> {
+ // This Relaxed is OK because we're checking the value in the CAS
+ // below.
+ let mut cur = this.inner().weak.load(Relaxed);
+
+ loop {
+ // check if the weak counter is currently "locked"; if so, spin.
+ if cur == usize::MAX {
+ cur = this.inner().weak.load(Relaxed);
+ continue;
+ }
+
+ // NOTE: this code currently ignores the possibility of overflow
+ // into usize::MAX; in general both Rc and Arc need to be adjusted
+ // to deal with overflow.
+
+ // Unlike with Clone(), we need this to be an Acquire read to
+ // synchronize with the write coming from `is_unique`, so that the
+ // events prior to that write happen before this read.
+ match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
+ Ok(_) => return Weak { ptr: this.ptr },
+ Err(old) => cur = old,
+ }
+ }
+ }
+
+ /// Gets the number of [`Weak`][weak] pointers to this value.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Safety
+ ///
+ /// This method by itself is safe, but using it correctly requires extra care.
+ /// Another thread can change the weak count at any time,
+ /// including potentially between calling this method and acting on the result.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// let _weak_five = Arc::downgrade(&five);
+ ///
+ /// // This assertion is deterministic because we haven't shared
+ /// // the `Arc` or `Weak` between threads.
+ /// assert_eq!(1, Arc::weak_count(&five));
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_counts", since = "1.15.0")]
+ pub fn weak_count(this: &Self) -> usize {
+ let cnt = this.inner().weak.load(SeqCst);
+ // If the weak count is currently locked, the value of the
+ // count was 0 just before taking the lock.
+ if cnt == usize::MAX { 0 } else { cnt - 1 }
+ }
+
+ /// Gets the number of strong (`Arc`) pointers to this value.
+ ///
+ /// # Safety
+ ///
+ /// This method by itself is safe, but using it correctly requires extra care.
+ /// Another thread can change the strong count at any time,
+ /// including potentially between calling this method and acting on the result.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// let _also_five = Arc::clone(&five);
+ ///
+ /// // This assertion is deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_counts", since = "1.15.0")]
+ pub fn strong_count(this: &Self) -> usize {
+ this.inner().strong.load(SeqCst)
+ }
+
+ #[inline]
+ fn inner(&self) -> &ArcInner<T> {
+ // This unsafety is ok because while this arc is alive we're guaranteed
+ // that the inner pointer is valid. Furthermore, we know that the
+ // `ArcInner` structure itself is `Sync` because the inner data is
+ // `Sync` as well, so we're ok loaning out an immutable pointer to these
+ // contents.
+ unsafe { self.ptr.as_ref() }
+ }
+
+ // Non-inlined part of `drop`.
+ #[inline(never)]
+ unsafe fn drop_slow(&mut self) {
+ // Destroy the data at this time, even though we may not free the box
+ // allocation itself (there may still be weak pointers lying around).
+ ptr::drop_in_place(&mut self.ptr.as_mut().data);
+
+ if self.inner().weak.fetch_sub(1, Release) == 1 {
+ atomic::fence(Acquire);
+ Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
+ }
+ }
+
+ #[inline]
+ #[stable(feature = "ptr_eq", since = "1.17.0")]
+ /// Returns true if the two `Arc`s point to the same value (not
+ /// just values that compare as equal).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// let same_five = Arc::clone(&five);
+ /// let other_five = Arc::new(5);
+ ///
+ /// assert!(Arc::ptr_eq(&five, &same_five));
+ /// assert!(!Arc::ptr_eq(&five, &other_five));
+ /// ```
+ pub fn ptr_eq(this: &Self, other: &Self) -> bool {
+ this.ptr.as_ptr() == other.ptr.as_ptr()
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ // Allocates an `ArcInner<T>` with sufficient space for an unsized value
+ unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
+ // Create a fake ArcInner to find allocation size and alignment
+ let fake_ptr = ptr as *mut ArcInner<T>;
+
+ let layout = Layout::for_value(&*fake_ptr);
+
+ let mem = Global.alloc(layout)
+ .unwrap_or_else(|_| handle_alloc_error(layout));
+
+ // Initialize the real ArcInner
+ let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
+
+ ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
+ ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
+
+ inner
+ }
+
+ fn from_box(v: Box<T>) -> Arc<T> {
+ unsafe {
+ let box_unique = Box::into_unique(v);
+ let bptr = box_unique.as_ptr();
+
+ let value_size = size_of_val(&*bptr);
+ let ptr = Self::allocate_for_ptr(bptr);
+
+ // Copy value as bytes
+ ptr::copy_nonoverlapping(
+ bptr as *const T as *const u8,
+ &mut (*ptr).data as *mut _ as *mut u8,
+ value_size);
+
+ // Free the allocation without dropping its contents
+ box_free(box_unique);
+
+ Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
+ }
+ }
+}
+
+// Sets the data pointer of a `?Sized` raw pointer.
+//
+// For a slice/trait object, this sets the `data` field and leaves the rest
+// unchanged. For a sized raw pointer, this simply sets the pointer.
+unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
+ ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+ ptr
+}
+
+impl<T> Arc<[T]> {
+ // Copy elements from slice into newly allocated Arc<[T]>
+ //
+ // Unsafe because the caller must either take ownership or bind `T: Copy`
+ unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
+ let v_ptr = v as *const [T];
+ let ptr = Self::allocate_for_ptr(v_ptr);
+
+ ptr::copy_nonoverlapping(
+ v.as_ptr(),
+ &mut (*ptr).data as *mut [T] as *mut T,
+ v.len());
+
+ Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
+ }
+}
+
+// Specialization trait used for From<&[T]>
+trait ArcFromSlice<T> {
+ fn from_slice(slice: &[T]) -> Self;
+}
+
+impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
+ #[inline]
+ default fn from_slice(v: &[T]) -> Self {
+ // Panic guard while cloning T elements.
+ // In the event of a panic, elements that have been written
+ // into the new ArcInner will be dropped, then the memory freed.
+ struct Guard<T> {
+ mem: NonNull<u8>,
+ elems: *mut T,
+ layout: Layout,
+ n_elems: usize,
+ }
+
+ impl<T> Drop for Guard<T> {
+ fn drop(&mut self) {
+ use core::slice::from_raw_parts_mut;
+
+ unsafe {
+ let slice = from_raw_parts_mut(self.elems, self.n_elems);
+ ptr::drop_in_place(slice);
+
+ Global.dealloc(self.mem.cast(), self.layout.clone());
+ }
+ }
+ }
+
+ unsafe {
+ let v_ptr = v as *const [T];
+ let ptr = Self::allocate_for_ptr(v_ptr);
+
+ let mem = ptr as *mut _ as *mut u8;
+ let layout = Layout::for_value(&*ptr);
+
+ // Pointer to first element
+ let elems = &mut (*ptr).data as *mut [T] as *mut T;
+
+ let mut guard = Guard{
+ mem: NonNull::new_unchecked(mem),
+ elems: elems,
+ layout: layout,
+ n_elems: 0,
+ };
+
+ for (i, item) in v.iter().enumerate() {
+ ptr::write(elems.offset(i as isize), item.clone());
+ guard.n_elems += 1;
+ }
+
+ // All clear. Forget the guard so it doesn't free the new ArcInner.
+ mem::forget(guard);
+
+ Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
+ }
+ }
+}
+
+impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
+ #[inline]
+ fn from_slice(v: &[T]) -> Self {
+ unsafe { Arc::copy_from_slice(v) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Clone for Arc<T> {
+ /// Makes a clone of the `Arc` pointer.
+ ///
+ /// This creates another pointer to the same inner value, increasing the
+ /// strong reference count.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// Arc::clone(&five);
+ /// ```
+ #[inline]
+ fn clone(&self) -> Arc<T> {
+ // Using a relaxed ordering is alright here, as knowledge of the
+ // original reference prevents other threads from erroneously deleting
+ // the object.
+ //
+ // As explained in the [Boost documentation][1], increasing the
+ // reference counter can always be done with memory_order_relaxed: New
+ // references to an object can only be formed from an existing
+ // reference, and passing an existing reference from one thread to
+ // another must already provide any required synchronization.
+ //
+ // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+ let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+ // However we need to guard against massive refcounts in case someone
+ // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+ // and users will use-after free. We racily saturate to `isize::MAX` on
+ // the assumption that there aren't ~2 billion threads incrementing
+ // the reference count at once. This branch will never be taken in
+ // any realistic program.
+ //
+ // We abort because such a program is incredibly degenerate, and we
+ // don't care to support it.
+ if old_size > MAX_REFCOUNT {
+ unsafe {
+ abort();
+ }
+ }
+
+ Arc { ptr: self.ptr, phantom: PhantomData }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Deref for Arc<T> {
+ type Target = T;
+
+ #[inline]
+ fn deref(&self) -> &T {
+ // Borrow the `data` field of the shared `ArcInner`; the returned
+ // reference's lifetime is tied to `self`, which keeps the allocation
+ // alive.
+ &self.inner().data
+ }
+}
+
+impl<T: Clone> Arc<T> {
+ /// Makes a mutable reference into the given `Arc`.
+ ///
+ /// If there are other `Arc` or [`Weak`][weak] pointers to the same value,
+ /// then `make_mut` will invoke [`clone`][clone] on the inner value to
+ /// ensure unique ownership. This is also referred to as clone-on-write.
+ ///
+ /// See also [`get_mut`][get_mut], which will fail rather than cloning.
+ ///
+ /// [weak]: struct.Weak.html
+ /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+ /// [get_mut]: struct.Arc.html#method.get_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let mut data = Arc::new(5);
+ ///
+ /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
+ /// let mut other_data = Arc::clone(&data); // Won't clone inner data
+ /// *Arc::make_mut(&mut data) += 1; // Clones inner data
+ /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
+ /// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything
+ ///
+ /// // Now `data` and `other_data` point to different values.
+ /// assert_eq!(*data, 8);
+ /// assert_eq!(*other_data, 12);
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_unique", since = "1.4.0")]
+ pub fn make_mut(this: &mut Self) -> &mut T {
+ // Note that we hold both a strong reference and a weak reference.
+ // Thus, releasing our strong reference only will not, by itself, cause
+ // the memory to be deallocated.
+ //
+ // Use Acquire to ensure that we see any writes to `weak` that happen
+ // before release writes (i.e., decrements) to `strong`. Since we hold a
+ // weak count, there's no chance the ArcInner itself could be
+ // deallocated.
+ if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
+ // Another strong pointer exists; clone
+ *this = Arc::new((**this).clone());
+ } else if this.inner().weak.load(Relaxed) != 1 {
+ // Relaxed suffices in the above because this is fundamentally an
+ // optimization: we are always racing with weak pointers being
+ // dropped. Worst case, we end up allocating a new Arc unnecessarily.
+
+ // We removed the last strong ref, but there are additional weak
+ // refs remaining. We'll move the contents to a new Arc, and
+ // invalidate the other weak refs.
+
+ // Note that it is not possible for the read of `weak` to yield
+ // usize::MAX (i.e., locked), since the weak count can only be
+ // locked by a thread with a strong reference.
+
+ // Materialize our own implicit weak pointer, so that it can clean
+ // up the ArcInner as needed.
+ let weak = Weak { ptr: this.ptr };
+
+ // mark the data itself as already deallocated
+ unsafe {
+ // there is no data race in the implicit write caused by `read`
+ // here (due to zeroing) because data is no longer accessed by
+ // other threads (due to there being no more strong refs at this
+ // point).
+ let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
+ mem::swap(this, &mut swap);
+ mem::forget(swap);
+ }
+ } else {
+ // We were the sole reference of either kind; bump back up the
+ // strong ref count.
+ this.inner().strong.store(1, Release);
+ }
+
+ // As with `get_mut()`, the unsafety is ok because our reference was
+ // either unique to begin with, or became one upon cloning the contents.
+ unsafe {
+ &mut this.ptr.as_mut().data
+ }
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ /// Returns a mutable reference to the inner value, if there are
+ /// no other `Arc` or [`Weak`][weak] pointers to the same value.
+ ///
+ /// Returns [`None`][option] otherwise, because it is not safe to
+ /// mutate a shared value.
+ ///
+ /// See also [`make_mut`][make_mut], which will [`clone`][clone]
+ /// the inner value when it's shared.
+ ///
+ /// [weak]: struct.Weak.html
+ /// [option]: ../../std/option/enum.Option.html
+ /// [make_mut]: struct.Arc.html#method.make_mut
+ /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let mut x = Arc::new(3);
+ /// *Arc::get_mut(&mut x).unwrap() = 4;
+ /// assert_eq!(*x, 4);
+ ///
+ /// let _y = Arc::clone(&x);
+ /// assert!(Arc::get_mut(&mut x).is_none());
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_unique", since = "1.4.0")]
+ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
+ if this.is_unique() {
+ // This unsafety is ok because we're guaranteed that the pointer
+ // returned is the *only* pointer that will ever be returned to T. Our
+ // reference count is guaranteed to be 1 at this point, and we required
+ // the Arc itself to be `mut`, so we're returning the only possible
+ // reference to the inner data.
+ unsafe {
+ Some(&mut this.ptr.as_mut().data)
+ }
+ } else {
+ None
+ }
+ }
+
+ /// Determine whether this is the unique reference (including weak refs) to
+ /// the underlying data.
+ ///
+ /// Note that this requires locking the weak ref count.
+ fn is_unique(&mut self) -> bool {
+ // lock the weak pointer count if we appear to be the sole weak pointer
+ // holder. (The weak count is "locked" by temporarily setting it to
+ // `usize::MAX`; see the CAS below.)
+ //
+ // The acquire label here ensures a happens-before relationship with any
+ // writes to `strong` prior to decrements of the `weak` count (via drop,
+ // which uses Release).
+ if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+ // Due to the previous acquire read, this will observe any writes to
+ // `strong` that were due to upgrading weak pointers; only strong
+ // clones remain, which require that the strong count is > 1 anyway.
+ let unique = self.inner().strong.load(Relaxed) == 1;
+
+ // The release write here synchronizes with a read in `downgrade`,
+ // effectively preventing the above read of `strong` from happening
+ // after the write.
+ self.inner().weak.store(1, Release); // release the lock
+ unique
+ } else {
+ false
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
+ /// Drops the `Arc`.
+ ///
+ /// This will decrement the strong reference count. If the strong reference
+ /// count reaches zero then the only other references (if any) are
+ /// [`Weak`][weak], so we `drop` the inner value.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// struct Foo;
+ ///
+ /// impl Drop for Foo {
+ /// fn drop(&mut self) {
+ /// println!("dropped!");
+ /// }
+ /// }
+ ///
+ /// let foo = Arc::new(Foo);
+ /// let foo2 = Arc::clone(&foo);
+ ///
+ /// drop(foo); // Doesn't print anything
+ /// drop(foo2); // Prints "dropped!"
+ /// ```
+ #[inline]
+ fn drop(&mut self) {
+ // Because `fetch_sub` is already atomic, we do not need to synchronize
+ // with other threads unless we are going to delete the object. This
+ // same logic applies to the below `fetch_sub` to the `weak` count.
+ if self.inner().strong.fetch_sub(1, Release) != 1 {
+ return;
+ }
+
+ // This fence is needed to prevent reordering of use of the data and
+ // deletion of the data. Because it is marked `Release`, the decreasing
+ // of the reference count synchronizes with this `Acquire` fence. This
+ // means that use of the data happens before decreasing the reference
+ // count, which happens before this fence, which happens before the
+ // deletion of the data.
+ //
+ // As explained in the [Boost documentation][1],
+ //
+ // > It is important to enforce any possible access to the object in one
+ // > thread (through an existing reference) to *happen before* deleting
+ // > the object in a different thread. This is achieved by a "release"
+ // > operation after dropping a reference (any access to the object
+ // > through this reference must obviously happened before), and an
+ // > "acquire" operation before deleting the object.
+ //
+ // In particular, while the contents of an Arc are usually immutable, it's
+ // possible to have interior writes to something like a Mutex<T>. Since a
+ // Mutex is not acquired when it is deleted, we can't rely on its
+ // synchronization logic to make writes in thread A visible to a destructor
+ // running in thread B.
+ //
+ // Also note that the Acquire fence here could probably be replaced with an
+ // Acquire load, which could improve performance in highly-contended
+ // situations. See [2].
+ //
+ // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+ // [2]: (https://github.com/rust-lang/rust/pull/41714)
+ atomic::fence(Acquire);
+
+ unsafe {
+ // `drop_slow` (defined elsewhere in this file) performs the actual
+ // cleanup once the last strong reference is gone.
+ self.drop_slow();
+ }
+ }
+}
+
+impl Arc<Any + Send + Sync> {
+ #[inline]
+ #[unstable(feature = "rc_downcast", issue = "44608")]
+ /// Attempt to downcast the `Arc<Any + Send + Sync>` to a concrete type.
+ ///
+ /// On success the original `Arc` is consumed and a re-typed `Arc` sharing
+ /// the same allocation is returned; on failure the original is handed back
+ /// unchanged in the `Err` variant.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(rc_downcast)]
+ /// use std::any::Any;
+ /// use std::sync::Arc;
+ ///
+ /// fn print_if_string(value: Arc<Any + Send + Sync>) {
+ /// if let Ok(string) = value.downcast::<String>() {
+ /// println!("String ({}): {}", string.len(), string);
+ /// }
+ /// }
+ ///
+ /// fn main() {
+ /// let my_string = "Hello World".to_string();
+ /// print_if_string(Arc::new(my_string));
+ /// print_if_string(Arc::new(0i8));
+ /// }
+ /// ```
+ pub fn downcast<T>(self) -> Result<Arc<T>, Self>
+ where
+ T: Any + Send + Sync + 'static,
+ {
+ if (*self).is::<T>() {
+ let ptr = self.ptr.cast::<ArcInner<T>>();
+ // Transfer ownership of the strong count to the returned Arc:
+ // forgetting `self` prevents its destructor from decrementing it.
+ mem::forget(self);
+ Ok(Arc { ptr, phantom: PhantomData })
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<T> Weak<T> {
+ /// Constructs a new `Weak<T>`, without allocating any memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
+ ///
+ /// [`upgrade`]: struct.Weak.html#method.upgrade
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Weak;
+ ///
+ /// let empty: Weak<i64> = Weak::new();
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ #[stable(feature = "downgraded_weak", since = "1.10.0")]
+ pub fn new() -> Weak<T> {
+ unsafe {
+ Weak {
+ // `WEAK_EMPTY` is a sentinel address (never dereferenced):
+ // `upgrade`, `clone`, and `drop` all check for it first, so no
+ // allocation is needed for an empty Weak.
+ ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
+ }
+ }
+ }
+}
+
+impl<T: ?Sized> Weak<T> {
+ /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending
+ /// the lifetime of the value if successful.
+ ///
+ /// Returns [`None`] if the value has since been dropped.
+ ///
+ /// [`Arc`]: struct.Arc.html
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// let weak_five = Arc::downgrade(&five);
+ ///
+ /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
+ /// assert!(strong_five.is_some());
+ ///
+ /// // Destroy all strong pointers.
+ /// drop(strong_five);
+ /// drop(five);
+ ///
+ /// assert!(weak_five.upgrade().is_none());
+ /// ```
+ #[stable(feature = "arc_weak", since = "1.4.0")]
+ pub fn upgrade(&self) -> Option<Arc<T>> {
+ // We use a CAS loop to increment the strong count instead of a
+ // fetch_add because once the count hits 0 it must never be above 0.
+ let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+ return None;
+ } else {
+ unsafe { self.ptr.as_ref() }
+ };
+
+ // Relaxed load because any write of 0 that we can observe
+ // leaves the field in a permanently zero state (so a
+ // "stale" read of 0 is fine), and any other value is
+ // confirmed via the CAS below.
+ let mut n = inner.strong.load(Relaxed);
+
+ loop {
+ if n == 0 {
+ return None;
+ }
+
+ // See comments in `Arc::clone` for why we do this (for `mem::forget`).
+ if n > MAX_REFCOUNT {
+ unsafe {
+ abort();
+ }
+ }
+
+ // Relaxed is valid for the same reason it is on Arc's Clone impl
+ match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
+ Ok(_) => return Some(Arc {
+ // the WEAK_EMPTY sentinel was checked above, so this is a
+ // real allocation
+ ptr: self.ptr,
+ phantom: PhantomData,
+ }),
+ Err(old) => n = old,
+ }
+ }
+ }
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized> Clone for Weak<T> {
+ /// Makes a clone of the `Weak` pointer that points to the same value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::{Arc, Weak};
+ ///
+ /// let weak_five = Arc::downgrade(&Arc::new(5));
+ ///
+ /// Weak::clone(&weak_five);
+ /// ```
+ #[inline]
+ fn clone(&self) -> Weak<T> {
+ let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+ return Weak { ptr: self.ptr };
+ } else {
+ unsafe { self.ptr.as_ref() }
+ };
+ // See comments in Arc::clone() for why this is relaxed. This can use a
+ // fetch_add (ignoring the lock) because the weak count is only locked
+ // when there are *no other* weak pointers in existence. (So we can't be
+ // running this code in that case).
+ let old_size = inner.weak.fetch_add(1, Relaxed);
+
+ // See comments in Arc::clone() for why we do this (for mem::forget).
+ if old_size > MAX_REFCOUNT {
+ unsafe {
+ abort();
+ }
+ }
+
+ return Weak { ptr: self.ptr };
+ }
+}
+
+#[stable(feature = "downgraded_weak", since = "1.10.0")]
+impl<T> Default for Weak<T> {
+ /// Constructs a new `Weak<T>`, without allocating memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
+ ///
+ /// [`upgrade`]: struct.Weak.html#method.upgrade
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Weak;
+ ///
+ /// let empty: Weak<i64> = Default::default();
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ fn default() -> Weak<T> {
+ // Identical to `Weak::new()`: a sentinel, allocation-free Weak.
+ Weak::new()
+ }
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized> Drop for Weak<T> {
+ /// Drops the `Weak` pointer.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::{Arc, Weak};
+ ///
+ /// struct Foo;
+ ///
+ /// impl Drop for Foo {
+ /// fn drop(&mut self) {
+ /// println!("dropped!");
+ /// }
+ /// }
+ ///
+ /// let foo = Arc::new(Foo);
+ /// let weak_foo = Arc::downgrade(&foo);
+ /// let other_weak_foo = Weak::clone(&weak_foo);
+ ///
+ /// drop(weak_foo); // Doesn't print anything
+ /// drop(foo); // Prints "dropped!"
+ ///
+ /// assert!(other_weak_foo.upgrade().is_none());
+ /// ```
+ fn drop(&mut self) {
+ // If we find out that we were the last weak pointer, then it's time to
+ // deallocate the data entirely. See the discussion in Arc::drop() about
+ // the memory orderings
+ //
+ // It's not necessary to check for the locked state here, because the
+ // weak count can only be locked if there was precisely one weak ref,
+ // meaning that drop could only subsequently run ON that remaining weak
+ // ref, which can only happen after the lock is released.
+ let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+ return;
+ } else {
+ unsafe { self.ptr.as_ref() }
+ };
+
+ if inner.weak.fetch_sub(1, Release) == 1 {
+ atomic::fence(Acquire);
+ unsafe {
+ // Last weak ref gone: free the ArcInner allocation itself (the
+ // data was already dropped when the strong count hit zero).
+ Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
+ }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
+ /// Equality for two `Arc`s.
+ ///
+ /// Two `Arc`s are equal if their inner values are equal.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five == Arc::new(5));
+ /// ```
+ fn eq(&self, other: &Arc<T>) -> bool {
+ *(*self) == *(*other)
+ }
+
+ /// Inequality for two `Arc`s.
+ ///
+ /// Two `Arc`s are unequal if their inner values are unequal.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five != Arc::new(6));
+ /// ```
+ // `ne` is overridden (rather than using the default `!eq`) so the inner
+ // type's own `!=` implementation is used directly.
+ fn ne(&self, other: &Arc<T>) -> bool {
+ *(*self) != *(*other)
+ }
+}
+// Each comparison operator is forwarded individually (not derived from
+// `partial_cmp`) so the inner type's own operator implementations are used.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
+ /// Partial comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `partial_cmp()` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ /// use std::cmp::Ordering;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
+ /// ```
+ fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
+ (**self).partial_cmp(&**other)
+ }
+
+ /// Less-than comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `<` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five < Arc::new(6));
+ /// ```
+ fn lt(&self, other: &Arc<T>) -> bool {
+ *(*self) < *(*other)
+ }
+
+ /// 'Less than or equal to' comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `<=` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five <= Arc::new(5));
+ /// ```
+ fn le(&self, other: &Arc<T>) -> bool {
+ *(*self) <= *(*other)
+ }
+
+ /// Greater-than comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `>` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five > Arc::new(4));
+ /// ```
+ fn gt(&self, other: &Arc<T>) -> bool {
+ *(*self) > *(*other)
+ }
+
+ /// 'Greater than or equal to' comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `>=` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five >= Arc::new(5));
+ /// ```
+ fn ge(&self, other: &Arc<T>) -> bool {
+ *(*self) >= *(*other)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Ord> Ord for Arc<T> {
+ /// Comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `cmp()` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ /// use std::cmp::Ordering;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
+ /// ```
+ fn cmp(&self, other: &Arc<T>) -> Ordering {
+ // Total ordering is delegated to the inner values.
+ (**self).cmp(&**other)
+ }
+}
+// Marker impl: `Arc<T>` is `Eq` whenever `T` is, since equality is that of
+// the inner values.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Eq> Eq for Arc<T> {}
+
+// Formats the inner value, not the pointer.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&**self, f)
+ }
+}
+
+// Debug-formats the inner value, not the pointer.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Debug::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> fmt::Pointer for Arc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Prints the address of the inner data (`&**self`), not of the
+ // `ArcInner` allocation header.
+ fmt::Pointer::fmt(&(&**self as *const T), f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Default> Default for Arc<T> {
+ /// Creates a new `Arc<T>`, with the `Default` value for `T`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x: Arc<i32> = Default::default();
+ /// assert_eq!(*x, 0);
+ /// ```
+ fn default() -> Arc<T> {
+ // Always a fresh allocation; never shares an existing one.
+ Arc::new(Default::default())
+ }
+}
+
+// Hashes the inner value, so two `Arc`s to equal values hash identically.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Hash> Hash for Arc<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (**self).hash(state)
+ }
+}
+
+// Moves `t` into a newly allocated `Arc`.
+#[stable(feature = "from_for_ptrs", since = "1.6.0")]
+impl<T> From<T> for Arc<T> {
+ fn from(t: T) -> Self {
+ Arc::new(t)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<'a, T: Clone> From<&'a [T]> for Arc<[T]> {
+ #[inline]
+ fn from(v: &[T]) -> Arc<[T]> {
+ // Dispatch through the `ArcFromSlice` helper trait so the `T: Copy`
+ // specialization (memcpy-based) applies when available.
+ <Self as ArcFromSlice<T>>::from_slice(v)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<'a> From<&'a str> for Arc<str> {
+ #[inline]
+ fn from(v: &str) -> Arc<str> {
+ let arc = Arc::<[u8]>::from(v.as_bytes());
+ // The bytes came from a valid `&str`, so reinterpreting the `[u8]`
+ // allocation as `str` preserves the UTF-8 invariant.
+ unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
+ }
+}
+
+// Copies the string's bytes into a new shared allocation; the `String`'s own
+// buffer is then dropped.
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<String> for Arc<str> {
+ #[inline]
+ fn from(v: String) -> Arc<str> {
+ Arc::from(&v[..])
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: ?Sized> From<Box<T>> for Arc<T> {
+ #[inline]
+ fn from(v: Box<T>) -> Arc<T> {
+ // `from_box` (defined elsewhere in this file) takes over the boxed
+ // value.
+ Arc::from_box(v)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T> From<Vec<T>> for Arc<[T]> {
+ #[inline]
+ fn from(mut v: Vec<T>) -> Arc<[T]> {
+ unsafe {
+ // Bitwise-copies the elements into the new allocation; ownership
+ // of the elements moves into the Arc.
+ let arc = Arc::copy_from_slice(&v);
+
+ // Allow the Vec to free its memory, but not destroy its contents
+ // (they now live in, and will be dropped by, the Arc).
+ v.set_len(0);
+
+ arc
+ }
+ }
+}
+
+// Unit tests for `Arc`/`Weak`: reference counting, weak upgrades, COW via
+// `make_mut`, conversions, and downcasting.
+#[cfg(test)]
+mod tests {
+ use std::boxed::Box;
+ use std::clone::Clone;
+ use std::sync::mpsc::channel;
+ use std::mem::drop;
+ use std::ops::Drop;
+ use std::option::Option;
+ use std::option::Option::{None, Some};
+ use std::sync::atomic;
+ use std::sync::atomic::Ordering::{Acquire, SeqCst};
+ use std::thread;
+ use std::sync::Mutex;
+ use std::convert::From;
+
+ use super::{Arc, Weak};
+ use vec::Vec;
+
+ // Bumps the pointed-to counter when dropped, so tests can observe
+ // exactly when destruction happens.
+ struct Canary(*mut atomic::AtomicUsize);
+
+ impl Drop for Canary {
+ fn drop(&mut self) {
+ unsafe {
+ match *self {
+ Canary(c) => {
+ (*c).fetch_add(1, SeqCst);
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ #[cfg_attr(target_os = "emscripten", ignore)]
+ fn manually_share_arc() {
+ let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+ let arc_v = Arc::new(v);
+
+ let (tx, rx) = channel();
+
+ let _t = thread::spawn(move || {
+ let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
+ assert_eq!((*arc_v)[3], 4);
+ });
+
+ tx.send(arc_v.clone()).unwrap();
+
+ assert_eq!((*arc_v)[2], 3);
+ assert_eq!((*arc_v)[4], 5);
+ }
+
+ #[test]
+ fn test_arc_get_mut() {
+ let mut x = Arc::new(3);
+ *Arc::get_mut(&mut x).unwrap() = 4;
+ assert_eq!(*x, 4);
+ let y = x.clone();
+ assert!(Arc::get_mut(&mut x).is_none());
+ drop(y);
+ assert!(Arc::get_mut(&mut x).is_some());
+ let _w = Arc::downgrade(&x);
+ assert!(Arc::get_mut(&mut x).is_none());
+ }
+
+ #[test]
+ fn try_unwrap() {
+ let x = Arc::new(3);
+ assert_eq!(Arc::try_unwrap(x), Ok(3));
+ let x = Arc::new(4);
+ let _y = x.clone();
+ assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
+ // Weak refs alone do not prevent unwrapping.
+ let x = Arc::new(5);
+ let _w = Arc::downgrade(&x);
+ assert_eq!(Arc::try_unwrap(x), Ok(5));
+ }
+
+ #[test]
+ fn into_from_raw() {
+ let x = Arc::new(box "hello");
+ let y = x.clone();
+
+ let x_ptr = Arc::into_raw(x);
+ drop(y);
+ unsafe {
+ assert_eq!(**x_ptr, "hello");
+
+ let x = Arc::from_raw(x_ptr);
+ assert_eq!(**x, "hello");
+
+ assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello"));
+ }
+ }
+
+ #[test]
+ fn test_into_from_raw_unsized() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ // Round-trip through raw pointers must preserve fat-pointer metadata
+ // for both slices and trait objects.
+ let arc: Arc<str> = Arc::from("foo");
+
+ let ptr = Arc::into_raw(arc.clone());
+ let arc2 = unsafe { Arc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }, "foo");
+ assert_eq!(arc, arc2);
+
+ let arc: Arc<Display> = Arc::new(123);
+
+ let ptr = Arc::into_raw(arc.clone());
+ let arc2 = unsafe { Arc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }.to_string(), "123");
+ assert_eq!(arc2.to_string(), "123");
+ }
+
+ #[test]
+ fn test_cowarc_clone_make_mut() {
+ let mut cow0 = Arc::new(75);
+ let mut cow1 = cow0.clone();
+ let mut cow2 = cow1.clone();
+
+ assert!(75 == *Arc::make_mut(&mut cow0));
+ assert!(75 == *Arc::make_mut(&mut cow1));
+ assert!(75 == *Arc::make_mut(&mut cow2));
+
+ *Arc::make_mut(&mut cow0) += 1;
+ *Arc::make_mut(&mut cow1) += 2;
+ *Arc::make_mut(&mut cow2) += 3;
+
+ assert!(76 == *cow0);
+ assert!(77 == *cow1);
+ assert!(78 == *cow2);
+
+ // none should point to the same backing memory
+ assert!(*cow0 != *cow1);
+ assert!(*cow0 != *cow2);
+ assert!(*cow1 != *cow2);
+ }
+
+ #[test]
+ fn test_cowarc_clone_unique2() {
+ let mut cow0 = Arc::new(75);
+ let cow1 = cow0.clone();
+ let cow2 = cow1.clone();
+
+ assert!(75 == *cow0);
+ assert!(75 == *cow1);
+ assert!(75 == *cow2);
+
+ *Arc::make_mut(&mut cow0) += 1;
+ assert!(76 == *cow0);
+ assert!(75 == *cow1);
+ assert!(75 == *cow2);
+
+ // cow1 and cow2 should share the same contents
+ // cow0 should have a unique reference
+ assert!(*cow0 != *cow1);
+ assert!(*cow0 != *cow2);
+ assert!(*cow1 == *cow2);
+ }
+
+ #[test]
+ fn test_cowarc_clone_weak() {
+ // `make_mut` with only weak refs outstanding moves the value and
+ // invalidates the weak refs rather than cloning.
+ let mut cow0 = Arc::new(75);
+ let cow1_weak = Arc::downgrade(&cow0);
+
+ assert!(75 == *cow0);
+ assert!(75 == *cow1_weak.upgrade().unwrap());
+
+ *Arc::make_mut(&mut cow0) += 1;
+
+ assert!(76 == *cow0);
+ assert!(cow1_weak.upgrade().is_none());
+ }
+
+ #[test]
+ fn test_live() {
+ let x = Arc::new(5);
+ let y = Arc::downgrade(&x);
+ assert!(y.upgrade().is_some());
+ }
+
+ #[test]
+ fn test_dead() {
+ let x = Arc::new(5);
+ let y = Arc::downgrade(&x);
+ drop(x);
+ assert!(y.upgrade().is_none());
+ }
+
+ #[test]
+ fn weak_self_cyclic() {
+ struct Cycle {
+ x: Mutex<Option<Weak<Cycle>>>,
+ }
+
+ let a = Arc::new(Cycle { x: Mutex::new(None) });
+ let b = Arc::downgrade(&a.clone());
+ *a.x.lock().unwrap() = Some(b);
+
+ // hopefully we don't double-free (or leak)...
+ }
+
+ #[test]
+ fn drop_arc() {
+ let mut canary = atomic::AtomicUsize::new(0);
+ let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+ drop(x);
+ assert!(canary.load(Acquire) == 1);
+ }
+
+ #[test]
+ fn drop_arc_weak() {
+ let mut canary = atomic::AtomicUsize::new(0);
+ let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+ let arc_weak = Arc::downgrade(&arc);
+ assert!(canary.load(Acquire) == 0);
+ // Data is dropped with the last strong ref, even while a weak ref
+ // is still alive.
+ drop(arc);
+ assert!(canary.load(Acquire) == 1);
+ drop(arc_weak);
+ }
+
+ #[test]
+ fn test_strong_count() {
+ let a = Arc::new(0);
+ assert!(Arc::strong_count(&a) == 1);
+ let w = Arc::downgrade(&a);
+ assert!(Arc::strong_count(&a) == 1);
+ let b = w.upgrade().expect("");
+ assert!(Arc::strong_count(&b) == 2);
+ assert!(Arc::strong_count(&a) == 2);
+ drop(w);
+ drop(a);
+ assert!(Arc::strong_count(&b) == 1);
+ let c = b.clone();
+ assert!(Arc::strong_count(&b) == 2);
+ assert!(Arc::strong_count(&c) == 2);
+ }
+
+ #[test]
+ fn test_weak_count() {
+ let a = Arc::new(0);
+ assert!(Arc::strong_count(&a) == 1);
+ assert!(Arc::weak_count(&a) == 0);
+ let w = Arc::downgrade(&a);
+ assert!(Arc::strong_count(&a) == 1);
+ assert!(Arc::weak_count(&a) == 1);
+ let x = w.clone();
+ assert!(Arc::weak_count(&a) == 2);
+ drop(w);
+ drop(x);
+ assert!(Arc::strong_count(&a) == 1);
+ assert!(Arc::weak_count(&a) == 0);
+ let c = a.clone();
+ assert!(Arc::strong_count(&a) == 2);
+ assert!(Arc::weak_count(&a) == 0);
+ let d = Arc::downgrade(&c);
+ assert!(Arc::weak_count(&c) == 1);
+ assert!(Arc::strong_count(&c) == 2);
+
+ drop(a);
+ drop(c);
+ drop(d);
+ }
+
+ #[test]
+ fn show_arc() {
+ let a = Arc::new(5);
+ assert_eq!(format!("{:?}", a), "5");
+ }
+
+ // Make sure deriving works with Arc<T>
+ #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
+ struct Foo {
+ inner: Arc<i32>,
+ }
+
+ #[test]
+ fn test_unsized() {
+ let x: Arc<[i32]> = Arc::new([1, 2, 3]);
+ assert_eq!(format!("{:?}", x), "[1, 2, 3]");
+ let y = Arc::downgrade(&x.clone());
+ drop(x);
+ assert!(y.upgrade().is_none());
+ }
+
+ #[test]
+ fn test_from_owned() {
+ let foo = 123;
+ let foo_arc = Arc::from(foo);
+ assert!(123 == *foo_arc);
+ }
+
+ #[test]
+ fn test_new_weak() {
+ let foo: Weak<usize> = Weak::new();
+ assert!(foo.upgrade().is_none());
+ }
+
+ #[test]
+ fn test_ptr_eq() {
+ let five = Arc::new(5);
+ let same_five = five.clone();
+ let other_five = Arc::new(5);
+
+ assert!(Arc::ptr_eq(&five, &same_five));
+ assert!(!Arc::ptr_eq(&five, &other_five));
+ }
+
+ #[test]
+ #[cfg_attr(target_os = "emscripten", ignore)]
+ fn test_weak_count_locked() {
+ // Races `is_unique`'s weak-count lock (via `get_mut`) against
+ // `weak_count` readers; the lock value must never leak out as a count.
+ let mut a = Arc::new(atomic::AtomicBool::new(false));
+ let a2 = a.clone();
+ let t = thread::spawn(move || {
+ for _i in 0..1000000 {
+ Arc::get_mut(&mut a);
+ }
+ a.store(true, SeqCst);
+ });
+
+ while !a2.load(SeqCst) {
+ let n = Arc::weak_count(&a2);
+ assert!(n < 2, "bad weak count: {}", n);
+ }
+ t.join().unwrap();
+ }
+
+ #[test]
+ fn test_from_str() {
+ let r: Arc<str> = Arc::from("foo");
+
+ assert_eq!(&r[..], "foo");
+ }
+
+ #[test]
+ fn test_copy_from_slice() {
+ let s: &[u32] = &[1, 2, 3];
+ let r: Arc<[u32]> = Arc::from(s);
+
+ assert_eq!(&r[..], [1, 2, 3]);
+ }
+
+ #[test]
+ fn test_clone_from_slice() {
+ #[derive(Clone, Debug, Eq, PartialEq)]
+ struct X(u32);
+
+ let s: &[X] = &[X(1), X(2), X(3)];
+ let r: Arc<[X]> = Arc::from(s);
+
+ assert_eq!(&r[..], s);
+ }
+
+ #[test]
+ #[should_panic]
+ fn test_clone_from_slice_panic() {
+ use std::string::{String, ToString};
+
+ struct Fail(u32, String);
+
+ impl Clone for Fail {
+ fn clone(&self) -> Fail {
+ if self.0 == 2 {
+ panic!();
+ }
+ Fail(self.0, self.1.clone())
+ }
+ }
+
+ let s: &[Fail] = &[
+ Fail(0, "foo".to_string()),
+ Fail(1, "bar".to_string()),
+ Fail(2, "baz".to_string()),
+ ];
+
+ // Should panic, but not cause memory corruption
+ let _r: Arc<[Fail]> = Arc::from(s);
+ }
+
+ #[test]
+ fn test_from_box() {
+ let b: Box<u32> = box 123;
+ let r: Arc<u32> = Arc::from(b);
+
+ assert_eq!(*r, 123);
+ }
+
+ #[test]
+ fn test_from_box_str() {
+ use std::string::String;
+
+ let s = String::from("foo").into_boxed_str();
+ let r: Arc<str> = Arc::from(s);
+
+ assert_eq!(&r[..], "foo");
+ }
+
+ #[test]
+ fn test_from_box_slice() {
+ let s = vec![1, 2, 3].into_boxed_slice();
+ let r: Arc<[u32]> = Arc::from(s);
+
+ assert_eq!(&r[..], [1, 2, 3]);
+ }
+
+ #[test]
+ fn test_from_box_trait() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ let b: Box<Display> = box 123;
+ let r: Arc<Display> = Arc::from(b);
+
+ assert_eq!(r.to_string(), "123");
+ }
+
+ #[test]
+ fn test_from_box_trait_zero_sized() {
+ use std::fmt::Debug;
+
+ let b: Box<Debug> = box ();
+ let r: Arc<Debug> = Arc::from(b);
+
+ assert_eq!(format!("{:?}", r), "()");
+ }
+
+ #[test]
+ fn test_from_vec() {
+ let v = vec![1, 2, 3];
+ let r: Arc<[u32]> = Arc::from(v);
+
+ assert_eq!(&r[..], [1, 2, 3]);
+ }
+
+ #[test]
+ fn test_downcast() {
+ use std::any::Any;
+
+ let r1: Arc<Any + Send + Sync> = Arc::new(i32::max_value());
+ let r2: Arc<Any + Send + Sync> = Arc::new("abc");
+
+ assert!(r1.clone().downcast::<u32>().is_err());
+
+ let r1i32 = r1.downcast::<i32>();
+ assert!(r1i32.is_ok());
+ assert_eq!(r1i32.unwrap(), Arc::new(i32::max_value()));
+
+ assert!(r2.clone().downcast::<i32>().is_err());
+
+ let r2str = r2.downcast::<&'static str>();
+ assert!(r2str.is_ok());
+ assert_eq!(r2str.unwrap(), Arc::new("abc"));
+ }
+}
+
+// Borrows the inner value, mirroring `Deref`.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
+ fn borrow(&self) -> &T {
+ &**self
+ }
+}
+
+// Cheap reference conversion to the inner value, mirroring `Deref`.
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+impl<T: ?Sized> AsRef<T> for Arc<T> {
+ fn as_ref(&self) -> &T {
+ &**self
+ }
+}
#[cfg(target_has_atomic = "ptr")]
mod if_arc {
use super::*;
- use arc::Arc;
use core::marker::PhantomData;
use core::mem;
use core::ptr::{self, NonNull};
+ use sync::Arc;
/// A way of waking up a specific task.
///
use core::ptr::NonNull;
use core::slice;
-use alloc::CollectionAllocErr;
+use collections::CollectionAllocErr;
use borrow::ToOwned;
use borrow::Cow;
use boxed::Box;
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A double-ended queue implemented with a growable ring buffer.
-//!
-//! This queue has `O(1)` amortized inserts and removals from both ends of the
-//! container. It also has `O(1)` indexing like a vector. The contained elements
-//! are not required to be copyable, and the queue will be sendable if the
-//! contained type is sendable.
-
-#![stable(feature = "rust1", since = "1.0.0")]
-
-use core::cmp::Ordering;
-use core::fmt;
-use core::iter::{repeat, FromIterator, FusedIterator};
-use core::mem;
-use core::ops::Bound::{Excluded, Included, Unbounded};
-use core::ops::{Index, IndexMut, RangeBounds};
-use core::ptr;
-use core::ptr::NonNull;
-use core::slice;
-
-use core::hash::{Hash, Hasher};
-use core::cmp;
-
-use alloc::CollectionAllocErr;
-use raw_vec::RawVec;
-use vec::Vec;
-
-const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
-const MINIMUM_CAPACITY: usize = 1; // 2 - 1
-#[cfg(target_pointer_width = "32")]
-const MAXIMUM_ZST_CAPACITY: usize = 1 << (32 - 1); // Largest possible power of two
-#[cfg(target_pointer_width = "64")]
-const MAXIMUM_ZST_CAPACITY: usize = 1 << (64 - 1); // Largest possible power of two
-
-/// A double-ended queue implemented with a growable ring buffer.
-///
-/// The "default" usage of this type as a queue is to use [`push_back`] to add to
-/// the queue, and [`pop_front`] to remove from the queue. [`extend`] and [`append`]
-/// push onto the back in this manner, and iterating over `VecDeque` goes front
-/// to back.
-///
-/// [`push_back`]: #method.push_back
-/// [`pop_front`]: #method.pop_front
-/// [`extend`]: #method.extend
-/// [`append`]: #method.append
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct VecDeque<T> {
- // tail and head are pointers into the buffer. Tail always points
- // to the first element that could be read, Head always points
- // to where data should be written.
- // If tail == head the buffer is empty. The length of the ringbuffer
- // is defined as the distance between the two.
- tail: usize,
- head: usize,
- buf: RawVec<T>,
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone> Clone for VecDeque<T> {
- fn clone(&self) -> VecDeque<T> {
- self.iter().cloned().collect()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T> Drop for VecDeque<T> {
- fn drop(&mut self) {
- let (front, back) = self.as_mut_slices();
- unsafe {
- // use drop for [T]
- ptr::drop_in_place(front);
- ptr::drop_in_place(back);
- }
- // RawVec handles deallocation
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Default for VecDeque<T> {
- /// Creates an empty `VecDeque<T>`.
- #[inline]
- fn default() -> VecDeque<T> {
- VecDeque::new()
- }
-}
-
-impl<T> VecDeque<T> {
- /// Marginally more convenient
- #[inline]
- fn ptr(&self) -> *mut T {
- self.buf.ptr()
- }
-
- /// Marginally more convenient
- #[inline]
- fn cap(&self) -> usize {
- if mem::size_of::<T>() == 0 {
- // For zero sized types, we are always at maximum capacity
- MAXIMUM_ZST_CAPACITY
- } else {
- self.buf.cap()
- }
- }
-
- /// Turn ptr into a slice
- #[inline]
- unsafe fn buffer_as_slice(&self) -> &[T] {
- slice::from_raw_parts(self.ptr(), self.cap())
- }
-
- /// Turn ptr into a mut slice
- #[inline]
- unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
- slice::from_raw_parts_mut(self.ptr(), self.cap())
- }
-
- /// Moves an element out of the buffer
- #[inline]
- unsafe fn buffer_read(&mut self, off: usize) -> T {
- ptr::read(self.ptr().offset(off as isize))
- }
-
- /// Writes an element into the buffer, moving it.
- #[inline]
- unsafe fn buffer_write(&mut self, off: usize, value: T) {
- ptr::write(self.ptr().offset(off as isize), value);
- }
-
- /// Returns `true` if and only if the buffer is at full capacity.
- #[inline]
- fn is_full(&self) -> bool {
- self.cap() - self.len() == 1
- }
-
- /// Returns the index in the underlying buffer for a given logical element
- /// index.
- #[inline]
- fn wrap_index(&self, idx: usize) -> usize {
- wrap_index(idx, self.cap())
- }
-
- /// Returns the index in the underlying buffer for a given logical element
- /// index + addend.
- #[inline]
- fn wrap_add(&self, idx: usize, addend: usize) -> usize {
- wrap_index(idx.wrapping_add(addend), self.cap())
- }
-
- /// Returns the index in the underlying buffer for a given logical element
- /// index - subtrahend.
- #[inline]
- fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize {
- wrap_index(idx.wrapping_sub(subtrahend), self.cap())
- }
-
- /// Copies a contiguous block of memory len long from src to dst
- #[inline]
- unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
- debug_assert!(dst + len <= self.cap(),
- "cpy dst={} src={} len={} cap={}",
- dst,
- src,
- len,
- self.cap());
- debug_assert!(src + len <= self.cap(),
- "cpy dst={} src={} len={} cap={}",
- dst,
- src,
- len,
- self.cap());
- ptr::copy(self.ptr().offset(src as isize),
- self.ptr().offset(dst as isize),
- len);
- }
-
- /// Copies a contiguous block of memory len long from src to dst
- #[inline]
- unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
- debug_assert!(dst + len <= self.cap(),
- "cno dst={} src={} len={} cap={}",
- dst,
- src,
- len,
- self.cap());
- debug_assert!(src + len <= self.cap(),
- "cno dst={} src={} len={} cap={}",
- dst,
- src,
- len,
- self.cap());
- ptr::copy_nonoverlapping(self.ptr().offset(src as isize),
- self.ptr().offset(dst as isize),
- len);
- }
-
- /// Copies a potentially wrapping block of memory len long from src to dest.
- /// (abs(dst - src) + len) must be no larger than cap() (There must be at
- /// most one continuous overlapping region between src and dest).
- unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
- #[allow(dead_code)]
- fn diff(a: usize, b: usize) -> usize {
- if a <= b { b - a } else { a - b }
- }
- debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
- "wrc dst={} src={} len={} cap={}",
- dst,
- src,
- len,
- self.cap());
-
- if src == dst || len == 0 {
- return;
- }
-
- let dst_after_src = self.wrap_sub(dst, src) < len;
-
- let src_pre_wrap_len = self.cap() - src;
- let dst_pre_wrap_len = self.cap() - dst;
- let src_wraps = src_pre_wrap_len < len;
- let dst_wraps = dst_pre_wrap_len < len;
-
- match (dst_after_src, src_wraps, dst_wraps) {
- (_, false, false) => {
- // src doesn't wrap, dst doesn't wrap
- //
- // S . . .
- // 1 [_ _ A A B B C C _]
- // 2 [_ _ A A A A B B _]
- // D . . .
- //
- self.copy(dst, src, len);
- }
- (false, false, true) => {
- // dst before src, src doesn't wrap, dst wraps
- //
- // S . . .
- // 1 [A A B B _ _ _ C C]
- // 2 [A A B B _ _ _ A A]
- // 3 [B B B B _ _ _ A A]
- // . . D .
- //
- self.copy(dst, src, dst_pre_wrap_len);
- self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
- }
- (true, false, true) => {
- // src before dst, src doesn't wrap, dst wraps
- //
- // S . . .
- // 1 [C C _ _ _ A A B B]
- // 2 [B B _ _ _ A A B B]
- // 3 [B B _ _ _ A A A A]
- // . . D .
- //
- self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
- self.copy(dst, src, dst_pre_wrap_len);
- }
- (false, true, false) => {
- // dst before src, src wraps, dst doesn't wrap
- //
- // . . S .
- // 1 [C C _ _ _ A A B B]
- // 2 [C C _ _ _ B B B B]
- // 3 [C C _ _ _ B B C C]
- // D . . .
- //
- self.copy(dst, src, src_pre_wrap_len);
- self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
- }
- (true, true, false) => {
- // src before dst, src wraps, dst doesn't wrap
- //
- // . . S .
- // 1 [A A B B _ _ _ C C]
- // 2 [A A A A _ _ _ C C]
- // 3 [C C A A _ _ _ C C]
- // D . . .
- //
- self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
- self.copy(dst, src, src_pre_wrap_len);
- }
- (false, true, true) => {
- // dst before src, src wraps, dst wraps
- //
- // . . . S .
- // 1 [A B C D _ E F G H]
- // 2 [A B C D _ E G H H]
- // 3 [A B C D _ E G H A]
- // 4 [B C C D _ E G H A]
- // . . D . .
- //
- debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
- let delta = dst_pre_wrap_len - src_pre_wrap_len;
- self.copy(dst, src, src_pre_wrap_len);
- self.copy(dst + src_pre_wrap_len, 0, delta);
- self.copy(0, delta, len - dst_pre_wrap_len);
- }
- (true, true, true) => {
- // src before dst, src wraps, dst wraps
- //
- // . . S . .
- // 1 [A B C D _ E F G H]
- // 2 [A A B D _ E F G H]
- // 3 [H A B D _ E F G H]
- // 4 [H A B D _ E F F G]
- // . . . D .
- //
- debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
- let delta = src_pre_wrap_len - dst_pre_wrap_len;
- self.copy(delta, 0, len - src_pre_wrap_len);
- self.copy(0, self.cap() - delta, delta);
- self.copy(dst, src, dst_pre_wrap_len);
- }
- }
- }
-
- /// Frobs the head and tail sections around to handle the fact that we
- /// just reallocated. Unsafe because it trusts old_cap.
- #[inline]
- unsafe fn handle_cap_increase(&mut self, old_cap: usize) {
- let new_cap = self.cap();
-
- // Move the shortest contiguous section of the ring buffer
- // T H
- // [o o o o o o o . ]
- // T H
- // A [o o o o o o o . . . . . . . . . ]
- // H T
- // [o o . o o o o o ]
- // T H
- // B [. . . o o o o o o o . . . . . . ]
- // H T
- // [o o o o o . o o ]
- // H T
- // C [o o o o o . . . . . . . . . o o ]
-
- if self.tail <= self.head {
- // A
- // Nop
- } else if self.head < old_cap - self.tail {
- // B
- self.copy_nonoverlapping(old_cap, 0, self.head);
- self.head += old_cap;
- debug_assert!(self.head > self.tail);
- } else {
- // C
- let new_tail = new_cap - (old_cap - self.tail);
- self.copy_nonoverlapping(new_tail, self.tail, old_cap - self.tail);
- self.tail = new_tail;
- debug_assert!(self.head < self.tail);
- }
- debug_assert!(self.head < self.cap());
- debug_assert!(self.tail < self.cap());
- debug_assert!(self.cap().count_ones() == 1);
- }
-}
-
-impl<T> VecDeque<T> {
- /// Creates an empty `VecDeque`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let vector: VecDeque<u32> = VecDeque::new();
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn new() -> VecDeque<T> {
- VecDeque::with_capacity(INITIAL_CAPACITY)
- }
-
- /// Creates an empty `VecDeque` with space for at least `n` elements.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let vector: VecDeque<u32> = VecDeque::with_capacity(10);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn with_capacity(n: usize) -> VecDeque<T> {
- // +1 since the ringbuffer always leaves one space empty
- let cap = cmp::max(n + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
- assert!(cap > n, "capacity overflow");
-
- VecDeque {
- tail: 0,
- head: 0,
- buf: RawVec::with_capacity(cap),
- }
- }
-
- /// Retrieves an element in the `VecDeque` by index.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(3);
- /// buf.push_back(4);
- /// buf.push_back(5);
- /// assert_eq!(buf.get(1), Some(&4));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get(&self, index: usize) -> Option<&T> {
- if index < self.len() {
- let idx = self.wrap_add(self.tail, index);
- unsafe { Some(&*self.ptr().offset(idx as isize)) }
- } else {
- None
- }
- }
-
- /// Retrieves an element in the `VecDeque` mutably by index.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(3);
- /// buf.push_back(4);
- /// buf.push_back(5);
- /// if let Some(elem) = buf.get_mut(1) {
- /// *elem = 7;
- /// }
- ///
- /// assert_eq!(buf[1], 7);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
- if index < self.len() {
- let idx = self.wrap_add(self.tail, index);
- unsafe { Some(&mut *self.ptr().offset(idx as isize)) }
- } else {
- None
- }
- }
-
- /// Swaps elements at indices `i` and `j`.
- ///
- /// `i` and `j` may be equal.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Panics
- ///
- /// Panics if either index is out of bounds.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(3);
- /// buf.push_back(4);
- /// buf.push_back(5);
- /// assert_eq!(buf, [3, 4, 5]);
- /// buf.swap(0, 2);
- /// assert_eq!(buf, [5, 4, 3]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn swap(&mut self, i: usize, j: usize) {
- assert!(i < self.len());
- assert!(j < self.len());
- let ri = self.wrap_add(self.tail, i);
- let rj = self.wrap_add(self.tail, j);
- unsafe {
- ptr::swap(self.ptr().offset(ri as isize),
- self.ptr().offset(rj as isize))
- }
- }
-
- /// Returns the number of elements the `VecDeque` can hold without
- /// reallocating.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let buf: VecDeque<i32> = VecDeque::with_capacity(10);
- /// assert!(buf.capacity() >= 10);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn capacity(&self) -> usize {
- self.cap() - 1
- }
-
- /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
- /// given `VecDeque`. Does nothing if the capacity is already sufficient.
- ///
- /// Note that the allocator may give the collection more space than it requests. Therefore
- /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
- /// insertions are expected.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity overflows `usize`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
- /// buf.reserve_exact(10);
- /// assert!(buf.capacity() >= 11);
- /// ```
- ///
- /// [`reserve`]: #method.reserve
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn reserve_exact(&mut self, additional: usize) {
- self.reserve(additional);
- }
-
- /// Reserves capacity for at least `additional` more elements to be inserted in the given
- /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity overflows `usize`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
- /// buf.reserve(10);
- /// assert!(buf.capacity() >= 11);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn reserve(&mut self, additional: usize) {
- let old_cap = self.cap();
- let used_cap = self.len() + 1;
- let new_cap = used_cap.checked_add(additional)
- .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
- .expect("capacity overflow");
-
- if new_cap > old_cap {
- self.buf.reserve_exact(used_cap, new_cap - used_cap);
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- }
- }
-
- /// Tries to reserves the minimum capacity for exactly `additional` more elements to
- /// be inserted in the given `VecDeque<T>`. After calling `reserve_exact`,
- /// capacity will be greater than or equal to `self.len() + additional`.
- /// Does nothing if the capacity is already sufficient.
- ///
- /// Note that the allocator may give the collection more space than it
- /// requests. Therefore capacity can not be relied upon to be precisely
- /// minimal. Prefer `reserve` if future insertions are expected.
- ///
- /// # Errors
- ///
- /// If the capacity overflows, or the allocator reports a failure, then an error
- /// is returned.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(try_reserve)]
- /// use std::collections::CollectionAllocErr;
- /// use std::collections::VecDeque;
- ///
- /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, CollectionAllocErr> {
- /// let mut output = VecDeque::new();
- ///
- /// // Pre-reserve the memory, exiting if we can't
- /// output.try_reserve_exact(data.len())?;
- ///
- /// // Now we know this can't OOM in the middle of our complex work
- /// output.extend(data.iter().map(|&val| {
- /// val * 2 + 5 // very complicated
- /// }));
- ///
- /// Ok(output)
- /// }
- /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
- /// ```
- #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
- pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
- self.try_reserve(additional)
- }
-
- /// Tries to reserve capacity for at least `additional` more elements to be inserted
- /// in the given `VecDeque<T>`. The collection may reserve more space to avoid
- /// frequent reallocations. After calling `reserve`, capacity will be
- /// greater than or equal to `self.len() + additional`. Does nothing if
- /// capacity is already sufficient.
- ///
- /// # Errors
- ///
- /// If the capacity overflows, or the allocator reports a failure, then an error
- /// is returned.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(try_reserve)]
- /// use std::collections::CollectionAllocErr;
- /// use std::collections::VecDeque;
- ///
- /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, CollectionAllocErr> {
- /// let mut output = VecDeque::new();
- ///
- /// // Pre-reserve the memory, exiting if we can't
- /// output.try_reserve(data.len())?;
- ///
- /// // Now we know this can't OOM in the middle of our complex work
- /// output.extend(data.iter().map(|&val| {
- /// val * 2 + 5 // very complicated
- /// }));
- ///
- /// Ok(output)
- /// }
- /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
- /// ```
- #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
- pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
- let old_cap = self.cap();
- let used_cap = self.len() + 1;
- let new_cap = used_cap.checked_add(additional)
- .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
- .ok_or(CollectionAllocErr::CapacityOverflow)?;
-
- if new_cap > old_cap {
- self.buf.try_reserve_exact(used_cap, new_cap - used_cap)?;
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- }
- Ok(())
- }
-
- /// Shrinks the capacity of the `VecDeque` as much as possible.
- ///
- /// It will drop down as close as possible to the length but the allocator may still inform the
- /// `VecDeque` that there is space for a few more elements.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::with_capacity(15);
- /// buf.extend(0..4);
- /// assert_eq!(buf.capacity(), 15);
- /// buf.shrink_to_fit();
- /// assert!(buf.capacity() >= 4);
- /// ```
- #[stable(feature = "deque_extras_15", since = "1.5.0")]
- pub fn shrink_to_fit(&mut self) {
- self.shrink_to(0);
- }
-
- /// Shrinks the capacity of the `VecDeque` with a lower bound.
- ///
- /// The capacity will remain at least as large as both the length
- /// and the supplied value.
- ///
- /// Panics if the current capacity is smaller than the supplied
- /// minimum capacity.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(shrink_to)]
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::with_capacity(15);
- /// buf.extend(0..4);
- /// assert_eq!(buf.capacity(), 15);
- /// buf.shrink_to(6);
- /// assert!(buf.capacity() >= 6);
- /// buf.shrink_to(0);
- /// assert!(buf.capacity() >= 4);
- /// ```
- #[unstable(feature = "shrink_to", reason = "new API", issue="0")]
- pub fn shrink_to(&mut self, min_capacity: usize) {
- assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity");
-
- // +1 since the ringbuffer always leaves one space empty
- // len + 1 can't overflow for an existing, well-formed ringbuffer.
- let target_cap = cmp::max(
- cmp::max(min_capacity, self.len()) + 1,
- MINIMUM_CAPACITY + 1
- ).next_power_of_two();
-
- if target_cap < self.cap() {
- // There are three cases of interest:
- // All elements are out of desired bounds
- // Elements are contiguous, and head is out of desired bounds
- // Elements are discontiguous, and tail is out of desired bounds
- //
- // At all other times, element positions are unaffected.
- //
- // Indicates that elements at the head should be moved.
- let head_outside = self.head == 0 || self.head >= target_cap;
- // Move elements from out of desired bounds (positions after target_cap)
- if self.tail >= target_cap && head_outside {
- // T H
- // [. . . . . . . . o o o o o o o . ]
- // T H
- // [o o o o o o o . ]
- unsafe {
- self.copy_nonoverlapping(0, self.tail, self.len());
- }
- self.head = self.len();
- self.tail = 0;
- } else if self.tail != 0 && self.tail < target_cap && head_outside {
- // T H
- // [. . . o o o o o o o . . . . . . ]
- // H T
- // [o o . o o o o o ]
- let len = self.wrap_sub(self.head, target_cap);
- unsafe {
- self.copy_nonoverlapping(0, target_cap, len);
- }
- self.head = len;
- debug_assert!(self.head < self.tail);
- } else if self.tail >= target_cap {
- // H T
- // [o o o o o . . . . . . . . . o o ]
- // H T
- // [o o o o o . o o ]
- debug_assert!(self.wrap_sub(self.head, 1) < target_cap);
- let len = self.cap() - self.tail;
- let new_tail = target_cap - len;
- unsafe {
- self.copy_nonoverlapping(new_tail, self.tail, len);
- }
- self.tail = new_tail;
- debug_assert!(self.head < self.tail);
- }
-
- self.buf.shrink_to_fit(target_cap);
-
- debug_assert!(self.head < self.cap());
- debug_assert!(self.tail < self.cap());
- debug_assert!(self.cap().count_ones() == 1);
- }
- }
-
- /// Shortens the `VecDeque`, dropping excess elements from the back.
- ///
- /// If `len` is greater than the `VecDeque`'s current length, this has no
- /// effect.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(5);
- /// buf.push_back(10);
- /// buf.push_back(15);
- /// assert_eq!(buf, [5, 10, 15]);
- /// buf.truncate(1);
- /// assert_eq!(buf, [5]);
- /// ```
- #[stable(feature = "deque_extras", since = "1.16.0")]
- pub fn truncate(&mut self, len: usize) {
- for _ in len..self.len() {
- self.pop_back();
- }
- }
-
- /// Returns a front-to-back iterator.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(5);
- /// buf.push_back(3);
- /// buf.push_back(4);
- /// let b: &[_] = &[&5, &3, &4];
- /// let c: Vec<&i32> = buf.iter().collect();
- /// assert_eq!(&c[..], b);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter(&self) -> Iter<T> {
- Iter {
- tail: self.tail,
- head: self.head,
- ring: unsafe { self.buffer_as_slice() },
- }
- }
-
- /// Returns a front-to-back iterator that returns mutable references.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(5);
- /// buf.push_back(3);
- /// buf.push_back(4);
- /// for num in buf.iter_mut() {
- /// *num = *num - 2;
- /// }
- /// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
- /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter_mut(&mut self) -> IterMut<T> {
- IterMut {
- tail: self.tail,
- head: self.head,
- ring: unsafe { self.buffer_as_mut_slice() },
- }
- }
-
- /// Returns a pair of slices which contain, in order, the contents of the
- /// `VecDeque`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut vector = VecDeque::new();
- ///
- /// vector.push_back(0);
- /// vector.push_back(1);
- /// vector.push_back(2);
- ///
- /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..]));
- ///
- /// vector.push_front(10);
- /// vector.push_front(9);
- ///
- /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..]));
- /// ```
- #[inline]
- #[stable(feature = "deque_extras_15", since = "1.5.0")]
- pub fn as_slices(&self) -> (&[T], &[T]) {
- unsafe {
- let buf = self.buffer_as_slice();
- RingSlices::ring_slices(buf, self.head, self.tail)
- }
- }
-
- /// Returns a pair of slices which contain, in order, the contents of the
- /// `VecDeque`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut vector = VecDeque::new();
- ///
- /// vector.push_back(0);
- /// vector.push_back(1);
- ///
- /// vector.push_front(10);
- /// vector.push_front(9);
- ///
- /// vector.as_mut_slices().0[0] = 42;
- /// vector.as_mut_slices().1[0] = 24;
- /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..]));
- /// ```
- #[inline]
- #[stable(feature = "deque_extras_15", since = "1.5.0")]
- pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
- unsafe {
- let head = self.head;
- let tail = self.tail;
- let buf = self.buffer_as_mut_slice();
- RingSlices::ring_slices(buf, head, tail)
- }
- }
-
- /// Returns the number of elements in the `VecDeque`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut v = VecDeque::new();
- /// assert_eq!(v.len(), 0);
- /// v.push_back(1);
- /// assert_eq!(v.len(), 1);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn len(&self) -> usize {
- count(self.tail, self.head, self.cap())
- }
-
- /// Returns `true` if the `VecDeque` is empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut v = VecDeque::new();
- /// assert!(v.is_empty());
- /// v.push_front(1);
- /// assert!(!v.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_empty(&self) -> bool {
- self.tail == self.head
- }
-
- /// Create a draining iterator that removes the specified range in the
- /// `VecDeque` and yields the removed items.
- ///
- /// Note 1: The element range is removed even if the iterator is not
- /// consumed until the end.
- ///
- /// Note 2: It is unspecified how many elements are removed from the deque,
- /// if the `Drain` value is not dropped, but the borrow it holds expires
- /// (eg. due to mem::forget).
- ///
- /// # Panics
- ///
- /// Panics if the starting point is greater than the end point or if
- /// the end point is greater than the length of the vector.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
- /// let drained = v.drain(2..).collect::<VecDeque<_>>();
- /// assert_eq!(drained, [3]);
- /// assert_eq!(v, [1, 2]);
- ///
- /// // A full range clears all contents
- /// v.drain(..);
- /// assert!(v.is_empty());
- /// ```
- #[inline]
- #[stable(feature = "drain", since = "1.6.0")]
- pub fn drain<R>(&mut self, range: R) -> Drain<T>
- where R: RangeBounds<usize>
- {
- // Memory safety
- //
- // When the Drain is first created, the source deque is shortened to
- // make sure no uninitialized or moved-from elements are accessible at
- // all if the Drain's destructor never gets to run.
- //
- // Drain will ptr::read out the values to remove.
- // When finished, the remaining data will be copied back to cover the hole,
- // and the head/tail values will be restored correctly.
- //
- let len = self.len();
- let start = match range.start_bound() {
- Included(&n) => n,
- Excluded(&n) => n + 1,
- Unbounded => 0,
- };
- let end = match range.end_bound() {
- Included(&n) => n + 1,
- Excluded(&n) => n,
- Unbounded => len,
- };
- assert!(start <= end, "drain lower bound was too large");
- assert!(end <= len, "drain upper bound was too large");
-
- // The deque's elements are parted into three segments:
- // * self.tail -> drain_tail
- // * drain_tail -> drain_head
- // * drain_head -> self.head
- //
- // T = self.tail; H = self.head; t = drain_tail; h = drain_head
- //
- // We store drain_tail as self.head, and drain_head and self.head as
- // after_tail and after_head respectively on the Drain. This also
- // truncates the effective array such that if the Drain is leaked, we
- // have forgotten about the potentially moved values after the start of
- // the drain.
- //
- // T t h H
- // [. . . o o x x o o . . .]
- //
- let drain_tail = self.wrap_add(self.tail, start);
- let drain_head = self.wrap_add(self.tail, end);
- let head = self.head;
-
- // "forget" about the values after the start of the drain until after
- // the drain is complete and the Drain destructor is run.
- self.head = drain_tail;
-
- Drain {
- deque: NonNull::from(&mut *self),
- after_tail: drain_head,
- after_head: head,
- iter: Iter {
- tail: drain_tail,
- head: drain_head,
- ring: unsafe { self.buffer_as_mut_slice() },
- },
- }
- }
-
- /// Clears the `VecDeque`, removing all values.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut v = VecDeque::new();
- /// v.push_back(1);
- /// v.clear();
- /// assert!(v.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- #[inline]
- pub fn clear(&mut self) {
- self.drain(..);
- }
-
- /// Returns `true` if the `VecDeque` contains an element equal to the
- /// given value.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut vector: VecDeque<u32> = VecDeque::new();
- ///
- /// vector.push_back(0);
- /// vector.push_back(1);
- ///
- /// assert_eq!(vector.contains(&1), true);
- /// assert_eq!(vector.contains(&10), false);
- /// ```
- #[stable(feature = "vec_deque_contains", since = "1.12.0")]
- pub fn contains(&self, x: &T) -> bool
- where T: PartialEq<T>
- {
- let (a, b) = self.as_slices();
- a.contains(x) || b.contains(x)
- }
-
- /// Provides a reference to the front element, or `None` if the `VecDeque` is
- /// empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut d = VecDeque::new();
- /// assert_eq!(d.front(), None);
- ///
- /// d.push_back(1);
- /// d.push_back(2);
- /// assert_eq!(d.front(), Some(&1));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn front(&self) -> Option<&T> {
- if !self.is_empty() {
- Some(&self[0])
- } else {
- None
- }
- }
-
- /// Provides a mutable reference to the front element, or `None` if the
- /// `VecDeque` is empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut d = VecDeque::new();
- /// assert_eq!(d.front_mut(), None);
- ///
- /// d.push_back(1);
- /// d.push_back(2);
- /// match d.front_mut() {
- /// Some(x) => *x = 9,
- /// None => (),
- /// }
- /// assert_eq!(d.front(), Some(&9));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn front_mut(&mut self) -> Option<&mut T> {
- if !self.is_empty() {
- Some(&mut self[0])
- } else {
- None
- }
- }
-
- /// Provides a reference to the back element, or `None` if the `VecDeque` is
- /// empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut d = VecDeque::new();
- /// assert_eq!(d.back(), None);
- ///
- /// d.push_back(1);
- /// d.push_back(2);
- /// assert_eq!(d.back(), Some(&2));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn back(&self) -> Option<&T> {
- if !self.is_empty() {
- Some(&self[self.len() - 1])
- } else {
- None
- }
- }
-
- /// Provides a mutable reference to the back element, or `None` if the
- /// `VecDeque` is empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut d = VecDeque::new();
- /// assert_eq!(d.back(), None);
- ///
- /// d.push_back(1);
- /// d.push_back(2);
- /// match d.back_mut() {
- /// Some(x) => *x = 9,
- /// None => (),
- /// }
- /// assert_eq!(d.back(), Some(&9));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn back_mut(&mut self) -> Option<&mut T> {
- let len = self.len();
- if !self.is_empty() {
- Some(&mut self[len - 1])
- } else {
- None
- }
- }
-
- /// Removes the first element and returns it, or `None` if the `VecDeque` is
- /// empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut d = VecDeque::new();
- /// d.push_back(1);
- /// d.push_back(2);
- ///
- /// assert_eq!(d.pop_front(), Some(1));
- /// assert_eq!(d.pop_front(), Some(2));
- /// assert_eq!(d.pop_front(), None);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn pop_front(&mut self) -> Option<T> {
- if self.is_empty() {
- None
- } else {
- let tail = self.tail;
- self.tail = self.wrap_add(self.tail, 1);
- unsafe { Some(self.buffer_read(tail)) }
- }
- }
-
- /// Prepends an element to the `VecDeque`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut d = VecDeque::new();
- /// d.push_front(1);
- /// d.push_front(2);
- /// assert_eq!(d.front(), Some(&2));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn push_front(&mut self, value: T) {
- self.grow_if_necessary();
-
- self.tail = self.wrap_sub(self.tail, 1);
- let tail = self.tail;
- unsafe {
- self.buffer_write(tail, value);
- }
- }
-
- /// Appends an element to the back of the `VecDeque`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(1);
- /// buf.push_back(3);
- /// assert_eq!(3, *buf.back().unwrap());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn push_back(&mut self, value: T) {
- self.grow_if_necessary();
-
- let head = self.head;
- self.head = self.wrap_add(self.head, 1);
- unsafe { self.buffer_write(head, value) }
- }
-
- /// Removes the last element from the `VecDeque` and returns it, or `None` if
- /// it is empty.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// assert_eq!(buf.pop_back(), None);
- /// buf.push_back(1);
- /// buf.push_back(3);
- /// assert_eq!(buf.pop_back(), Some(3));
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn pop_back(&mut self) -> Option<T> {
- if self.is_empty() {
- None
- } else {
- self.head = self.wrap_sub(self.head, 1);
- let head = self.head;
- unsafe { Some(self.buffer_read(head)) }
- }
- }
-
- #[inline]
- fn is_contiguous(&self) -> bool {
- self.tail <= self.head
- }
-
- /// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the
- /// last element.
- ///
- /// This does not preserve ordering, but is O(1).
- ///
- /// Returns `None` if `index` is out of bounds.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// assert_eq!(buf.swap_remove_back(0), None);
- /// buf.push_back(1);
- /// buf.push_back(2);
- /// buf.push_back(3);
- /// assert_eq!(buf, [1, 2, 3]);
- ///
- /// assert_eq!(buf.swap_remove_back(0), Some(1));
- /// assert_eq!(buf, [3, 2]);
- /// ```
- #[stable(feature = "deque_extras_15", since = "1.5.0")]
- pub fn swap_remove_back(&mut self, index: usize) -> Option<T> {
- let length = self.len();
- if length > 0 && index < length - 1 {
- self.swap(index, length - 1);
- } else if index >= length {
- return None;
- }
- self.pop_back()
- }
-
- /// Removes an element from anywhere in the `VecDeque` and returns it,
- /// replacing it with the first element.
- ///
- /// This does not preserve ordering, but is O(1).
- ///
- /// Returns `None` if `index` is out of bounds.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// assert_eq!(buf.swap_remove_front(0), None);
- /// buf.push_back(1);
- /// buf.push_back(2);
- /// buf.push_back(3);
- /// assert_eq!(buf, [1, 2, 3]);
- ///
- /// assert_eq!(buf.swap_remove_front(2), Some(3));
- /// assert_eq!(buf, [2, 1]);
- /// ```
- #[stable(feature = "deque_extras_15", since = "1.5.0")]
- pub fn swap_remove_front(&mut self, index: usize) -> Option<T> {
- let length = self.len();
- if length > 0 && index < length && index != 0 {
- self.swap(index, 0);
- } else if index >= length {
- return None;
- }
- self.pop_front()
- }
-
- /// Inserts an element at `index` within the `VecDeque`, shifting all elements with indices
- /// greater than or equal to `index` towards the back.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Panics
- ///
- /// Panics if `index` is greater than `VecDeque`'s length
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut vec_deque = VecDeque::new();
- /// vec_deque.push_back('a');
- /// vec_deque.push_back('b');
- /// vec_deque.push_back('c');
- /// assert_eq!(vec_deque, &['a', 'b', 'c']);
- ///
- /// vec_deque.insert(1, 'd');
- /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']);
- /// ```
- #[stable(feature = "deque_extras_15", since = "1.5.0")]
- pub fn insert(&mut self, index: usize, value: T) {
- assert!(index <= self.len(), "index out of bounds");
- self.grow_if_necessary();
-
- // Move the least number of elements in the ring buffer and insert
- // the given object
- //
- // At most len/2 - 1 elements will be moved. O(min(n, n-i))
- //
- // There are three main cases:
- // Elements are contiguous
- // - special case when tail is 0
- // Elements are discontiguous and the insert is in the tail section
- // Elements are discontiguous and the insert is in the head section
- //
- // For each of those there are two more cases:
- // Insert is closer to tail
- // Insert is closer to head
- //
- // Key: H - self.head
- // T - self.tail
- // o - Valid element
- // I - Insertion element
- // A - The element that should be after the insertion point
- // M - Indicates element was moved
-
- let idx = self.wrap_add(self.tail, index);
-
- let distance_to_tail = index;
- let distance_to_head = self.len() - index;
-
- let contiguous = self.is_contiguous();
-
- match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
- (true, true, _) if index == 0 => {
- // push_front
- //
- // T
- // I H
- // [A o o o o o o . . . . . . . . .]
- //
- // H T
- // [A o o o o o o o . . . . . I]
- //
-
- self.tail = self.wrap_sub(self.tail, 1);
- }
- (true, true, _) => {
- unsafe {
- // contiguous, insert closer to tail:
- //
- // T I H
- // [. . . o o A o o o o . . . . . .]
- //
- // T H
- // [. . o o I A o o o o . . . . . .]
- // M M
- //
- // contiguous, insert closer to tail and tail is 0:
- //
- //
- // T I H
- // [o o A o o o o . . . . . . . . .]
- //
- // H T
- // [o I A o o o o o . . . . . . . o]
- // M M
-
- let new_tail = self.wrap_sub(self.tail, 1);
-
- self.copy(new_tail, self.tail, 1);
- // Already moved the tail, so we only copy `index - 1` elements.
- self.copy(self.tail, self.tail + 1, index - 1);
-
- self.tail = new_tail;
- }
- }
- (true, false, _) => {
- unsafe {
- // contiguous, insert closer to head:
- //
- // T I H
- // [. . . o o o o A o o . . . . . .]
- //
- // T H
- // [. . . o o o o I A o o . . . . .]
- // M M M
-
- self.copy(idx + 1, idx, self.head - idx);
- self.head = self.wrap_add(self.head, 1);
- }
- }
- (false, true, true) => {
- unsafe {
- // discontiguous, insert closer to tail, tail section:
- //
- // H T I
- // [o o o o o o . . . . . o o A o o]
- //
- // H T
- // [o o o o o o . . . . o o I A o o]
- // M M
-
- self.copy(self.tail - 1, self.tail, index);
- self.tail -= 1;
- }
- }
- (false, false, true) => {
- unsafe {
- // discontiguous, insert closer to head, tail section:
- //
- // H T I
- // [o o . . . . . . . o o o o o A o]
- //
- // H T
- // [o o o . . . . . . o o o o o I A]
- // M M M M
-
- // copy elements up to new head
- self.copy(1, 0, self.head);
-
- // copy last element into empty spot at bottom of buffer
- self.copy(0, self.cap() - 1, 1);
-
- // move elements from idx to end forward not including ^ element
- self.copy(idx + 1, idx, self.cap() - 1 - idx);
-
- self.head += 1;
- }
- }
- (false, true, false) if idx == 0 => {
- unsafe {
- // discontiguous, insert is closer to tail, head section,
- // and is at index zero in the internal buffer:
- //
- // I H T
- // [A o o o o o o o o o . . . o o o]
- //
- // H T
- // [A o o o o o o o o o . . o o o I]
- // M M M
-
- // copy elements up to new tail
- self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
-
- // copy last element into empty spot at bottom of buffer
- self.copy(self.cap() - 1, 0, 1);
-
- self.tail -= 1;
- }
- }
- (false, true, false) => {
- unsafe {
- // discontiguous, insert closer to tail, head section:
- //
- // I H T
- // [o o o A o o o o o o . . . o o o]
- //
- // H T
- // [o o I A o o o o o o . . o o o o]
- // M M M M M M
-
- // copy elements up to new tail
- self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
-
- // copy last element into empty spot at bottom of buffer
- self.copy(self.cap() - 1, 0, 1);
-
- // move elements from idx-1 to end forward not including ^ element
- self.copy(0, 1, idx - 1);
-
- self.tail -= 1;
- }
- }
- (false, false, false) => {
- unsafe {
- // discontiguous, insert closer to head, head section:
- //
- // I H T
- // [o o o o A o o . . . . . . o o o]
- //
- // H T
- // [o o o o I A o o . . . . . o o o]
- // M M M
-
- self.copy(idx + 1, idx, self.head - idx);
- self.head += 1;
- }
- }
- }
-
- // tail might've been changed so we need to recalculate
- let new_idx = self.wrap_add(self.tail, index);
- unsafe {
- self.buffer_write(new_idx, value);
- }
- }
-
- /// Removes and returns the element at `index` from the `VecDeque`.
- /// Whichever end is closer to the removal point will be moved to make
- /// room, and all the affected elements will be moved to new positions.
- /// Returns `None` if `index` is out of bounds.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(1);
- /// buf.push_back(2);
- /// buf.push_back(3);
- /// assert_eq!(buf, [1, 2, 3]);
- ///
- /// assert_eq!(buf.remove(1), Some(2));
- /// assert_eq!(buf, [1, 3]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn remove(&mut self, index: usize) -> Option<T> {
- if self.is_empty() || self.len() <= index {
- return None;
- }
-
- // There are three main cases:
- // Elements are contiguous
- // Elements are discontiguous and the removal is in the tail section
- // Elements are discontiguous and the removal is in the head section
- // - special case when elements are technically contiguous,
- // but self.head = 0
- //
- // For each of those there are two more cases:
- // Insert is closer to tail
- // Insert is closer to head
- //
- // Key: H - self.head
- // T - self.tail
- // o - Valid element
- // x - Element marked for removal
- // R - Indicates element that is being removed
- // M - Indicates element was moved
-
- let idx = self.wrap_add(self.tail, index);
-
- let elem = unsafe { Some(self.buffer_read(idx)) };
-
- let distance_to_tail = index;
- let distance_to_head = self.len() - index;
-
- let contiguous = self.is_contiguous();
-
- match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
- (true, true, _) => {
- unsafe {
- // contiguous, remove closer to tail:
- //
- // T R H
- // [. . . o o x o o o o . . . . . .]
- //
- // T H
- // [. . . . o o o o o o . . . . . .]
- // M M
-
- self.copy(self.tail + 1, self.tail, index);
- self.tail += 1;
- }
- }
- (true, false, _) => {
- unsafe {
- // contiguous, remove closer to head:
- //
- // T R H
- // [. . . o o o o x o o . . . . . .]
- //
- // T H
- // [. . . o o o o o o . . . . . . .]
- // M M
-
- self.copy(idx, idx + 1, self.head - idx - 1);
- self.head -= 1;
- }
- }
- (false, true, true) => {
- unsafe {
- // discontiguous, remove closer to tail, tail section:
- //
- // H T R
- // [o o o o o o . . . . . o o x o o]
- //
- // H T
- // [o o o o o o . . . . . . o o o o]
- // M M
-
- self.copy(self.tail + 1, self.tail, index);
- self.tail = self.wrap_add(self.tail, 1);
- }
- }
- (false, false, false) => {
- unsafe {
- // discontiguous, remove closer to head, head section:
- //
- // R H T
- // [o o o o x o o . . . . . . o o o]
- //
- // H T
- // [o o o o o o . . . . . . . o o o]
- // M M
-
- self.copy(idx, idx + 1, self.head - idx - 1);
- self.head -= 1;
- }
- }
- (false, false, true) => {
- unsafe {
- // discontiguous, remove closer to head, tail section:
- //
- // H T R
- // [o o o . . . . . . o o o o o x o]
- //
- // H T
- // [o o . . . . . . . o o o o o o o]
- // M M M M
- //
- // or quasi-discontiguous, remove next to head, tail section:
- //
- // H T R
- // [. . . . . . . . . o o o o o x o]
- //
- // T H
- // [. . . . . . . . . o o o o o o .]
- // M
-
- // draw in elements in the tail section
- self.copy(idx, idx + 1, self.cap() - idx - 1);
-
- // Prevents underflow.
- if self.head != 0 {
- // copy first element into empty spot
- self.copy(self.cap() - 1, 0, 1);
-
- // move elements in the head section backwards
- self.copy(0, 1, self.head - 1);
- }
-
- self.head = self.wrap_sub(self.head, 1);
- }
- }
- (false, true, false) => {
- unsafe {
- // discontiguous, remove closer to tail, head section:
- //
- // R H T
- // [o o x o o o o o o o . . . o o o]
- //
- // H T
- // [o o o o o o o o o o . . . . o o]
- // M M M M M
-
- // draw in elements up to idx
- self.copy(1, 0, idx);
-
- // copy last element into empty spot
- self.copy(0, self.cap() - 1, 1);
-
- // move elements from tail to end forward, excluding the last one
- self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1);
-
- self.tail = self.wrap_add(self.tail, 1);
- }
- }
- }
-
- return elem;
- }
-
- /// Splits the `VecDeque` into two at the given index.
- ///
- /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`,
- /// and the returned `VecDeque` contains elements `[at, len)`.
- ///
- /// Note that the capacity of `self` does not change.
- ///
- /// Element at index 0 is the front of the queue.
- ///
- /// # Panics
- ///
- /// Panics if `at > len`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf: VecDeque<_> = vec![1,2,3].into_iter().collect();
- /// let buf2 = buf.split_off(1);
- /// assert_eq!(buf, [1]);
- /// assert_eq!(buf2, [2, 3]);
- /// ```
- #[inline]
- #[stable(feature = "split_off", since = "1.4.0")]
- pub fn split_off(&mut self, at: usize) -> Self {
- let len = self.len();
- assert!(at <= len, "`at` out of bounds");
-
- let other_len = len - at;
- let mut other = VecDeque::with_capacity(other_len);
-
- unsafe {
- let (first_half, second_half) = self.as_slices();
-
- let first_len = first_half.len();
- let second_len = second_half.len();
- if at < first_len {
- // `at` lies in the first half.
- let amount_in_first = first_len - at;
-
- ptr::copy_nonoverlapping(first_half.as_ptr().offset(at as isize),
- other.ptr(),
- amount_in_first);
-
- // just take all of the second half.
- ptr::copy_nonoverlapping(second_half.as_ptr(),
- other.ptr().offset(amount_in_first as isize),
- second_len);
- } else {
- // `at` lies in the second half, need to factor in the elements we skipped
- // in the first half.
- let offset = at - first_len;
- let amount_in_second = second_len - offset;
- ptr::copy_nonoverlapping(second_half.as_ptr().offset(offset as isize),
- other.ptr(),
- amount_in_second);
- }
- }
-
- // Cleanup where the ends of the buffers are
- self.head = self.wrap_sub(self.head, other_len);
- other.head = other.wrap_index(other_len);
-
- other
- }
-
- /// Moves all the elements of `other` into `Self`, leaving `other` empty.
- ///
- /// # Panics
- ///
- /// Panics if the new number of elements in self overflows a `usize`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect();
- /// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect();
- /// buf.append(&mut buf2);
- /// assert_eq!(buf, [1, 2, 3, 4]);
- /// assert_eq!(buf2, []);
- /// ```
- #[inline]
- #[stable(feature = "append", since = "1.4.0")]
- pub fn append(&mut self, other: &mut Self) {
- // naive impl
- self.extend(other.drain(..));
- }
-
- /// Retains only the elements specified by the predicate.
- ///
- /// In other words, remove all elements `e` such that `f(&e)` returns false.
- /// This method operates in place and preserves the order of the retained
- /// elements.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.extend(1..5);
- /// buf.retain(|&x| x%2 == 0);
- /// assert_eq!(buf, [2, 4]);
- /// ```
- #[stable(feature = "vec_deque_retain", since = "1.4.0")]
- pub fn retain<F>(&mut self, mut f: F)
- where F: FnMut(&T) -> bool
- {
- let len = self.len();
- let mut del = 0;
- for i in 0..len {
- if !f(&self[i]) {
- del += 1;
- } else if del > 0 {
- self.swap(i - del, i);
- }
- }
- if del > 0 {
- self.truncate(len - del);
- }
- }
-
- // This may panic or abort
- #[inline]
- fn grow_if_necessary(&mut self) {
- if self.is_full() {
- let old_cap = self.cap();
- self.buf.double();
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- debug_assert!(!self.is_full());
- }
- }
-}
-
-impl<T: Clone> VecDeque<T> {
- /// Modifies the `VecDeque` in-place so that `len()` is equal to new_len,
- /// either by removing excess elements from the back or by appending clones of `value`
- /// to the back.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::VecDeque;
- ///
- /// let mut buf = VecDeque::new();
- /// buf.push_back(5);
- /// buf.push_back(10);
- /// buf.push_back(15);
- /// assert_eq!(buf, [5, 10, 15]);
- ///
- /// buf.resize(2, 0);
- /// assert_eq!(buf, [5, 10]);
- ///
- /// buf.resize(5, 20);
- /// assert_eq!(buf, [5, 10, 20, 20, 20]);
- /// ```
- #[stable(feature = "deque_extras", since = "1.16.0")]
- pub fn resize(&mut self, new_len: usize, value: T) {
- let len = self.len();
-
- if new_len > len {
- self.extend(repeat(value).take(new_len - len))
- } else {
- self.truncate(new_len);
- }
- }
-}
-
-/// Returns the index in the underlying buffer for a given logical element index.
-#[inline]
-fn wrap_index(index: usize, size: usize) -> usize {
- // size is always a power of 2
- debug_assert!(size.is_power_of_two());
- index & (size - 1)
-}
-
-/// Returns the two slices that cover the `VecDeque`'s valid range
-trait RingSlices: Sized {
- fn slice(self, from: usize, to: usize) -> Self;
- fn split_at(self, i: usize) -> (Self, Self);
-
- fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) {
- let contiguous = tail <= head;
- if contiguous {
- let (empty, buf) = buf.split_at(0);
- (buf.slice(tail, head), empty)
- } else {
- let (mid, right) = buf.split_at(tail);
- let (left, _) = mid.split_at(head);
- (right, left)
- }
- }
-}
-
-impl<'a, T> RingSlices for &'a [T] {
- fn slice(self, from: usize, to: usize) -> Self {
- &self[from..to]
- }
- fn split_at(self, i: usize) -> (Self, Self) {
- (*self).split_at(i)
- }
-}
-
-impl<'a, T> RingSlices for &'a mut [T] {
- fn slice(self, from: usize, to: usize) -> Self {
- &mut self[from..to]
- }
- fn split_at(self, i: usize) -> (Self, Self) {
- (*self).split_at_mut(i)
- }
-}
-
-/// Calculate the number of elements left to be read in the buffer
-#[inline]
-fn count(tail: usize, head: usize, size: usize) -> usize {
- // size is always a power of 2
- (head.wrapping_sub(tail)) & (size - 1)
-}
-
-/// An iterator over the elements of a `VecDeque`.
-///
-/// This `struct` is created by the [`iter`] method on [`VecDeque`]. See its
-/// documentation for more.
-///
-/// [`iter`]: struct.VecDeque.html#method.iter
-/// [`VecDeque`]: struct.VecDeque.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Iter<'a, T: 'a> {
- ring: &'a [T],
- tail: usize,
- head: usize,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Iter")
- .field(&self.ring)
- .field(&self.tail)
- .field(&self.head)
- .finish()
- }
-}
-
-// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Iter<'a, T> {
- fn clone(&self) -> Iter<'a, T> {
- Iter {
- ring: self.ring,
- tail: self.tail,
- head: self.head,
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for Iter<'a, T> {
- type Item = &'a T;
-
- #[inline]
- fn next(&mut self) -> Option<&'a T> {
- if self.tail == self.head {
- return None;
- }
- let tail = self.tail;
- self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
- unsafe { Some(self.ring.get_unchecked(tail)) }
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- let len = count(self.tail, self.head, self.ring.len());
- (len, Some(len))
- }
-
- fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
- where F: FnMut(Acc, Self::Item) -> Acc
- {
- let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
- accum = front.iter().fold(accum, &mut f);
- back.iter().fold(accum, &mut f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
- #[inline]
- fn next_back(&mut self) -> Option<&'a T> {
- if self.tail == self.head {
- return None;
- }
- self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
- unsafe { Some(self.ring.get_unchecked(self.head)) }
- }
-
- fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
- where F: FnMut(Acc, Self::Item) -> Acc
- {
- let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
- accum = back.iter().rfold(accum, &mut f);
- front.iter().rfold(accum, &mut f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for Iter<'a, T> {
- fn is_empty(&self) -> bool {
- self.head == self.tail
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for Iter<'a, T> {}
-
-
-/// A mutable iterator over the elements of a `VecDeque`.
-///
-/// This `struct` is created by the [`iter_mut`] method on [`VecDeque`]. See its
-/// documentation for more.
-///
-/// [`iter_mut`]: struct.VecDeque.html#method.iter_mut
-/// [`VecDeque`]: struct.VecDeque.html
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct IterMut<'a, T: 'a> {
- ring: &'a mut [T],
- tail: usize,
- head: usize,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("IterMut")
- .field(&self.ring)
- .field(&self.tail)
- .field(&self.head)
- .finish()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for IterMut<'a, T> {
- type Item = &'a mut T;
-
- #[inline]
- fn next(&mut self) -> Option<&'a mut T> {
- if self.tail == self.head {
- return None;
- }
- let tail = self.tail;
- self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
-
- unsafe {
- let elem = self.ring.get_unchecked_mut(tail);
- Some(&mut *(elem as *mut _))
- }
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- let len = count(self.tail, self.head, self.ring.len());
- (len, Some(len))
- }
-
- fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
- where F: FnMut(Acc, Self::Item) -> Acc
- {
- let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
- accum = front.iter_mut().fold(accum, &mut f);
- back.iter_mut().fold(accum, &mut f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
- #[inline]
- fn next_back(&mut self) -> Option<&'a mut T> {
- if self.tail == self.head {
- return None;
- }
- self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
-
- unsafe {
- let elem = self.ring.get_unchecked_mut(self.head);
- Some(&mut *(elem as *mut _))
- }
- }
-
- fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
- where F: FnMut(Acc, Self::Item) -> Acc
- {
- let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
- accum = back.iter_mut().rfold(accum, &mut f);
- front.iter_mut().rfold(accum, &mut f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
- fn is_empty(&self) -> bool {
- self.head == self.tail
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for IterMut<'a, T> {}
-
-/// An owning iterator over the elements of a `VecDeque`.
-///
-/// This `struct` is created by the [`into_iter`] method on [`VecDeque`][`VecDeque`]
-/// (provided by the `IntoIterator` trait). See its documentation for more.
-///
-/// [`into_iter`]: struct.VecDeque.html#method.into_iter
-/// [`VecDeque`]: struct.VecDeque.html
-#[derive(Clone)]
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct IntoIter<T> {
- inner: VecDeque<T>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("IntoIter")
- .field(&self.inner)
- .finish()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Iterator for IntoIter<T> {
- type Item = T;
-
- #[inline]
- fn next(&mut self) -> Option<T> {
- self.inner.pop_front()
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- let len = self.inner.len();
- (len, Some(len))
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> DoubleEndedIterator for IntoIter<T> {
- #[inline]
- fn next_back(&mut self) -> Option<T> {
- self.inner.pop_back()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> ExactSizeIterator for IntoIter<T> {
- fn is_empty(&self) -> bool {
- self.inner.is_empty()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for IntoIter<T> {}
-
-/// A draining iterator over the elements of a `VecDeque`.
-///
-/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
-/// documentation for more.
-///
-/// [`drain`]: struct.VecDeque.html#method.drain
-/// [`VecDeque`]: struct.VecDeque.html
-#[stable(feature = "drain", since = "1.6.0")]
-pub struct Drain<'a, T: 'a> {
- after_tail: usize,
- after_head: usize,
- iter: Iter<'a, T>,
- deque: NonNull<VecDeque<T>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Drain")
- .field(&self.after_tail)
- .field(&self.after_head)
- .field(&self.iter)
- .finish()
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
-#[stable(feature = "drain", since = "1.6.0")]
-unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<'a, T: 'a> Drop for Drain<'a, T> {
- fn drop(&mut self) {
- self.for_each(drop);
-
- let source_deque = unsafe { self.deque.as_mut() };
-
- // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
- //
- // T t h H
- // [. . . o o x x o o . . .]
- //
- let orig_tail = source_deque.tail;
- let drain_tail = source_deque.head;
- let drain_head = self.after_tail;
- let orig_head = self.after_head;
-
- let tail_len = count(orig_tail, drain_tail, source_deque.cap());
- let head_len = count(drain_head, orig_head, source_deque.cap());
-
- // Restore the original head value
- source_deque.head = orig_head;
-
- match (tail_len, head_len) {
- (0, 0) => {
- source_deque.head = 0;
- source_deque.tail = 0;
- }
- (0, _) => {
- source_deque.tail = drain_head;
- }
- (_, 0) => {
- source_deque.head = drain_tail;
- }
- _ => unsafe {
- if tail_len <= head_len {
- source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
- source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
- } else {
- source_deque.head = source_deque.wrap_add(drain_tail, head_len);
- source_deque.wrap_copy(drain_tail, drain_head, head_len);
- }
- },
- }
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<'a, T: 'a> Iterator for Drain<'a, T> {
- type Item = T;
-
- #[inline]
- fn next(&mut self) -> Option<T> {
- self.iter.next().map(|elt| unsafe { ptr::read(elt) })
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
- #[inline]
- fn next_back(&mut self) -> Option<T> {
- self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A: PartialEq> PartialEq for VecDeque<A> {
- fn eq(&self, other: &VecDeque<A>) -> bool {
- if self.len() != other.len() {
- return false;
- }
- let (sa, sb) = self.as_slices();
- let (oa, ob) = other.as_slices();
- if sa.len() == oa.len() {
- sa == oa && sb == ob
- } else if sa.len() < oa.len() {
- // Always divisible in three sections, for example:
- // self: [a b c|d e f]
- // other: [0 1 2 3|4 5]
- // front = 3, mid = 1,
- // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5]
- let front = sa.len();
- let mid = oa.len() - front;
-
- let (oa_front, oa_mid) = oa.split_at(front);
- let (sb_mid, sb_back) = sb.split_at(mid);
- debug_assert_eq!(sa.len(), oa_front.len());
- debug_assert_eq!(sb_mid.len(), oa_mid.len());
- debug_assert_eq!(sb_back.len(), ob.len());
- sa == oa_front && sb_mid == oa_mid && sb_back == ob
- } else {
- let front = oa.len();
- let mid = sa.len() - front;
-
- let (sa_front, sa_mid) = sa.split_at(front);
- let (ob_mid, ob_back) = ob.split_at(mid);
- debug_assert_eq!(sa_front.len(), oa.len());
- debug_assert_eq!(sa_mid.len(), ob_mid.len());
- debug_assert_eq!(sb.len(), ob_back.len());
- sa_front == oa && sa_mid == ob_mid && sb == ob_back
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A: Eq> Eq for VecDeque<A> {}
-
-macro_rules! __impl_slice_eq1 {
- ($Lhs: ty, $Rhs: ty) => {
- __impl_slice_eq1! { $Lhs, $Rhs, Sized }
- };
- ($Lhs: ty, $Rhs: ty, $Bound: ident) => {
- #[stable(feature = "vec-deque-partial-eq-slice", since = "1.17.0")]
- impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq<B> {
- fn eq(&self, other: &$Rhs) -> bool {
- if self.len() != other.len() {
- return false;
- }
- let (sa, sb) = self.as_slices();
- let (oa, ob) = other[..].split_at(sa.len());
- sa == oa && sb == ob
- }
- }
- }
-}
-
-__impl_slice_eq1! { VecDeque<A>, Vec<B> }
-__impl_slice_eq1! { VecDeque<A>, &'b [B] }
-__impl_slice_eq1! { VecDeque<A>, &'b mut [B] }
-
-macro_rules! array_impls {
- ($($N: expr)+) => {
- $(
- __impl_slice_eq1! { VecDeque<A>, [B; $N] }
- __impl_slice_eq1! { VecDeque<A>, &'b [B; $N] }
- __impl_slice_eq1! { VecDeque<A>, &'b mut [B; $N] }
- )+
- }
-}
-
-array_impls! {
- 0 1 2 3 4 5 6 7 8 9
- 10 11 12 13 14 15 16 17 18 19
- 20 21 22 23 24 25 26 27 28 29
- 30 31 32
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A: PartialOrd> PartialOrd for VecDeque<A> {
- fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> {
- self.iter().partial_cmp(other.iter())
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A: Ord> Ord for VecDeque<A> {
- #[inline]
- fn cmp(&self, other: &VecDeque<A>) -> Ordering {
- self.iter().cmp(other.iter())
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A: Hash> Hash for VecDeque<A> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- self.len().hash(state);
- let (a, b) = self.as_slices();
- Hash::hash_slice(a, state);
- Hash::hash_slice(b, state);
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A> Index<usize> for VecDeque<A> {
- type Output = A;
-
- #[inline]
- fn index(&self, index: usize) -> &A {
- self.get(index).expect("Out of bounds access")
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A> IndexMut<usize> for VecDeque<A> {
- #[inline]
- fn index_mut(&mut self, index: usize) -> &mut A {
- self.get_mut(index).expect("Out of bounds access")
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A> FromIterator<A> for VecDeque<A> {
- fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> VecDeque<A> {
- let iterator = iter.into_iter();
- let (lower, _) = iterator.size_hint();
- let mut deq = VecDeque::with_capacity(lower);
- deq.extend(iterator);
- deq
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> IntoIterator for VecDeque<T> {
- type Item = T;
- type IntoIter = IntoIter<T>;
-
- /// Consumes the `VecDeque` into a front-to-back iterator yielding elements by
- /// value.
- fn into_iter(self) -> IntoIter<T> {
- IntoIter { inner: self }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a VecDeque<T> {
- type Item = &'a T;
- type IntoIter = Iter<'a, T>;
-
- fn into_iter(self) -> Iter<'a, T> {
- self.iter()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a mut VecDeque<T> {
- type Item = &'a mut T;
- type IntoIter = IterMut<'a, T>;
-
- fn into_iter(self) -> IterMut<'a, T> {
- self.iter_mut()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<A> Extend<A> for VecDeque<A> {
- fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
- for elt in iter {
- self.push_back(elt);
- }
- }
-}
-
-#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> {
- fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
- self.extend(iter.into_iter().cloned());
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug> fmt::Debug for VecDeque<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_list().entries(self).finish()
- }
-}
-
-#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
-impl<T> From<Vec<T>> for VecDeque<T> {
- fn from(mut other: Vec<T>) -> Self {
- unsafe {
- let other_buf = other.as_mut_ptr();
- let mut buf = RawVec::from_raw_parts(other_buf, other.capacity());
- let len = other.len();
- mem::forget(other);
-
- // We need to extend the buf if it's not a power of two, too small
- // or doesn't have at least one free space
- if !buf.cap().is_power_of_two() || (buf.cap() < (MINIMUM_CAPACITY + 1)) ||
- (buf.cap() == len) {
- let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
- buf.reserve_exact(len, cap - len);
- }
-
- VecDeque {
- tail: 0,
- head: len,
- buf,
- }
- }
- }
-}
-
-#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
-impl<T> From<VecDeque<T>> for Vec<T> {
- fn from(other: VecDeque<T>) -> Self {
- unsafe {
- let buf = other.buf.ptr();
- let len = other.len();
- let tail = other.tail;
- let head = other.head;
- let cap = other.cap();
-
- // Need to move the ring to the front of the buffer, as vec will expect this.
- if other.is_contiguous() {
- ptr::copy(buf.offset(tail as isize), buf, len);
- } else {
- if (tail - head) >= cmp::min(cap - tail, head) {
- // There is enough free space in the centre for the shortest block so we can
- // do this in at most three copy moves.
- if (cap - tail) > head {
- // right hand block is the long one; move that enough for the left
- ptr::copy(buf.offset(tail as isize),
- buf.offset((tail - head) as isize),
- cap - tail);
- // copy left in the end
- ptr::copy(buf, buf.offset((cap - head) as isize), head);
- // shift the new thing to the start
- ptr::copy(buf.offset((tail - head) as isize), buf, len);
- } else {
- // left hand block is the long one, we can do it in two!
- ptr::copy(buf, buf.offset((cap - tail) as isize), head);
- ptr::copy(buf.offset(tail as isize), buf, cap - tail);
- }
- } else {
- // Need to use N swaps to move the ring
- // We can use the space at the end of the ring as a temp store
-
- let mut left_edge: usize = 0;
- let mut right_edge: usize = tail;
-
- // The general problem looks like this
- // GHIJKLM...ABCDEF - before any swaps
- // ABCDEFM...GHIJKL - after 1 pass of swaps
- // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
- // - then restart the algorithm with a new (smaller) store
- // Sometimes the temp store is reached when the right edge is at the end
- // of the buffer - this means we've hit the right order with fewer swaps!
- // E.g
- // EF..ABCD
- // ABCDEF.. - after four only swaps we've finished
-
- while left_edge < len && right_edge != cap {
- let mut right_offset = 0;
- for i in left_edge..right_edge {
- right_offset = (i - left_edge) % (cap - right_edge);
- let src: isize = (right_edge + right_offset) as isize;
- ptr::swap(buf.offset(i as isize), buf.offset(src));
- }
- let n_ops = right_edge - left_edge;
- left_edge += n_ops;
- right_edge += right_offset + 1;
-
- }
- }
-
- }
- let out = Vec::from_raw_parts(buf, len, cap);
- mem::forget(other);
- out
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use test;
-
- use super::VecDeque;
-
- #[bench]
- fn bench_push_back_100(b: &mut test::Bencher) {
- let mut deq = VecDeque::with_capacity(101);
- b.iter(|| {
- for i in 0..100 {
- deq.push_back(i);
- }
- deq.head = 0;
- deq.tail = 0;
- })
- }
-
- #[bench]
- fn bench_push_front_100(b: &mut test::Bencher) {
- let mut deq = VecDeque::with_capacity(101);
- b.iter(|| {
- for i in 0..100 {
- deq.push_front(i);
- }
- deq.head = 0;
- deq.tail = 0;
- })
- }
-
- #[bench]
- fn bench_pop_back_100(b: &mut test::Bencher) {
- let mut deq = VecDeque::<i32>::with_capacity(101);
-
- b.iter(|| {
- deq.head = 100;
- deq.tail = 0;
- while !deq.is_empty() {
- test::black_box(deq.pop_back());
- }
- })
- }
-
- #[bench]
- fn bench_pop_front_100(b: &mut test::Bencher) {
- let mut deq = VecDeque::<i32>::with_capacity(101);
-
- b.iter(|| {
- deq.head = 100;
- deq.tail = 0;
- while !deq.is_empty() {
- test::black_box(deq.pop_front());
- }
- })
- }
-
- #[test]
- fn test_swap_front_back_remove() {
- fn test(back: bool) {
- // This test checks that every single combination of tail position and length is tested.
- // Capacity 15 should be large enough to cover every case.
- let mut tester = VecDeque::with_capacity(15);
- let usable_cap = tester.capacity();
- let final_len = usable_cap / 2;
-
- for len in 0..final_len {
- let expected: VecDeque<_> = if back {
- (0..len).collect()
- } else {
- (0..len).rev().collect()
- };
- for tail_pos in 0..usable_cap {
- tester.tail = tail_pos;
- tester.head = tail_pos;
- if back {
- for i in 0..len * 2 {
- tester.push_front(i);
- }
- for i in 0..len {
- assert_eq!(tester.swap_remove_back(i), Some(len * 2 - 1 - i));
- }
- } else {
- for i in 0..len * 2 {
- tester.push_back(i);
- }
- for i in 0..len {
- let idx = tester.len() - 1 - i;
- assert_eq!(tester.swap_remove_front(idx), Some(len * 2 - 1 - i));
- }
- }
- assert!(tester.tail < tester.cap());
- assert!(tester.head < tester.cap());
- assert_eq!(tester, expected);
- }
- }
- }
- test(true);
- test(false);
- }
-
- #[test]
- fn test_insert() {
- // This test checks that every single combination of tail position, length, and
- // insertion position is tested. Capacity 15 should be large enough to cover every case.
-
- let mut tester = VecDeque::with_capacity(15);
- // can't guarantee we got 15, so have to get what we got.
- // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
- // this test isn't covering what it wants to
- let cap = tester.capacity();
-
-
- // len is the length *after* insertion
- for len in 1..cap {
- // 0, 1, 2, .., len - 1
- let expected = (0..).take(len).collect::<VecDeque<_>>();
- for tail_pos in 0..cap {
- for to_insert in 0..len {
- tester.tail = tail_pos;
- tester.head = tail_pos;
- for i in 0..len {
- if i != to_insert {
- tester.push_back(i);
- }
- }
- tester.insert(to_insert, to_insert);
- assert!(tester.tail < tester.cap());
- assert!(tester.head < tester.cap());
- assert_eq!(tester, expected);
- }
- }
- }
- }
-
- #[test]
- fn test_remove() {
- // This test checks that every single combination of tail position, length, and
- // removal position is tested. Capacity 15 should be large enough to cover every case.
-
- let mut tester = VecDeque::with_capacity(15);
- // can't guarantee we got 15, so have to get what we got.
- // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
- // this test isn't covering what it wants to
- let cap = tester.capacity();
-
- // len is the length *after* removal
- for len in 0..cap - 1 {
- // 0, 1, 2, .., len - 1
- let expected = (0..).take(len).collect::<VecDeque<_>>();
- for tail_pos in 0..cap {
- for to_remove in 0..len + 1 {
- tester.tail = tail_pos;
- tester.head = tail_pos;
- for i in 0..len {
- if i == to_remove {
- tester.push_back(1234);
- }
- tester.push_back(i);
- }
- if to_remove == len {
- tester.push_back(1234);
- }
- tester.remove(to_remove);
- assert!(tester.tail < tester.cap());
- assert!(tester.head < tester.cap());
- assert_eq!(tester, expected);
- }
- }
- }
- }
-
- #[test]
- fn test_drain() {
- let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
-
- let cap = tester.capacity();
- for len in 0..cap + 1 {
- for tail in 0..cap + 1 {
- for drain_start in 0..len + 1 {
- for drain_end in drain_start..len + 1 {
- tester.tail = tail;
- tester.head = tail;
- for i in 0..len {
- tester.push_back(i);
- }
-
- // Check that we drain the correct values
- let drained: VecDeque<_> = tester.drain(drain_start..drain_end).collect();
- let drained_expected: VecDeque<_> = (drain_start..drain_end).collect();
- assert_eq!(drained, drained_expected);
-
- // We shouldn't have changed the capacity or made the
- // head or tail out of bounds
- assert_eq!(tester.capacity(), cap);
- assert!(tester.tail < tester.cap());
- assert!(tester.head < tester.cap());
-
- // We should see the correct values in the VecDeque
- let expected: VecDeque<_> = (0..drain_start)
- .chain(drain_end..len)
- .collect();
- assert_eq!(expected, tester);
- }
- }
- }
- }
- }
-
- #[test]
- fn test_shrink_to_fit() {
- // This test checks that every single combination of head and tail position,
- // is tested. Capacity 15 should be large enough to cover every case.
-
- let mut tester = VecDeque::with_capacity(15);
- // can't guarantee we got 15, so have to get what we got.
- // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
- // this test isn't covering what it wants to
- let cap = tester.capacity();
- tester.reserve(63);
- let max_cap = tester.capacity();
-
- for len in 0..cap + 1 {
- // 0, 1, 2, .., len - 1
- let expected = (0..).take(len).collect::<VecDeque<_>>();
- for tail_pos in 0..max_cap + 1 {
- tester.tail = tail_pos;
- tester.head = tail_pos;
- tester.reserve(63);
- for i in 0..len {
- tester.push_back(i);
- }
- tester.shrink_to_fit();
- assert!(tester.capacity() <= cap);
- assert!(tester.tail < tester.cap());
- assert!(tester.head < tester.cap());
- assert_eq!(tester, expected);
- }
- }
- }
-
- #[test]
- fn test_split_off() {
- // This test checks that every single combination of tail position, length, and
- // split position is tested. Capacity 15 should be large enough to cover every case.
-
- let mut tester = VecDeque::with_capacity(15);
- // can't guarantee we got 15, so have to get what we got.
- // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
- // this test isn't covering what it wants to
- let cap = tester.capacity();
-
- // len is the length *before* splitting
- for len in 0..cap {
- // index to split at
- for at in 0..len + 1 {
- // 0, 1, 2, .., at - 1 (may be empty)
- let expected_self = (0..).take(at).collect::<VecDeque<_>>();
- // at, at + 1, .., len - 1 (may be empty)
- let expected_other = (at..).take(len - at).collect::<VecDeque<_>>();
-
- for tail_pos in 0..cap {
- tester.tail = tail_pos;
- tester.head = tail_pos;
- for i in 0..len {
- tester.push_back(i);
- }
- let result = tester.split_off(at);
- assert!(tester.tail < tester.cap());
- assert!(tester.head < tester.cap());
- assert!(result.tail < result.cap());
- assert!(result.head < result.cap());
- assert_eq!(tester, expected_self);
- assert_eq!(result, expected_other);
- }
- }
- }
- }
-
- #[test]
- fn test_from_vec() {
- use super::super::vec::Vec;
- for cap in 0..35 {
- for len in 0..cap + 1 {
- let mut vec = Vec::with_capacity(cap);
- vec.extend(0..len);
-
- let vd = VecDeque::from(vec.clone());
- assert!(vd.cap().is_power_of_two());
- assert_eq!(vd.len(), vec.len());
- assert!(vd.into_iter().eq(vec));
- }
- }
- }
-
- #[test]
- fn test_vec_from_vecdeque() {
- use super::super::vec::Vec;
-
- fn create_vec_and_test_convert(cap: usize, offset: usize, len: usize) {
- let mut vd = VecDeque::with_capacity(cap);
- for _ in 0..offset {
- vd.push_back(0);
- vd.pop_front();
- }
- vd.extend(0..len);
-
- let vec: Vec<_> = Vec::from(vd.clone());
- assert_eq!(vec.len(), vd.len());
- assert!(vec.into_iter().eq(vd));
- }
-
- for cap_pwr in 0..7 {
- // Make capacity as a (2^x)-1, so that the ring size is 2^x
- let cap = (2i32.pow(cap_pwr) - 1) as usize;
-
- // In these cases there is enough free space to solve it with copies
- for len in 0..((cap + 1) / 2) {
- // Test contiguous cases
- for offset in 0..(cap - len) {
- create_vec_and_test_convert(cap, offset, len)
- }
-
- // Test cases where block at end of buffer is bigger than block at start
- for offset in (cap - len)..(cap - (len / 2)) {
- create_vec_and_test_convert(cap, offset, len)
- }
-
- // Test cases where block at start of buffer is bigger than block at end
- for offset in (cap - (len / 2))..cap {
- create_vec_and_test_convert(cap, offset, len)
- }
- }
-
- // Now there's not (necessarily) space to straighten the ring with simple copies,
- // the ring will use swapping when:
- // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len))
- // right block size > free space && left block size > free space
- for len in ((cap + 1) / 2)..cap {
- // Test contiguous cases
- for offset in 0..(cap - len) {
- create_vec_and_test_convert(cap, offset, len)
- }
-
- // Test cases where block at end of buffer is bigger than block at start
- for offset in (cap - len)..(cap - (len / 2)) {
- create_vec_and_test_convert(cap, offset, len)
- }
-
- // Test cases where block at start of buffer is bigger than block at end
- for offset in (cap - (len / 2))..cap {
- create_vec_and_test_convert(cap, offset, len)
- }
- }
- }
- }
-
-}
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
+#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]
}
}
-/// Augments `AllocErr` with a CapacityOverflow variant.
-// FIXME: should this be in libcore or liballoc?
-#[derive(Clone, PartialEq, Eq, Debug)]
-#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-pub enum CollectionAllocErr {
- /// Error due to the computed capacity exceeding the collection's maximum
- /// (usually `isize::MAX` bytes).
- CapacityOverflow,
- /// Error due to the allocator (see the `AllocErr` type's docs).
- AllocErr,
-}
-
-#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-impl From<AllocErr> for CollectionAllocErr {
- #[inline]
- fn from(AllocErr: AllocErr) -> Self {
- CollectionAllocErr::AllocErr
- }
-}
-
-#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-impl From<LayoutErr> for CollectionAllocErr {
- #[inline]
- fn from(_: LayoutErr) -> Self {
- CollectionAllocErr::CapacityOverflow
- }
-}
-
/// A memory allocator that can be registered as the standard library’s default
/// though the `#[global_allocator]` attributes.
///
use self::Entry::*;
use self::VacantEntryState::*;
-use alloc::CollectionAllocErr;
+use collections::CollectionAllocErr;
use cell::Cell;
use borrow::Borrow;
use cmp::max;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use alloc::{Global, Alloc, Layout, LayoutErr, CollectionAllocErr, handle_alloc_error};
+use alloc::{Global, Alloc, Layout, LayoutErr, handle_alloc_error};
+use collections::CollectionAllocErr;
use hash::{BuildHasher, Hash, Hasher};
use marker;
use mem::{size_of, needs_drop};
#[doc(hidden)]
pub use ops::Bound;
#[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc_crate::{BinaryHeap, BTreeMap, BTreeSet};
+pub use alloc_crate::collections::{BinaryHeap, BTreeMap, BTreeSet};
#[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc_crate::{LinkedList, VecDeque};
+pub use alloc_crate::collections::{LinkedList, VecDeque};
#[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc_crate::{binary_heap, btree_map, btree_set};
+pub use alloc_crate::collections::{binary_heap, btree_map, btree_set};
#[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc_crate::{linked_list, vec_deque};
+pub use alloc_crate::collections::{linked_list, vec_deque};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::hash_map::HashMap;
pub use self::hash_set::HashSet;
#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-pub use alloc::CollectionAllocErr;
+pub use alloc_crate::collections::CollectionAllocErr;
mod hash;
#![stable(feature = "rust1", since = "1.0.0")]
#[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc_crate::arc::{Arc, Weak};
+pub use alloc_crate::sync::{Arc, Weak};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::sync::atomic;
'others': [
{ 'path': 'std::vec', 'name': 'Vec' },
{ 'path': 'std::collections', 'name': 'VecDeque' },
- { 'path': 'alloc::raw_vec', 'name': 'RawVec' },
],
};