1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type with heap-allocated contents, written `Vec<T>` but pronounced 'vector.'
13 //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end).
17 //! Explicitly creating a `Vec<T>` with `new()`:
20 //! let xs: Vec<i32> = Vec::new();
23 //! Using the `vec!` macro:
26 //! let ys: Vec<i32> = vec![];
28 //! let zs = vec![1i32, 2, 3, 4, 5];
34 //! let mut xs = vec![1i32, 2];
42 //! let mut xs = vec![1i32, 2];
44 //! let two = xs.pop();
47 #![stable(feature = "rust1", since = "1.0.0")]
51 use alloc::boxed::Box;
52 use alloc::heap::{EMPTY, allocate, reallocate, deallocate};
53 use core::borrow::{Cow, IntoCow};
55 use core::cmp::{Ordering};
56 use core::default::Default;
58 use core::hash::{self, Hash};
59 use core::iter::{repeat, FromIterator, IntoIterator};
60 use core::marker::{ContravariantLifetime, InvariantType};
62 use core::nonzero::NonZero;
63 use core::num::{Int, UnsignedInt};
64 use core::ops::{Index, IndexMut, Deref, Add};
67 use core::raw::Slice as RawSlice;
71 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
76 /// let mut vec = Vec::new();
80 /// assert_eq!(vec.len(), 2);
81 /// assert_eq!(vec[0], 1);
83 /// assert_eq!(vec.pop(), Some(2));
84 /// assert_eq!(vec.len(), 1);
87 /// assert_eq!(vec[0], 7);
89 /// vec.push_all(&[1, 2, 3]);
91 /// for x in vec.iter() {
92 /// println!("{}", x);
94 /// assert_eq!(vec, vec![7, 1, 2, 3]);
97 /// The `vec!` macro is provided to make initialization more convenient:
100 /// let mut vec = vec![1, 2, 3];
102 /// assert_eq!(vec, vec![1, 2, 3, 4]);
105 /// Use a `Vec<T>` as an efficient stack:
108 /// let mut stack = Vec::new();
115 /// let top = match stack.pop() {
116 /// None => break, // empty
119 /// // Prints 3, 2, 1
120 /// println!("{}", top);
124 /// # Capacity and reallocation
126 /// The capacity of a vector is the amount of space allocated for any future elements that will be
127 /// added onto the vector. This is not to be confused with the *length* of a vector, which
128 /// specifies the number of actual elements within the vector. If a vector's length exceeds its
129 /// capacity, its capacity will automatically be increased, but its elements will have to be
132 /// For example, a vector with capacity 10 and length 0 would be an empty vector with space for 10
133 /// more elements. Pushing 10 or fewer elements onto the vector will not change its capacity or
134 /// cause reallocation to occur. However, if the vector's length is increased to 11, it will have
135 /// to reallocate, which can be slow. For this reason, it is recommended to use
136 /// `Vec::with_capacity` whenever possible to specify how big the vector is expected to get.
137 #[unsafe_no_drop_flag]
138 #[stable(feature = "rust1", since = "1.0.0")]
140 ptr: NonZero<*mut T>,
145 unsafe impl<T: Send> Send for Vec<T> { }
146 unsafe impl<T: Sync> Sync for Vec<T> { }
148 ////////////////////////////////////////////////////////////////////////////////
150 ////////////////////////////////////////////////////////////////////////////////
153 /// Constructs a new, empty `Vec<T>`.
155 /// The vector will not allocate until elements are pushed onto it.
160 /// let mut vec: Vec<int> = Vec::new();
163 #[stable(feature = "rust1", since = "1.0.0")]
164 pub fn new() -> Vec<T> {
165 // We want ptr to never be NULL so instead we set it to some arbitrary
166 // non-null value which is fine since we never call deallocate on the ptr
167 // if cap is 0. The reason for this is because the pointer of a slice
168 // being NULL would break the null pointer optimization for enums.
169 Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
172 /// Constructs a new, empty `Vec<T>` with the specified capacity.
174 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
175 /// `capacity` is 0, the vector will not allocate.
177 /// It is important to note that this function does not specify the *length* of the returned
178 /// vector, but only the *capacity*. (For an explanation of the difference between length and
179 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.)
184 /// let mut vec: Vec<_> = Vec::with_capacity(10);
186 /// // The vector contains no items, even though it has capacity for more
187 /// assert_eq!(vec.len(), 0);
189 /// // These are all done without reallocating...
194 /// // ...but this may make the vector reallocate
198 #[stable(feature = "rust1", since = "1.0.0")]
199 pub fn with_capacity(capacity: usize) -> Vec<T> {
200 if mem::size_of::<T>() == 0 {
201 Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: usize::MAX }
202 } else if capacity == 0 {
205 let size = capacity.checked_mul(mem::size_of::<T>())
206 .expect("capacity overflow");
207 let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
208 if ptr.is_null() { ::alloc::oom() }
209 Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
213 /// Creates a `Vec<T>` directly from the raw components of another vector.
215 /// This is highly unsafe, due to the number of invariants that aren't checked.
224 /// let mut v = vec![1, 2, 3];
226 /// // Pull out the various important pieces of information about `v`
227 /// let p = v.as_mut_ptr();
228 /// let len = v.len();
229 /// let cap = v.capacity();
232 /// // Cast `v` into the void: no destructor run, so we are in
233 /// // complete control of the allocation to which `p` points.
236 /// // Overwrite memory with 4, 5, 6
237 /// for i in 0..len as isize {
238 /// ptr::write(p.offset(i), 4 + i);
241 /// // Put everything back together into a Vec
242 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
243 /// assert_eq!(rebuilt, vec![4, 5, 6]);
247 #[stable(feature = "rust1", since = "1.0.0")]
248 pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
249 capacity: usize) -> Vec<T> {
250 Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
253 /// Creates a vector by copying the elements from a raw pointer.
255 /// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation
256 /// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector
257 /// without cloning, as if `ptr::read()` were called on them.
259 #[unstable(feature = "collections",
260 reason = "may be better expressed via composition")]
261 pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> {
262 let mut dst = Vec::with_capacity(elts);
264 ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
268 /// Returns the number of elements the vector can hold without
274 /// let vec: Vec<int> = Vec::with_capacity(10);
275 /// assert_eq!(vec.capacity(), 10);
278 #[stable(feature = "rust1", since = "1.0.0")]
279 pub fn capacity(&self) -> usize {
283 /// Reserves capacity for at least `additional` more elements to be inserted in the given
284 /// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations.
288 /// Panics if the new capacity overflows `usize`.
293 /// let mut vec: Vec<int> = vec![1];
295 /// assert!(vec.capacity() >= 11);
297 #[stable(feature = "rust1", since = "1.0.0")]
298 pub fn reserve(&mut self, additional: usize) {
299 if self.cap - self.len < additional {
300 let err_msg = "Vec::reserve: `usize` overflow";
301 let new_cap = self.len.checked_add(additional).expect(err_msg)
302 .checked_next_power_of_two().expect(err_msg);
303 self.grow_capacity(new_cap);
307 /// Reserves the minimum capacity for exactly `additional` more elements to
308 /// be inserted in the given `Vec<T>`. Does nothing if the capacity is already
311 /// Note that the allocator may give the collection more space than it
312 /// requests. Therefore capacity can not be relied upon to be precisely
313 /// minimal. Prefer `reserve` if future insertions are expected.
317 /// Panics if the new capacity overflows `usize`.
322 /// let mut vec: Vec<int> = vec![1];
323 /// vec.reserve_exact(10);
324 /// assert!(vec.capacity() >= 11);
326 #[stable(feature = "rust1", since = "1.0.0")]
327 pub fn reserve_exact(&mut self, additional: usize) {
328 if self.cap - self.len < additional {
329 match self.len.checked_add(additional) {
330 None => panic!("Vec::reserve: `usize` overflow"),
331 Some(new_cap) => self.grow_capacity(new_cap)
336 /// Shrinks the capacity of the vector as much as possible.
338 /// It will drop down as close as possible to the length but the allocator
339 /// may still inform the vector that there is space for a few more elements.
344 /// let mut vec: Vec<int> = Vec::with_capacity(10);
345 /// vec.push_all(&[1, 2, 3]);
346 /// assert_eq!(vec.capacity(), 10);
347 /// vec.shrink_to_fit();
348 /// assert!(vec.capacity() >= 3);
350 #[stable(feature = "rust1", since = "1.0.0")]
351 pub fn shrink_to_fit(&mut self) {
352 if mem::size_of::<T>() == 0 { return }
357 dealloc(*self.ptr, self.cap)
361 } else if self.cap != self.len {
363 // Overflow check is unnecessary as the vector is already at
365 let ptr = reallocate(*self.ptr as *mut u8,
366 self.cap * mem::size_of::<T>(),
367 self.len * mem::size_of::<T>(),
368 mem::min_align_of::<T>()) as *mut T;
369 if ptr.is_null() { ::alloc::oom() }
370 self.ptr = NonZero::new(ptr);
376 /// Convert the vector into Box<[T]>.
378 /// Note that this will drop any excess capacity. Calling this and
379 /// converting back to a vector with `into_vec()` is equivalent to calling
380 /// `shrink_to_fit()`.
381 #[unstable(feature = "collections")]
382 pub fn into_boxed_slice(mut self) -> Box<[T]> {
383 self.shrink_to_fit();
385 let xs: Box<[T]> = mem::transmute(&mut *self);
391 /// Shorten a vector, dropping excess elements.
393 /// If `len` is greater than the vector's current length, this has no
399 /// let mut vec = vec![1, 2, 3, 4];
401 /// assert_eq!(vec, vec![1, 2]);
403 #[stable(feature = "rust1", since = "1.0.0")]
404 pub fn truncate(&mut self, len: usize) {
406 // drop any extra elements
407 while len < self.len {
408 // decrement len before the read(), so a panic on Drop doesn't
409 // re-drop the just-failed value.
411 ptr::read(self.get_unchecked(self.len));
416 /// Returns a mutable slice of the elements of `self`.
421 /// fn foo(slice: &mut [int]) {}
423 /// let mut vec = vec![1, 2];
424 /// foo(vec.as_mut_slice());
427 #[stable(feature = "rust1", since = "1.0.0")]
428 pub fn as_mut_slice(&mut self) -> &mut [T] {
430 mem::transmute(RawSlice {
437 /// Creates a consuming iterator, that is, one that moves each value out of
438 /// the vector (from start to end). The vector cannot be used after calling
444 /// let v = vec!["a".to_string(), "b".to_string()];
445 /// for s in v.into_iter() {
446 /// // s has type String, not &String
447 /// println!("{}", s);
451 #[stable(feature = "rust1", since = "1.0.0")]
452 pub fn into_iter(self) -> IntoIter<T> {
456 let begin = ptr as *const T;
457 let end = if mem::size_of::<T>() == 0 {
458 (ptr as usize + self.len()) as *const T
460 ptr.offset(self.len() as isize) as *const T
463 IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
467 /// Sets the length of a vector.
469 /// This will explicitly set the size of the vector, without actually
470 /// modifying its buffers, so it is up to the caller to ensure that the
471 /// vector is actually the specified size.
476 /// let mut v = vec![1, 2, 3, 4];
482 #[stable(feature = "rust1", since = "1.0.0")]
483 pub unsafe fn set_len(&mut self, len: usize) {
487 /// Removes an element from anywhere in the vector and return it, replacing
488 /// it with the last element.
490 /// This does not preserve ordering, but is O(1).
494 /// Panics if `index` is out of bounds.
499 /// let mut v = vec!["foo", "bar", "baz", "qux"];
501 /// assert_eq!(v.swap_remove(1), "bar");
502 /// assert_eq!(v, vec!["foo", "qux", "baz"]);
504 /// assert_eq!(v.swap_remove(0), "foo");
505 /// assert_eq!(v, vec!["baz", "qux"]);
508 #[stable(feature = "rust1", since = "1.0.0")]
509 pub fn swap_remove(&mut self, index: usize) -> T {
510 let length = self.len();
511 self.swap(index, length - 1);
515 /// Inserts an element at position `index` within the vector, shifting all
516 /// elements after position `i` one position to the right.
520 /// Panics if `index` is not between `0` and the vector's length (both
521 /// bounds inclusive).
526 /// let mut vec = vec![1, 2, 3];
527 /// vec.insert(1, 4);
528 /// assert_eq!(vec, vec![1, 4, 2, 3]);
529 /// vec.insert(4, 5);
530 /// assert_eq!(vec, vec![1, 4, 2, 3, 5]);
532 #[stable(feature = "rust1", since = "1.0.0")]
533 pub fn insert(&mut self, index: usize, element: T) {
534 let len = self.len();
535 assert!(index <= len);
536 // space for the new element
539 unsafe { // infallible
540 // The spot to put the new value
542 let p = self.as_mut_ptr().offset(index as isize);
543 // Shift everything over to make space. (Duplicating the
544 // `index`th element into two consecutive places.)
545 ptr::copy_memory(p.offset(1), &*p, len - index);
546 // Write it in, overwriting the first copy of the `index`th
548 ptr::write(&mut *p, element);
550 self.set_len(len + 1);
554 /// Removes and returns the element at position `index` within the vector,
555 /// shifting all elements after position `index` one position to the left.
559 /// Panics if `i` is out of bounds.
564 /// let mut v = vec![1, 2, 3];
565 /// assert_eq!(v.remove(1), 2);
566 /// assert_eq!(v, vec![1, 3]);
568 #[stable(feature = "rust1", since = "1.0.0")]
569 pub fn remove(&mut self, index: usize) -> T {
570 let len = self.len();
571 assert!(index < len);
572 unsafe { // infallible
575 // the place we are taking from.
576 let ptr = self.as_mut_ptr().offset(index as isize);
577 // copy it out, unsafely having a copy of the value on
578 // the stack and in the vector at the same time.
579 ret = ptr::read(ptr);
581 // Shift everything down to fill in that spot.
582 ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
584 self.set_len(len - 1);
589 /// Retains only the elements specified by the predicate.
591 /// In other words, remove all elements `e` such that `f(&e)` returns false.
592 /// This method operates in place and preserves the order of the retained
598 /// let mut vec = vec![1, 2, 3, 4];
599 /// vec.retain(|&x| x%2 == 0);
600 /// assert_eq!(vec, vec![2, 4]);
602 #[stable(feature = "rust1", since = "1.0.0")]
603 pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
604 let len = self.len();
618 self.truncate(len - del);
622 /// Appends an element to the back of a collection.
626 /// Panics if the number of elements in the vector overflows a `usize`.
631 /// let mut vec = vec!(1, 2);
633 /// assert_eq!(vec, vec!(1, 2, 3));
636 #[stable(feature = "rust1", since = "1.0.0")]
637 pub fn push(&mut self, value: T) {
638 if mem::size_of::<T>() == 0 {
639 // zero-size types consume no memory, so we can't rely on the
640 // address space running out
641 self.len = self.len.checked_add(1).expect("length overflow");
642 unsafe { mem::forget(value); }
645 if self.len == self.cap {
646 let old_size = self.cap * mem::size_of::<T>();
647 let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
648 if old_size > size { panic!("capacity overflow") }
650 let ptr = alloc_or_realloc(*self.ptr, old_size, size);
651 if ptr.is_null() { ::alloc::oom() }
652 self.ptr = NonZero::new(ptr);
654 self.cap = max(self.cap, 2) * 2;
658 let end = (*self.ptr).offset(self.len as isize);
659 ptr::write(&mut *end, value);
664 /// Removes the last element from a vector and returns it, or `None` if it is empty.
669 /// let mut vec = vec![1, 2, 3];
670 /// assert_eq!(vec.pop(), Some(3));
671 /// assert_eq!(vec, vec![1, 2]);
674 #[stable(feature = "rust1", since = "1.0.0")]
675 pub fn pop(&mut self) -> Option<T> {
681 Some(ptr::read(self.get_unchecked(self.len())))
686 /// Moves all the elements of `other` into `Self`, leaving `other` empty.
690 /// Panics if the number of elements in the vector overflows a `usize`.
694 /// let mut vec = vec![1, 2, 3];
695 /// let mut vec2 = vec![4, 5, 6];
696 /// vec.append(&mut vec2);
697 /// assert_eq!(vec, vec![1, 2, 3, 4, 5, 6]);
698 /// assert_eq!(vec2, vec![]);
701 #[unstable(feature = "collections",
702 reason = "new API, waiting for dust to settle")]
703 pub fn append(&mut self, other: &mut Self) {
704 if mem::size_of::<T>() == 0 {
705 // zero-size types consume no memory, so we can't rely on the
706 // address space running out
707 self.len = self.len.checked_add(other.len()).expect("length overflow");
708 unsafe { other.set_len(0) }
711 self.reserve(other.len());
712 let len = self.len();
714 ptr::copy_nonoverlapping_memory(
715 self.get_unchecked_mut(len),
720 self.len += other.len();
721 unsafe { other.set_len(0); }
724 /// Creates a draining iterator that clears the `Vec` and iterates over
725 /// the removed items from start to end.
730 /// let mut v = vec!["a".to_string(), "b".to_string()];
731 /// for s in v.drain() {
732 /// // s has type String, not &String
733 /// println!("{}", s);
735 /// assert!(v.is_empty());
738 #[unstable(feature = "collections",
739 reason = "matches collection reform specification, waiting for dust to settle")]
740 pub fn drain(&mut self) -> Drain<T> {
742 let begin = *self.ptr as *const T;
743 let end = if mem::size_of::<T>() == 0 {
744 (*self.ptr as usize + self.len()) as *const T
746 (*self.ptr).offset(self.len() as isize) as *const T
752 marker: ContravariantLifetime,
757 /// Clears the vector, removing all values.
762 /// let mut v = vec![1, 2, 3];
766 /// assert!(v.is_empty());
769 #[stable(feature = "rust1", since = "1.0.0")]
770 pub fn clear(&mut self) {
774 /// Returns the number of elements in the vector.
779 /// let a = vec![1, 2, 3];
780 /// assert_eq!(a.len(), 3);
783 #[stable(feature = "rust1", since = "1.0.0")]
784 pub fn len(&self) -> usize { self.len }
786 /// Returns `true` if the vector contains no elements.
791 /// let mut v = Vec::new();
792 /// assert!(v.is_empty());
795 /// assert!(!v.is_empty());
797 #[stable(feature = "rust1", since = "1.0.0")]
798 pub fn is_empty(&self) -> bool { self.len() == 0 }
800 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
801 /// size and in case they are not zero-sized the same minimal alignment.
805 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
806 /// have differing minimal alignments.
811 /// let v = vec![0, 1, 2];
812 /// let w = v.map_in_place(|i| i + 3);
813 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
815 /// #[derive(PartialEq, Debug)]
816 /// struct Newtype(u8);
817 /// let bytes = vec![0x11, 0x22];
818 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
819 /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice());
821 #[unstable(feature = "collections",
822 reason = "API may change to provide stronger guarantees")]
823 pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
824 // FIXME: Assert statically that the types `T` and `U` have the same
826 assert!(mem::size_of::<T>() == mem::size_of::<U>());
830 if mem::size_of::<T>() != 0 {
831 // FIXME: Assert statically that the types `T` and `U` have the
832 // same minimal alignment in case they are not zero-sized.
834 // These asserts are necessary because the `min_align_of` of the
835 // types are passed to the allocator by `Vec`.
836 assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
838 // This `as isize` cast is safe, because the size of the elements of the
839 // vector is not 0, and:
841 // 1) If the size of the elements in the vector is 1, the `int` may
842 // overflow, but it has the correct bit pattern so that the
843 // `.offset()` function will work.
846 // Address space 0x0-0xF.
847 // `u8` array at: 0x1.
848 // Size of `u8` array: 0x8.
849 // Calculated `offset`: -0x8.
850 // After `array.offset(offset)`: 0x9.
851 // (0x1 + 0x8 = 0x1 - 0x8)
853 // 2) If the size of the elements in the vector is >1, the `usize` ->
854 // `int` conversion can't overflow.
855 let offset = vec.len() as isize;
856 let start = vec.as_mut_ptr();
858 let mut pv = PartialVecNonZeroSized {
862 // This points inside the vector, as the vector has length
864 end_t: unsafe { start.offset(offset) },
865 start_u: start as *mut U,
866 end_u: start as *mut U,
877 while pv.end_u as *mut T != pv.end_t {
881 // +-+-+-+-+-+-+-+-+-+
882 // |U|...|U|T|T|...|T|
883 // +-+-+-+-+-+-+-+-+-+
887 let t = ptr::read(pv.start_t);
890 // +-+-+-+-+-+-+-+-+-+
891 // |U|...|U|X|T|...|T|
892 // +-+-+-+-+-+-+-+-+-+
895 // We must not panic here, one cell is marked as `T`
896 // although it is not `T`.
898 pv.start_t = pv.start_t.offset(1);
901 // +-+-+-+-+-+-+-+-+-+
902 // |U|...|U|X|T|...|T|
903 // +-+-+-+-+-+-+-+-+-+
906 // We may panic again.
908 // The function given by the user might panic.
911 ptr::write(pv.end_u, u);
914 // +-+-+-+-+-+-+-+-+-+
915 // |U|...|U|U|T|...|T|
916 // +-+-+-+-+-+-+-+-+-+
919 // We should not panic here, because that would leak the `U`
920 // pointed to by `end_u`.
922 pv.end_u = pv.end_u.offset(1);
925 // +-+-+-+-+-+-+-+-+-+
926 // |U|...|U|U|T|...|T|
927 // +-+-+-+-+-+-+-+-+-+
930 // We may panic again.
942 // Extract `vec` and prevent the destructor of
943 // `PartialVecNonZeroSized` from running. Note that none of the
944 // function calls can panic, thus no resources can be leaked (as the
945 // `vec` member of `PartialVec` is the only one which holds
946 // allocations -- and it is returned from this function. None of
949 let vec_len = pv.vec.len();
950 let vec_cap = pv.vec.capacity();
951 let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
953 Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
956 // Put the `Vec` into the `PartialVecZeroSized` structure and
957 // prevent the destructor of the `Vec` from running. Since the
958 // `Vec` contained zero-sized objects, it did not allocate, so we
959 // are not leaking memory here.
960 let mut pv = PartialVecZeroSized::<T,U> {
963 marker_t: InvariantType,
964 marker_u: InvariantType,
966 unsafe { mem::forget(vec); }
968 while pv.num_t != 0 {
970 // Create a `T` out of thin air and decrement `num_t`. This
971 // must not panic between these steps, as otherwise a
972 // destructor of `T` which doesn't exist runs.
973 let t = mem::uninitialized();
976 // The function given by the user might panic.
979 // Forget the `U` and increment `num_u`. This increment
980 // cannot overflow the `usize` as we only do this for a
981 // number of times that fits into a `usize` (and start with
982 // `0`). Again, we should not panic between these steps.
987 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
988 // destructor of the latter will not run. None of this can panic.
989 let mut result = Vec::new();
991 result.set_len(pv.num_u);
998 /// Splits the collection into two at the given index.
1000 /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`,
1001 /// and the returned `Self` contains elements `[at, len)`.
1003 /// Note that the capacity of `self` does not change.
1007 /// let mut vec = vec![1,2,3];
1008 /// let vec2 = vec.split_off(1);
1009 /// assert_eq!(vec, vec![1]);
1010 /// assert_eq!(vec2, vec![2, 3]);
1013 #[unstable(feature = "collections",
1014 reason = "new API, waiting for dust to settle")]
1015 pub fn split_off(&mut self, at: usize) -> Self {
1016 assert!(at < self.len(), "`at` out of bounds");
1018 let other_len = self.len - at;
1019 let mut other = Vec::with_capacity(other_len);
1021 // Unsafely `set_len` and copy items to `other`.
1024 other.set_len(other_len);
1026 ptr::copy_nonoverlapping_memory(
1028 self.as_ptr().offset(at as isize),
1036 impl<T: Clone> Vec<T> {
1037 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
1039 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
1040 /// is larger than the current value of `len()` or not.
1045 /// let mut vec = vec!["hello"];
1046 /// vec.resize(3, "world");
1047 /// assert_eq!(vec, vec!["hello", "world", "world"]);
1049 /// let mut vec = vec![1, 2, 3, 4];
1050 /// vec.resize(2, 0);
1051 /// assert_eq!(vec, vec![1, 2]);
1053 #[unstable(feature = "collections",
1054 reason = "matches collection reform specification; waiting for dust to settle")]
1055 pub fn resize(&mut self, new_len: usize, value: T) {
1056 let len = self.len();
1059 self.extend(repeat(value).take(new_len - len));
1061 self.truncate(new_len);
1065 /// Appends all elements in a slice to the `Vec`.
1067 /// Iterates over the slice `other`, clones each element, and then appends
1068 /// it to this `Vec`. The `other` vector is traversed in-order.
1073 /// let mut vec = vec![1];
1074 /// vec.push_all(&[2, 3, 4]);
1075 /// assert_eq!(vec, vec![1, 2, 3, 4]);
1078 #[unstable(feature = "collections",
1079 reason = "likely to be replaced by a more optimized extend")]
1080 pub fn push_all(&mut self, other: &[T]) {
1081 self.reserve(other.len());
1083 for i in 0..other.len() {
1084 let len = self.len();
1086 // Unsafe code so this can be optimised to a memcpy (or something similarly
1087 // fast) when T is Copy. LLVM is easily confused, so any extra operations
1088 // during the loop can prevent this optimisation.
1091 self.get_unchecked_mut(len),
1092 other.get_unchecked(i).clone());
1093 self.set_len(len + 1);
1099 impl<T: PartialEq> Vec<T> {
1100 /// Removes consecutive repeated elements in the vector.
1102 /// If the vector is sorted, this removes all duplicates.
1107 /// let mut vec = vec![1, 2, 2, 3, 2];
1111 /// assert_eq!(vec, vec![1, 2, 3, 2]);
1113 #[stable(feature = "rust1", since = "1.0.0")]
1114 pub fn dedup(&mut self) {
1116 // Although we have a mutable reference to `self`, we cannot make
1117 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1118 // must ensure that the vector is in a valid state at all time.
1120 // The way that we handle this is by using swaps; we iterate
1121 // over all the elements, swapping as we go so that at the end
1122 // the elements we wish to keep are in the front, and those we
1123 // wish to reject are at the back. We can then truncate the
1124 // vector. This operation is still O(n).
1126 // Example: We start in this state, where `r` represents "next
1127 // read" and `w` represents "next_write`.
1130 // +---+---+---+---+---+---+
1131 // | 0 | 1 | 1 | 2 | 3 | 3 |
1132 // +---+---+---+---+---+---+
1135 // Comparing self[r] against self[w-1], this is not a duplicate, so
1136 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1137 // r and w, leaving us with:
1140 // +---+---+---+---+---+---+
1141 // | 0 | 1 | 1 | 2 | 3 | 3 |
1142 // +---+---+---+---+---+---+
1145 // Comparing self[r] against self[w-1], this value is a duplicate,
1146 // so we increment `r` but leave everything else unchanged:
1149 // +---+---+---+---+---+---+
1150 // | 0 | 1 | 1 | 2 | 3 | 3 |
1151 // +---+---+---+---+---+---+
1154 // Comparing self[r] against self[w-1], this is not a duplicate,
1155 // so swap self[r] and self[w] and advance r and w:
1158 // +---+---+---+---+---+---+
1159 // | 0 | 1 | 2 | 1 | 3 | 3 |
1160 // +---+---+---+---+---+---+
1163 // Not a duplicate, repeat:
1166 // +---+---+---+---+---+---+
1167 // | 0 | 1 | 2 | 3 | 1 | 3 |
1168 // +---+---+---+---+---+---+
1171 // Duplicate, advance r. End of vec. Truncate to w.
1173 let ln = self.len();
1174 if ln < 1 { return; }
1176 // Avoid bounds checks by using unsafe pointers.
1177 let p = self.as_mut_ptr();
1182 let p_r = p.offset(r as isize);
1183 let p_wm1 = p.offset((w - 1) as isize);
1186 let p_w = p_wm1.offset(1);
1187 mem::swap(&mut *p_r, &mut *p_w);
1199 ////////////////////////////////////////////////////////////////////////////////
1200 // Internal methods and functions
1201 ////////////////////////////////////////////////////////////////////////////////
1204 /// Reserves capacity for exactly `capacity` elements in the given vector.
1206 /// If the capacity for `self` is already equal to or greater than the
1207 /// requested capacity, then no action is taken.
1208 fn grow_capacity(&mut self, capacity: usize) {
1209 if mem::size_of::<T>() == 0 { return }
1211 if capacity > self.cap {
1212 let size = capacity.checked_mul(mem::size_of::<T>())
1213 .expect("capacity overflow");
1215 let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
1216 if ptr.is_null() { ::alloc::oom() }
1217 self.ptr = NonZero::new(ptr);
1219 self.cap = capacity;
1224 // FIXME: #13996: need a way to mark the return value as `noalias`
1226 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
1228 allocate(size, mem::min_align_of::<T>()) as *mut T
1230 reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
1235 unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
1236 if mem::size_of::<T>() != 0 {
1237 deallocate(ptr as *mut u8,
1238 len * mem::size_of::<T>(),
1239 mem::min_align_of::<T>())
1243 ////////////////////////////////////////////////////////////////////////////////
1244 // Common trait implementations for Vec
1245 ////////////////////////////////////////////////////////////////////////////////
1247 #[unstable(feature = "collections")]
1248 impl<T:Clone> Clone for Vec<T> {
1249 fn clone(&self) -> Vec<T> { ::slice::SliceExt::to_vec(&**self) }
1251 fn clone_from(&mut self, other: &Vec<T>) {
1252 // drop anything in self that will not be overwritten
1253 if self.len() > other.len() {
1254 self.truncate(other.len())
1257 // reuse the contained values' allocations/resources.
1258 for (place, thing) in self.iter_mut().zip(other.iter()) {
1259 place.clone_from(thing)
1262 // self.len <= other.len due to the truncate above, so the
1263 // slice here is always in-bounds.
1264 let slice = &other[self.len()..];
1265 self.push_all(slice);
1269 impl<S: hash::Writer + hash::Hasher, T: Hash<S>> Hash<S> for Vec<T> {
1271 fn hash(&self, state: &mut S) {
1272 Hash::hash(&**self, state)
1276 #[stable(feature = "rust1", since = "1.0.0")]
1277 impl<T> Index<usize> for Vec<T> {
1281 fn index(&self, index: &usize) -> &T {
1282 // NB built-in indexing via `&[T]`
1287 #[stable(feature = "rust1", since = "1.0.0")]
1288 impl<T> IndexMut<usize> for Vec<T> {
1290 fn index_mut(&mut self, index: &usize) -> &mut T {
1291 // NB built-in indexing via `&mut [T]`
1292 &mut (**self)[*index]
// -- Range-indexing impls (fragment) --
// `vec[a..b]`, `vec[..b]`, `vec[a..]`, `vec[..]` and their `&mut` forms.
// Each one simply forwards to the corresponding slice impl via Deref(Mut);
// the slice impl performs the bounds checks. Bodies are elided in this
// extraction; code left byte-identical.
1297 #[stable(feature = "rust1", since = "1.0.0")]
1298 impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
1301 fn index(&self, index: &ops::Range<usize>) -> &[T] {
1302 Index::index(&**self, index)
1305 #[stable(feature = "rust1", since = "1.0.0")]
1306 impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
1309 fn index(&self, index: &ops::RangeTo<usize>) -> &[T] {
1310 Index::index(&**self, index)
1313 #[stable(feature = "rust1", since = "1.0.0")]
1314 impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
1317 fn index(&self, index: &ops::RangeFrom<usize>) -> &[T] {
1318 Index::index(&**self, index)
// Full-range `vec[..]`: index argument unused, presumably returns the whole
// slice — body missing from this view, TODO confirm against upstream source.
1321 #[stable(feature = "rust1", since = "1.0.0")]
1322 impl<T> ops::Index<ops::RangeFull> for Vec<T> {
1325 fn index(&self, _index: &ops::RangeFull) -> &[T] {
1330 #[stable(feature = "rust1", since = "1.0.0")]
1331 impl<T> ops::IndexMut<ops::Range<usize>> for Vec<T> {
1333 fn index_mut(&mut self, index: &ops::Range<usize>) -> &mut [T] {
1334 IndexMut::index_mut(&mut **self, index)
1337 #[stable(feature = "rust1", since = "1.0.0")]
1338 impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
1340 fn index_mut(&mut self, index: &ops::RangeTo<usize>) -> &mut [T] {
1341 IndexMut::index_mut(&mut **self, index)
1344 #[stable(feature = "rust1", since = "1.0.0")]
1345 impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
1347 fn index_mut(&mut self, index: &ops::RangeFrom<usize>) -> &mut [T] {
1348 IndexMut::index_mut(&mut **self, index)
1351 #[stable(feature = "rust1", since = "1.0.0")]
1352 impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
1354 fn index_mut(&mut self, _index: &ops::RangeFull) -> &mut [T] {
// -- Deref / DerefMut (fragment) --
// These two impls are what make every slice method available on Vec, and
// what the `&**self` delegation in the surrounding trait impls relies on.
1359 #[stable(feature = "rust1", since = "1.0.0")]
1360 impl<T> ops::Deref for Vec<T> {
1363 fn deref(&self) -> &[T] { self.as_slice() }
1366 #[stable(feature = "rust1", since = "1.0.0")]
1367 impl<T> ops::DerefMut for Vec<T> {
1368 fn deref_mut(&mut self) -> &mut [T] { self.as_mut_slice() }
// -- Collection/iteration impls (fragment) --
// FromIterator preallocates from the iterator's size_hint lower bound, then
// pushes each element (return of `vector` elided in this view).
1371 #[stable(feature = "rust1", since = "1.0.0")]
1372 impl<T> FromIterator<T> for Vec<T> {
1374 fn from_iter<I:Iterator<Item=T>>(iterator: I) -> Vec<T> {
1375 let (lower, _) = iterator.size_hint();
1376 let mut vector = Vec::with_capacity(lower);
1377 for element in iterator {
1378 vector.push(element)
// Pre-1.0 IntoIterator shape: associated type is named `Iter`, not `IntoIter`.
// By-value iteration moves elements out; `&Vec`/`&mut Vec` borrow via slices.
1384 impl<T> IntoIterator for Vec<T> {
1385 type Iter = IntoIter<T>;
1387 fn into_iter(self) -> IntoIter<T> {
1392 impl<'a, T> IntoIterator for &'a Vec<T> {
1393 type Iter = slice::Iter<'a, T>;
1395 fn into_iter(self) -> slice::Iter<'a, T> {
1400 impl<'a, T> IntoIterator for &'a mut Vec<T> {
1401 type Iter = slice::IterMut<'a, T>;
1403 fn into_iter(mut self) -> slice::IterMut<'a, T> {
// Extend mirrors from_iter: reserve once from the size_hint lower bound,
// then push each element (push loop body elided here).
1408 #[unstable(feature = "collections", reason = "waiting on Extend stability")]
1409 impl<T> Extend<T> for Vec<T> {
1411 fn extend<I: Iterator<Item=T>>(&mut self, iterator: I) {
1412 let (lower, _) = iterator.size_hint();
1413 self.reserve(lower);
1414 for element in iterator {
// -- Equality impls (fragment) --
// All comparisons deref both sides to slices and reuse the slice PartialEq.
// Heterogeneous `Vec<A> == Vec<B>` is allowed whenever `A: PartialEq<B>`.
1420 impl<A, B> PartialEq<Vec<B>> for Vec<A> where A: PartialEq<B> {
1422 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1424 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
// Macro generating the symmetric pair of cross-type PartialEq impls
// (Vec vs. slice, in both directions).
1427 macro_rules! impl_eq {
1428 ($lhs:ty, $rhs:ty) => {
1429 impl<'b, A, B> PartialEq<$rhs> for $lhs where A: PartialEq<B> {
1431 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1433 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1436 impl<'b, A, B> PartialEq<$lhs> for $rhs where B: PartialEq<A> {
1438 fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
1440 fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&**self, &**other) }
1445 impl_eq! { Vec<A>, &'b [B] }
1446 impl_eq! { Vec<A>, &'b mut [B] }
// Same story for clone-on-write vectors: compare through the slice view.
1448 impl<'a, A, B> PartialEq<Vec<B>> for CowVec<'a, A> where A: PartialEq<B> + Clone {
1450 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1452 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
1455 impl<'a, A, B> PartialEq<CowVec<'a, A>> for Vec<B> where A: Clone, B: PartialEq<A> {
1457 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
1459 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
// CowVec vs. slice analogue of impl_eq! (the `$rhs` matcher line is elided
// in this extraction).
1462 macro_rules! impl_eq_for_cowvec {
1464 impl<'a, 'b, A, B> PartialEq<$rhs> for CowVec<'a, A> where A: PartialEq<B> + Clone {
1466 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1468 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1471 impl<'a, 'b, A, B> PartialEq<CowVec<'a, A>> for $rhs where A: Clone, B: PartialEq<A> {
1473 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
1475 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
1480 impl_eq_for_cowvec! { &'b [B] }
1481 impl_eq_for_cowvec! { &'b mut [B] }
// -- Ordering impls (fragment) --
// Lexicographic comparison, delegated to the slice impls via Deref.
1483 #[unstable(feature = "collections",
1484 reason = "waiting on PartialOrd stability")]
1485 impl<T: PartialOrd> PartialOrd for Vec<T> {
1487 fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
1488 PartialOrd::partial_cmp(&**self, &**other)
1492 #[unstable(feature = "collections", reason = "waiting on Eq stability")]
1493 impl<T: Eq> Eq for Vec<T> {}
1495 #[unstable(feature = "collections", reason = "waiting on Ord stability")]
1496 impl<T: Ord> Ord for Vec<T> {
1498 fn cmp(&self, other: &Vec<T>) -> Ordering {
1499 Ord::cmp(&**self, &**other)
// -- AsSlice / Add impls (fragment) --
// as_slice builds a &[T] from the raw parts via the (pre-1.0) RawSlice repr;
// the transmute fields (data/len) are elided in this extraction.
1503 impl<T> AsSlice<T> for Vec<T> {
1504 /// Returns a slice into `self`.
1509 /// fn foo(slice: &[int]) {}
1511 /// let vec = vec![1, 2];
1512 /// foo(vec.as_slice());
1515 #[stable(feature = "rust1", since = "1.0.0")]
1516 fn as_slice(&self) -> &[T] {
1518 mem::transmute(RawSlice {
// `vec + &slice`: consumes self and, presumably, appends a clone of each
// element of `rhs` (body elided — TODO confirm it uses push_all).
1526 #[unstable(feature = "collections",
1527 reason = "recent addition, needs more experience")]
1528 impl<'a, T: Clone> Add<&'a [T]> for Vec<T> {
1529 type Output = Vec<T>;
1532 fn add(mut self, rhs: &[T]) -> Vec<T> {
// -- Drop / Default / Debug impls (fragment) --
// Drop frees the backing allocation with the raw pointer and capacity; the
// element-destruction loop between these lines is elided in this extraction.
// The zeroed-fields comment refers to the #[unsafe_no_drop_flag] scheme:
// a moved-out Vec has zeroed fields and its drop must then do nothing.
1538 #[unsafe_destructor]
1539 #[stable(feature = "rust1", since = "1.0.0")]
1540 impl<T> Drop for Vec<T> {
1541 fn drop(&mut self) {
1542 // This is (and should always remain) a no-op if the fields are
1543 // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
1549 dealloc(*self.ptr, self.cap)
// Default is presumably Vec::new() — body elided, TODO confirm.
1555 #[stable(feature = "rust1", since = "1.0.0")]
1556 impl<T> Default for Vec<T> {
1557 #[stable(feature = "rust1", since = "1.0.0")]
1558 fn default() -> Vec<T> {
// Debug formats as the underlying slice.
1563 #[stable(feature = "rust1", since = "1.0.0")]
1564 impl<T: fmt::Debug> fmt::Debug for Vec<T> {
1565 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1566 fmt::Debug::fmt(&**self, f)
1570 ////////////////////////////////////////////////////////////////////////////////
1572 ////////////////////////////////////////////////////////////////////////////////
// -- Clone-on-write vector alias and conversions (fragment) --
// Pre-1.0 three-parameter Cow: owned type Vec<T>, borrowed type [T].
1574 #[unstable(feature = "collections",
1575 reason = "unclear how valuable this alias is")]
1576 /// A clone-on-write vector
1577 pub type CowVec<'a, T> = Cow<'a, Vec<T>, [T]>;
// Collecting an iterator into a CowVec always produces the Owned variant.
1579 #[unstable(feature = "collections")]
1580 impl<'a, T> FromIterator<T> for CowVec<'a, T> where T: Clone {
1581 fn from_iter<I: Iterator<Item=T>>(it: I) -> CowVec<'a, T> {
1582 Cow::Owned(FromIterator::from_iter(it))
// into_cow: Vec presumably wraps as Owned, &[T] as Borrowed — bodies are
// elided in this extraction, TODO confirm against upstream source.
1586 impl<'a, T: 'a> IntoCow<'a, Vec<T>, [T]> for Vec<T> where T: Clone {
1587 fn into_cow(self) -> CowVec<'a, T> {
1592 impl<'a, T> IntoCow<'a, Vec<T>, [T]> for &'a [T] where T: Clone {
1593 fn into_cow(self) -> CowVec<'a, T> {
1598 ////////////////////////////////////////////////////////////////////////////////
1600 ////////////////////////////////////////////////////////////////////////////////
// -- IntoIter: by-value vector iterator (fragment) --
// Keeps the original allocation pointer + capacity so Drop can free it, and
// (on elided lines) a ptr/end pair that walks the live elements.
1602 /// An iterator that moves out of a vector.
1603 #[stable(feature = "rust1", since = "1.0.0")]
1604 pub struct IntoIter<T> {
1605 allocation: *mut T, // the block of memory allocated for the vector
1606 cap: usize, // the capacity of the vector
// Send/Sync are inherited from T: the iterator owns its elements.
1611 unsafe impl<T: Send> Send for IntoIter<T> { }
1612 unsafe impl<T: Sync> Sync for IntoIter<T> { }
1614 impl<T> IntoIter<T> {
1616 /// Drops all items that have not yet been moved and returns the empty vector.
1617 #[unstable(feature = "collections")]
// Exhausts the iterator (dropping remaining elements), then rebuilds a Vec
// of len 0 over the original allocation so the capacity is reused.
1618 pub fn into_inner(mut self) -> Vec<T> {
1620 for _x in self.by_ref() { }
1621 let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
1623 Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
1628 #[stable(feature = "rust1", since = "1.0.0")]
1629 impl<T> Iterator for IntoIter<T> {
// Moves out the front element. For zero-sized T the pointer is bumped by a
// raw integer (ptr.offset would be a no-op on a ZST) and a read from the
// non-null EMPTY sentinel manufactures the value.
1633 fn next(&mut self) -> Option<T> {
1635 if self.ptr == self.end {
1638 if mem::size_of::<T>() == 0 {
1639 // purposefully don't use 'ptr.offset' because for
1640 // vectors with 0-size elements this would return the
1642 self.ptr = mem::transmute(self.ptr as usize + 1);
1644 // Use a non-null pointer value
1645 Some(ptr::read(EMPTY as *mut T))
1648 self.ptr = self.ptr.offset(1);
1650 Some(ptr::read(old))
// Exact remaining count: byte distance / element size (ZSTs counted by the
// raw pointer-difference trick above, hence the size==0 guard).
1657 fn size_hint(&self) -> (usize, Option<usize>) {
1658 let diff = (self.end as usize) - (self.ptr as usize);
1659 let size = mem::size_of::<T>();
1660 let exact = diff / (if size == 0 {1} else {size});
1661 (exact, Some(exact))
1665 #[stable(feature = "rust1", since = "1.0.0")]
1666 impl<T> DoubleEndedIterator for IntoIter<T> {
1668 fn next_back(&mut self) -> Option<T> {
1670 if self.end == self.ptr {
1673 if mem::size_of::<T>() == 0 {
1674 // See above for why 'ptr.offset' isn't used
1675 self.end = mem::transmute(self.end as usize - 1);
1677 // Use a non-null pointer value
1678 Some(ptr::read(EMPTY as *mut T))
1680 self.end = self.end.offset(-1);
// NOTE(review): the transmute here is redundant — `self.end` is already a
// pointer readable by ptr::read; Drain::next_back below reads `self.end`
// directly. Worth normalizing upstream for consistency.
1682 Some(ptr::read(mem::transmute(self.end)))
1689 #[stable(feature = "rust1", since = "1.0.0")]
1690 impl<T> ExactSizeIterator for IntoIter<T> {}
// Drop: destroy unconsumed elements by self-exhaustion, then free the
// original allocation.
1692 #[unsafe_destructor]
1693 #[stable(feature = "rust1", since = "1.0.0")]
1694 impl<T> Drop for IntoIter<T> {
1695 fn drop(&mut self) {
1696 // destroy the remaining elements
1698 for _x in self.by_ref() {}
1700 dealloc(self.allocation, self.cap);
// -- Drain: consuming iterator that leaves the Vec's allocation in place --
// (fragment). Same ptr/end walking scheme as IntoIter, but the Vec retains
// the storage; the marker ties the borrow to the source Vec's lifetime.
1706 /// An iterator that drains a vector.
1707 #[unsafe_no_drop_flag]
1708 #[unstable(feature = "collections",
1709 reason = "recently added as part of collections reform 2")]
1710 pub struct Drain<'a, T> {
1713 marker: ContravariantLifetime<'a>,
1716 #[stable(feature = "rust1", since = "1.0.0")]
1717 impl<'a, T> Iterator for Drain<'a, T> {
// Front-to-back element move-out; ZST handling mirrors IntoIter::next
// (integer pointer bump + read from the EMPTY sentinel).
1721 fn next(&mut self) -> Option<T> {
1723 if self.ptr == self.end {
1726 if mem::size_of::<T>() == 0 {
1727 // purposefully don't use 'ptr.offset' because for
1728 // vectors with 0-size elements this would return the
1730 self.ptr = mem::transmute(self.ptr as usize + 1);
1732 // Use a non-null pointer value
1733 Some(ptr::read(EMPTY as *mut T))
1736 self.ptr = self.ptr.offset(1);
1738 Some(ptr::read(old))
// Exact remaining count, identical formula to IntoIter::size_hint.
1745 fn size_hint(&self) -> (usize, Option<usize>) {
1746 let diff = (self.end as usize) - (self.ptr as usize);
1747 let size = mem::size_of::<T>();
1748 let exact = diff / (if size == 0 {1} else {size});
1749 (exact, Some(exact))
1753 #[stable(feature = "rust1", since = "1.0.0")]
1754 impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1756 fn next_back(&mut self) -> Option<T> {
1758 if self.end == self.ptr {
1761 if mem::size_of::<T>() == 0 {
1762 // See above for why 'ptr.offset' isn't used
1763 self.end = mem::transmute(self.end as usize - 1);
1765 // Use a non-null pointer value
1766 Some(ptr::read(EMPTY as *mut T))
1768 self.end = self.end.offset(-1);
1770 Some(ptr::read(self.end))
1777 #[stable(feature = "rust1", since = "1.0.0")]
1778 impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
// Drop: destroys any elements the caller did not consume. Because of
// #[unsafe_no_drop_flag] this must be a no-op on a zeroed (already-dropped)
// value — hence the ptr==end==null invariant noted below.
1780 #[unsafe_destructor]
1781 #[stable(feature = "rust1", since = "1.0.0")]
1782 impl<'a, T> Drop for Drain<'a, T> {
1783 fn drop(&mut self) {
1784 // self.ptr == self.end == null if drop has already been called,
1785 // so we can use #[unsafe_no_drop_flag].
1787 // destroy the remaining elements
1788 for _x in self.by_ref() {}
1792 ////////////////////////////////////////////////////////////////////////////////
1793 // Conversion from &[T] to &Vec<T>
1794 ////////////////////////////////////////////////////////////////////////////////
// -- DerefVec (fragment) --
// Holds a Vec built over *borrowed* slice memory so callers can view a
// &[T] as a &Vec<T>. The Drop impl must neutralize the inner Vec before it
// tries to free memory it does not own (forget/set_len lines are elided in
// this extraction — TODO confirm upstream).
1796 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
1797 #[unstable(feature = "collections")]
1798 pub struct DerefVec<'a, T> {
1800 l: ContravariantLifetime<'a>
1803 #[unstable(feature = "collections")]
1804 impl<'a, T> Deref for DerefVec<'a, T> {
1805 type Target = Vec<T>;
1807 fn deref<'b>(&'b self) -> &'b Vec<T> {
1812 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
1813 #[unsafe_destructor]
1814 #[stable(feature = "rust1", since = "1.0.0")]
1815 impl<'a, T> Drop for DerefVec<'a, T> {
1816 fn drop(&mut self) {
// as_vec: constructs the wrapper's fake Vec from the slice's raw parts.
// Safety depends entirely on DerefVec's Drop preventing deallocation.
1822 /// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
1823 #[unstable(feature = "collections")]
1824 pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
1827 x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
1828 l: ContravariantLifetime::<'a>
1833 ////////////////////////////////////////////////////////////////////////////////
1834 // Partial vec, used for map_in_place
1835 ////////////////////////////////////////////////////////////////////////////////
// -- PartialVec helpers (fragment) --
// Panic-safety scaffolding for map_in_place: during an in-place T -> U
// conversion the buffer holds a prefix of already-converted Us and a suffix
// of not-yet-converted Ts; these structs' destructors clean up both halves
// if the mapping closure unwinds.
1837 /// An owned, partially type-converted vector of elements with non-zero size.
1839 /// `T` and `U` must have the same, non-zero size. They must also have the same
1842 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1843 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1844 /// destructed. Additionally the underlying storage of `vec` will be freed.
1845 struct PartialVecNonZeroSized<T,U> {
// Zero-sized variant: no memory to manage, only drop-counts to replay.
1854 /// An owned, partially type-converted vector of zero-sized elements.
1856 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
1858 struct PartialVecZeroSized<T,U> {
// InvariantType pins variance over T and U (pre-1.0 marker type).
1861 marker_t: InvariantType<T>,
1862 marker_u: InvariantType<U>,
1865 #[unsafe_destructor]
1866 impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
1867 fn drop(&mut self) {
1869 // `vec` hasn't been modified until now. As it has a length
1870 // currently, this would run destructors of `T`s which might not be
1871 // there. So at first, set `vec`s length to `0`. This must be done
1872 // at first to remain memory-safe as the destructors of `U` or `T`
1873 // might cause unwinding where `vec`s destructor would be executed.
1874 self.vec.set_len(0);
1876 // We have instances of `U`s and `T`s in `vec`. Destruct them.
1877 while self.start_u != self.end_u {
1878 let _ = ptr::read(self.start_u); // Run a `U` destructor.
1879 self.start_u = self.start_u.offset(1);
1881 while self.start_t != self.end_t {
1882 let _ = ptr::read(self.start_t); // Run a `T` destructor.
1883 self.start_t = self.start_t.offset(1);
1885 // After this destructor ran, the destructor of `vec` will run,
1886 // deallocating the underlying memory.
// ZST variant: materialize throwaway ZST values purely to run their
// destructors (legal only because the types are zero-sized; the counter
// decrements are on elided lines).
1891 #[unsafe_destructor]
1892 impl<T,U> Drop for PartialVecZeroSized<T,U> {
1893 fn drop(&mut self) {
1895 // Destruct the instances of `T` and `U` this struct owns.
1896 while self.num_t != 0 {
1897 let _: T = mem::uninitialized(); // Run a `T` destructor.
1900 while self.num_u != 0 {
1901 let _: U = mem::uninitialized(); // Run a `U` destructor.
// -- Test module (fragment) --
// Heavily elided #[cfg(test)] content: unit tests for Vec, IntoIter, Drain,
// as_vec, map_in_place, etc. DropCounter bumps a borrowed counter in its
// destructor so tests can assert exact drop counts.
1911 use core::mem::size_of;
1912 use core::iter::repeat;
1916 struct DropCounter<'a> {
1920 #[unsafe_destructor]
1921 impl<'a> Drop for DropCounter<'a> {
1922 fn drop(&mut self) {
1929 let xs = [1u8, 2u8, 3u8];
1930 assert_eq!(&**as_vec(&xs), xs);
1934 fn test_as_vec_dtor() {
1935 let (mut count_x, mut count_y) = (0, 0);
1937 let xs = &[DropCounter { count: &mut count_x }, DropCounter { count: &mut count_y }];
1938 assert_eq!(as_vec(xs).len(), 2);
1940 assert_eq!(count_x, 1);
1941 assert_eq!(count_y, 1);
1945 fn test_small_vec_struct() {
1946 assert!(size_of::<Vec<u8>>() == size_of::<usize>() * 3);
1950 fn test_double_drop() {
1956 let (mut count_x, mut count_y) = (0, 0);
1958 let mut tv = TwoVec {
1962 tv.x.push(DropCounter {count: &mut count_x});
1963 tv.y.push(DropCounter {count: &mut count_y});
1965 // If Vec had a drop flag, here is where it would be zeroed.
1966 // Instead, it should rely on its internal state to prevent
1967 // doing anything significant when dropped multiple times.
1970 // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
1973 assert_eq!(count_x, 1);
1974 assert_eq!(count_y, 1);
1979 let mut v = Vec::new();
1980 assert_eq!(v.capacity(), 0);
1983 assert!(v.capacity() >= 2);
1989 assert!(v.capacity() >= 16);
1991 assert!(v.capacity() >= 32);
1996 assert!(v.capacity() >= 33)
2001 let mut v = Vec::new();
2002 let mut w = Vec::new();
2005 for i in 0..3 { w.push(i) }
2010 for i in 3..10 { w.push(i) }
2016 fn test_slice_from_mut() {
2017 let mut values = vec![1, 2, 3, 4, 5];
2019 let slice = &mut values[2 ..];
2020 assert!(slice == [3, 4, 5]);
2026 assert!(values == [1, 2, 5, 6, 7]);
2030 fn test_slice_to_mut() {
2031 let mut values = vec![1, 2, 3, 4, 5];
2033 let slice = &mut values[.. 2];
2034 assert!(slice == [1, 2]);
2040 assert!(values == [2, 3, 3, 4, 5]);
2044 fn test_split_at_mut() {
2045 let mut values = vec![1, 2, 3, 4, 5];
2047 let (left, right) = values.split_at_mut(2);
2049 let left: &[_] = left;
2050 assert!(&left[..left.len()] == &[1, 2][]);
2057 let right: &[_] = right;
2058 assert!(&right[..right.len()] == &[3, 4, 5][]);
2065 assert!(values == vec![2, 3, 5, 6, 7]);
2070 let v: Vec<i32> = vec![];
2071 let w = vec!(1, 2, 3);
2073 assert_eq!(v, v.clone());
2077 // they should be disjoint in memory.
2078 assert!(w.as_ptr() != z.as_ptr())
2082 fn test_clone_from() {
2084 let three = vec!(box 1, box 2, box 3);
2085 let two = vec!(box 4, box 5);
2087 v.clone_from(&three);
2088 assert_eq!(v, three);
2091 v.clone_from(&three);
2092 assert_eq!(v, three);
2099 v.clone_from(&three);
2100 assert_eq!(v, three)
2105 let mut vec = vec![1, 2, 3, 4];
2106 vec.retain(|&x| x % 2 == 0);
2107 assert!(vec == vec![2, 4]);
2111 fn zero_sized_values() {
2112 let mut v = Vec::new();
2113 assert_eq!(v.len(), 0);
2115 assert_eq!(v.len(), 1);
2117 assert_eq!(v.len(), 2);
2118 assert_eq!(v.pop(), Some(()));
2119 assert_eq!(v.pop(), Some(()));
2120 assert_eq!(v.pop(), None);
2122 assert_eq!(v.iter().count(), 0);
2124 assert_eq!(v.iter().count(), 1);
2126 assert_eq!(v.iter().count(), 2);
2130 assert_eq!(v.iter_mut().count(), 2);
2132 assert_eq!(v.iter_mut().count(), 3);
2134 assert_eq!(v.iter_mut().count(), 4);
2136 for &mut () in &mut v {}
2137 unsafe { v.set_len(0); }
2138 assert_eq!(v.iter_mut().count(), 0);
2142 fn test_partition() {
2143 assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![]));
2144 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![]));
2145 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3]));
2146 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3]));
2150 fn test_zip_unzip() {
2151 let z1 = vec![(1, 4), (2, 5), (3, 6)];
2153 let (left, right): (Vec<_>, Vec<_>) = z1.iter().map(|&x| x).unzip();
2155 assert_eq!((1, 4), (left[0], right[0]));
2156 assert_eq!((2, 5), (left[1], right[1]));
2157 assert_eq!((3, 6), (left[2], right[2]));
2161 fn test_unsafe_ptrs() {
2163 // Test on-stack copy-from-buf.
2165 let ptr = a.as_ptr();
2166 let b = Vec::from_raw_buf(ptr, 3);
2167 assert_eq!(b, vec![1, 2, 3]);
2169 // Test on-heap copy-from-buf.
2170 let c = vec![1, 2, 3, 4, 5];
2171 let ptr = c.as_ptr();
2172 let d = Vec::from_raw_buf(ptr, 5);
2173 assert_eq!(d, vec![1, 2, 3, 4, 5]);
2178 fn test_vec_truncate_drop() {
2179 static mut drops: u32 = 0;
2181 impl Drop for Elem {
2182 fn drop(&mut self) {
2183 unsafe { drops += 1; }
2187 let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
2188 assert_eq!(unsafe { drops }, 0);
2190 assert_eq!(unsafe { drops }, 2);
2192 assert_eq!(unsafe { drops }, 5);
2197 fn test_vec_truncate_fail() {
2198 struct BadElem(i32);
2199 impl Drop for BadElem {
2200 fn drop(&mut self) {
2201 let BadElem(ref mut x) = *self;
2202 if *x == 0xbadbeef {
2203 panic!("BadElem panic: 0xbadbeef")
2208 let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
2214 let vec = vec![1, 2, 3];
2215 assert!(vec[1] == 2);
2220 fn test_index_out_of_bounds() {
2221 let vec = vec![1, 2, 3];
2227 fn test_slice_out_of_bounds_1() {
2228 let x = vec![1, 2, 3, 4, 5];
2234 fn test_slice_out_of_bounds_2() {
2235 let x = vec![1, 2, 3, 4, 5];
2241 fn test_slice_out_of_bounds_3() {
2242 let x = vec![1, 2, 3, 4, 5];
2248 fn test_slice_out_of_bounds_4() {
2249 let x = vec![1, 2, 3, 4, 5];
2255 fn test_slice_out_of_bounds_5() {
2256 let x = vec![1, 2, 3, 4, 5];
2262 fn test_swap_remove_empty() {
2263 let mut vec= Vec::<i32>::new();
2268 fn test_move_iter_unwrap() {
2269 let mut vec = Vec::with_capacity(7);
2272 let ptr = vec.as_ptr();
2273 vec = vec.into_iter().into_inner();
2274 assert_eq!(vec.as_ptr(), ptr);
2275 assert_eq!(vec.capacity(), 7);
2276 assert_eq!(vec.len(), 0);
2281 fn test_map_in_place_incompatible_types_fail() {
2282 let v = vec![0, 1, 2];
2283 v.map_in_place(|_| ());
2287 fn test_map_in_place() {
2288 let v = vec![0, 1, 2];
2289 assert_eq!(v.map_in_place(|i: u32| i as i32 - 1), [-1, 0, 1]);
2293 fn test_map_in_place_zero_sized() {
2294 let v = vec![(), ()];
2295 #[derive(PartialEq, Debug)]
2297 assert_eq!(v.map_in_place(|_| ZeroSized), [ZeroSized, ZeroSized]);
2301 fn test_map_in_place_zero_drop_count() {
2302 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
2304 #[derive(Clone, PartialEq, Debug)]
2306 impl Drop for Nothing { fn drop(&mut self) { } }
2308 #[derive(Clone, PartialEq, Debug)]
2310 impl Drop for ZeroSized {
2311 fn drop(&mut self) {
2312 DROP_COUNTER.fetch_add(1, Ordering::Relaxed);
2315 const NUM_ELEMENTS: usize = 2;
2316 static DROP_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
2318 let v = repeat(Nothing).take(NUM_ELEMENTS).collect::<Vec<_>>();
2320 DROP_COUNTER.store(0, Ordering::Relaxed);
2322 let v = v.map_in_place(|_| ZeroSized);
2323 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), 0);
2325 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), NUM_ELEMENTS);
2329 fn test_move_items() {
2330 let vec = vec![1, 2, 3];
2331 let mut vec2 = vec![];
2335 assert!(vec2 == vec![1, 2, 3]);
2339 fn test_move_items_reverse() {
2340 let vec = vec![1, 2, 3];
2341 let mut vec2 = vec![];
2342 for i in vec.into_iter().rev() {
2345 assert!(vec2 == vec![3, 2, 1]);
2349 fn test_move_items_zero_sized() {
2350 let vec = vec![(), (), ()];
2351 let mut vec2 = vec![];
2355 assert!(vec2 == vec![(), (), ()]);
2359 fn test_drain_items() {
2360 let mut vec = vec![1, 2, 3];
2361 let mut vec2 = vec![];
2362 for i in vec.drain() {
2365 assert_eq!(vec, []);
2366 assert_eq!(vec2, [ 1, 2, 3 ]);
2370 fn test_drain_items_reverse() {
2371 let mut vec = vec![1, 2, 3];
2372 let mut vec2 = vec![];
2373 for i in vec.drain().rev() {
2376 assert_eq!(vec, []);
2377 assert_eq!(vec2, [3, 2, 1]);
2381 fn test_drain_items_zero_sized() {
2382 let mut vec = vec![(), (), ()];
2383 let mut vec2 = vec![];
2384 for i in vec.drain() {
2387 assert_eq!(vec, []);
2388 assert_eq!(vec2, [(), (), ()]);
2392 fn test_into_boxed_slice() {
2393 let xs = vec![1, 2, 3];
2394 let ys = xs.into_boxed_slice();
2395 assert_eq!(ys, [1, 2, 3]);
2400 let mut vec = vec![1, 2, 3];
2401 let mut vec2 = vec![4, 5, 6];
2402 vec.append(&mut vec2);
2403 assert_eq!(vec, vec![1, 2, 3, 4, 5, 6]);
2404 assert_eq!(vec2, vec![]);
2408 fn test_split_off() {
2409 let mut vec = vec![1, 2, 3, 4, 5, 6];
2410 let vec2 = vec.split_off(4);
2411 assert_eq!(vec, vec![1, 2, 3, 4]);
2412 assert_eq!(vec2, vec![5, 6]);
// -- Benchmarks (fragment) --
// Elided #[bench] functions: each do_bench_* helper parameterizes a
// benchmark over source/destination lengths, and the bench_*_NNNN wrappers
// instantiate it at fixed sizes. b.bytes is set so throughput is reported.
2416 fn bench_new(b: &mut Bencher) {
2418 let v: Vec<u32> = Vec::new();
2419 assert_eq!(v.len(), 0);
2420 assert_eq!(v.capacity(), 0);
2424 fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) {
2425 b.bytes = src_len as u64;
2428 let v: Vec<u32> = Vec::with_capacity(src_len);
2429 assert_eq!(v.len(), 0);
2430 assert_eq!(v.capacity(), src_len);
2435 fn bench_with_capacity_0000(b: &mut Bencher) {
2436 do_bench_with_capacity(b, 0)
2440 fn bench_with_capacity_0010(b: &mut Bencher) {
2441 do_bench_with_capacity(b, 10)
2445 fn bench_with_capacity_0100(b: &mut Bencher) {
2446 do_bench_with_capacity(b, 100)
2450 fn bench_with_capacity_1000(b: &mut Bencher) {
2451 do_bench_with_capacity(b, 1000)
2454 fn do_bench_from_fn(b: &mut Bencher, src_len: usize) {
2455 b.bytes = src_len as u64;
2458 let dst = (0..src_len).collect::<Vec<_>>();
2459 assert_eq!(dst.len(), src_len);
2460 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2465 fn bench_from_fn_0000(b: &mut Bencher) {
2466 do_bench_from_fn(b, 0)
2470 fn bench_from_fn_0010(b: &mut Bencher) {
2471 do_bench_from_fn(b, 10)
2475 fn bench_from_fn_0100(b: &mut Bencher) {
2476 do_bench_from_fn(b, 100)
2480 fn bench_from_fn_1000(b: &mut Bencher) {
2481 do_bench_from_fn(b, 1000)
2484 fn do_bench_from_elem(b: &mut Bencher, src_len: usize) {
2485 b.bytes = src_len as u64;
2488 let dst: Vec<usize> = repeat(5).take(src_len).collect();
2489 assert_eq!(dst.len(), src_len);
2490 assert!(dst.iter().all(|x| *x == 5));
2495 fn bench_from_elem_0000(b: &mut Bencher) {
2496 do_bench_from_elem(b, 0)
2500 fn bench_from_elem_0010(b: &mut Bencher) {
2501 do_bench_from_elem(b, 10)
2505 fn bench_from_elem_0100(b: &mut Bencher) {
2506 do_bench_from_elem(b, 100)
2510 fn bench_from_elem_1000(b: &mut Bencher) {
2511 do_bench_from_elem(b, 1000)
2514 fn do_bench_from_slice(b: &mut Bencher, src_len: usize) {
2515 let src: Vec<_> = FromIterator::from_iter(0..src_len);
2517 b.bytes = src_len as u64;
2520 let dst = src.clone()[].to_vec();
2521 assert_eq!(dst.len(), src_len);
2522 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2527 fn bench_from_slice_0000(b: &mut Bencher) {
2528 do_bench_from_slice(b, 0)
2532 fn bench_from_slice_0010(b: &mut Bencher) {
2533 do_bench_from_slice(b, 10)
2537 fn bench_from_slice_0100(b: &mut Bencher) {
2538 do_bench_from_slice(b, 100)
2542 fn bench_from_slice_1000(b: &mut Bencher) {
2543 do_bench_from_slice(b, 1000)
2546 fn do_bench_from_iter(b: &mut Bencher, src_len: usize) {
2547 let src: Vec<_> = FromIterator::from_iter(0..src_len);
2549 b.bytes = src_len as u64;
2552 let dst: Vec<_> = FromIterator::from_iter(src.clone().into_iter());
2553 assert_eq!(dst.len(), src_len);
2554 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2559 fn bench_from_iter_0000(b: &mut Bencher) {
2560 do_bench_from_iter(b, 0)
2564 fn bench_from_iter_0010(b: &mut Bencher) {
2565 do_bench_from_iter(b, 10)
2569 fn bench_from_iter_0100(b: &mut Bencher) {
2570 do_bench_from_iter(b, 100)
2574 fn bench_from_iter_1000(b: &mut Bencher) {
2575 do_bench_from_iter(b, 1000)
2578 fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) {
2579 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2580 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2582 b.bytes = src_len as u64;
2585 let mut dst = dst.clone();
2586 dst.extend(src.clone().into_iter());
2587 assert_eq!(dst.len(), dst_len + src_len);
2588 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2593 fn bench_extend_0000_0000(b: &mut Bencher) {
2594 do_bench_extend(b, 0, 0)
2598 fn bench_extend_0000_0010(b: &mut Bencher) {
2599 do_bench_extend(b, 0, 10)
2603 fn bench_extend_0000_0100(b: &mut Bencher) {
2604 do_bench_extend(b, 0, 100)
2608 fn bench_extend_0000_1000(b: &mut Bencher) {
2609 do_bench_extend(b, 0, 1000)
2613 fn bench_extend_0010_0010(b: &mut Bencher) {
2614 do_bench_extend(b, 10, 10)
2618 fn bench_extend_0100_0100(b: &mut Bencher) {
2619 do_bench_extend(b, 100, 100)
2623 fn bench_extend_1000_1000(b: &mut Bencher) {
2624 do_bench_extend(b, 1000, 1000)
2627 fn do_bench_push_all(b: &mut Bencher, dst_len: usize, src_len: usize) {
2628 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2629 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2631 b.bytes = src_len as u64;
2634 let mut dst = dst.clone();
2636 assert_eq!(dst.len(), dst_len + src_len);
2637 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2642 fn bench_push_all_0000_0000(b: &mut Bencher) {
2643 do_bench_push_all(b, 0, 0)
2647 fn bench_push_all_0000_0010(b: &mut Bencher) {
2648 do_bench_push_all(b, 0, 10)
2652 fn bench_push_all_0000_0100(b: &mut Bencher) {
2653 do_bench_push_all(b, 0, 100)
2657 fn bench_push_all_0000_1000(b: &mut Bencher) {
2658 do_bench_push_all(b, 0, 1000)
2662 fn bench_push_all_0010_0010(b: &mut Bencher) {
2663 do_bench_push_all(b, 10, 10)
2667 fn bench_push_all_0100_0100(b: &mut Bencher) {
2668 do_bench_push_all(b, 100, 100)
2672 fn bench_push_all_1000_1000(b: &mut Bencher) {
2673 do_bench_push_all(b, 1000, 1000)
2676 fn do_bench_push_all_move(b: &mut Bencher, dst_len: usize, src_len: usize) {
2677 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2678 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2680 b.bytes = src_len as u64;
2683 let mut dst = dst.clone();
2684 dst.extend(src.clone().into_iter());
2685 assert_eq!(dst.len(), dst_len + src_len);
2686 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2691 fn bench_push_all_move_0000_0000(b: &mut Bencher) {
2692 do_bench_push_all_move(b, 0, 0)
2696 fn bench_push_all_move_0000_0010(b: &mut Bencher) {
2697 do_bench_push_all_move(b, 0, 10)
2701 fn bench_push_all_move_0000_0100(b: &mut Bencher) {
2702 do_bench_push_all_move(b, 0, 100)
2706 fn bench_push_all_move_0000_1000(b: &mut Bencher) {
2707 do_bench_push_all_move(b, 0, 1000)
2711 fn bench_push_all_move_0010_0010(b: &mut Bencher) {
2712 do_bench_push_all_move(b, 10, 10)
2716 fn bench_push_all_move_0100_0100(b: &mut Bencher) {
2717 do_bench_push_all_move(b, 100, 100)
2721 fn bench_push_all_move_1000_1000(b: &mut Bencher) {
2722 do_bench_push_all_move(b, 1000, 1000)
2725 fn do_bench_clone(b: &mut Bencher, src_len: usize) {
2726 let src: Vec<usize> = FromIterator::from_iter(0..src_len);
2728 b.bytes = src_len as u64;
2731 let dst = src.clone();
2732 assert_eq!(dst.len(), src_len);
2733 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2738 fn bench_clone_0000(b: &mut Bencher) {
2739 do_bench_clone(b, 0)
2743 fn bench_clone_0010(b: &mut Bencher) {
2744 do_bench_clone(b, 10)
2748 fn bench_clone_0100(b: &mut Bencher) {
2749 do_bench_clone(b, 100)
2753 fn bench_clone_1000(b: &mut Bencher) {
2754 do_bench_clone(b, 1000)
2757 fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) {
2758 let dst: Vec<_> = FromIterator::from_iter(0..src_len);
2759 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2761 b.bytes = (times * src_len) as u64;
2764 let mut dst = dst.clone();
2767 dst.clone_from(&src);
2769 assert_eq!(dst.len(), src_len);
2770 assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x));
2776 fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
2777 do_bench_clone_from(b, 1, 0, 0)
2781 fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
2782 do_bench_clone_from(b, 1, 0, 10)
2786 fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
2787 do_bench_clone_from(b, 1, 0, 100)
2791 fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
2792 do_bench_clone_from(b, 1, 0, 1000)
2796 fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
2797 do_bench_clone_from(b, 1, 10, 10)
2801 fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
2802 do_bench_clone_from(b, 1, 100, 100)
2806 fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
2807 do_bench_clone_from(b, 1, 1000, 1000)
2811 fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
2812 do_bench_clone_from(b, 1, 10, 100)
2816 fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
2817 do_bench_clone_from(b, 1, 100, 1000)
2821 fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
2822 do_bench_clone_from(b, 1, 10, 0)
2826 fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
2827 do_bench_clone_from(b, 1, 100, 10)
2831 fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
2832 do_bench_clone_from(b, 1, 1000, 100)
2836 fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
2837 do_bench_clone_from(b, 10, 0, 0)
2841 fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
2842 do_bench_clone_from(b, 10, 0, 10)
2846 fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
2847 do_bench_clone_from(b, 10, 0, 100)
2851 fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
2852 do_bench_clone_from(b, 10, 0, 1000)
2856 fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
2857 do_bench_clone_from(b, 10, 10, 10)
2861 fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
2862 do_bench_clone_from(b, 10, 100, 100)
2866 fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
2867 do_bench_clone_from(b, 10, 1000, 1000)
2871 fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
2872 do_bench_clone_from(b, 10, 10, 100)
2876 fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
2877 do_bench_clone_from(b, 10, 100, 1000)
2881 fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
2882 do_bench_clone_from(b, 10, 10, 0)
2886 fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
2887 do_bench_clone_from(b, 10, 100, 10)
2891 fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
2892 do_bench_clone_from(b, 10, 1000, 100)