1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type with heap-allocated contents, written `Vec<T>` but pronounced 'vector.'
13 //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end).
17 //! Explicitly creating a `Vec<T>` with `new()`:
20 //! let xs: Vec<i32> = Vec::new();
23 //! Using the `vec!` macro:
26 //! let ys: Vec<i32> = vec![];
28 //! let zs = vec![1i32, 2, 3, 4, 5];
34 //! let mut xs = vec![1i32, 2];
42 //! let mut xs = vec![1i32, 2];
44 //! let two = xs.pop();
47 #![stable(feature = "rust1", since = "1.0.0")]
51 use alloc::boxed::Box;
52 use alloc::heap::{EMPTY, allocate, reallocate, deallocate};
54 use core::cmp::{Ordering};
55 use core::default::Default;
57 use core::hash::{self, Hash};
58 use core::intrinsics::assume;
59 use core::iter::{repeat, FromIterator, IntoIterator};
60 use core::marker::PhantomData;
62 use core::num::{Int, UnsignedInt};
63 use core::ops::{Index, IndexMut, Deref, Add};
66 use core::ptr::Unique;
67 use core::raw::Slice as RawSlice;
71 use borrow::{Cow, IntoCow};
73 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
78 /// let mut vec = Vec::new();
82 /// assert_eq!(vec.len(), 2);
83 /// assert_eq!(vec[0], 1);
85 /// assert_eq!(vec.pop(), Some(2));
86 /// assert_eq!(vec.len(), 1);
89 /// assert_eq!(vec[0], 7);
91 /// vec.push_all(&[1, 2, 3]);
93 /// for x in vec.iter() {
94 /// println!("{}", x);
96 /// assert_eq!(vec, vec![7, 1, 2, 3]);
99 /// The `vec!` macro is provided to make initialization more convenient:
102 /// let mut vec = vec![1, 2, 3];
104 /// assert_eq!(vec, vec![1, 2, 3, 4]);
107 /// Use a `Vec<T>` as an efficient stack:
110 /// let mut stack = Vec::new();
117 /// let top = match stack.pop() {
118 /// None => break, // empty
121 /// // Prints 3, 2, 1
122 /// println!("{}", top);
126 /// # Capacity and reallocation
128 /// The capacity of a vector is the amount of space allocated for any future elements that will be
129 /// added onto the vector. This is not to be confused with the *length* of a vector, which
130 /// specifies the number of actual elements within the vector. If a vector's length exceeds its
131 /// capacity, its capacity will automatically be increased, but its elements will have to be
134 /// For example, a vector with capacity 10 and length 0 would be an empty vector with space for 10
135 /// more elements. Pushing 10 or fewer elements onto the vector will not change its capacity or
136 /// cause reallocation to occur. However, if the vector's length is increased to 11, it will have
137 /// to reallocate, which can be slow. For this reason, it is recommended to use
138 /// `Vec::with_capacity` whenever possible to specify how big the vector is expected to get.
139 #[unsafe_no_drop_flag]
140 #[stable(feature = "rust1", since = "1.0.0")]
147 unsafe impl<T: Send> Send for Vec<T> { }
148 unsafe impl<T: Sync> Sync for Vec<T> { }
150 ////////////////////////////////////////////////////////////////////////////////
152 ////////////////////////////////////////////////////////////////////////////////
155 /// Constructs a new, empty `Vec<T>`.
157 /// The vector will not allocate until elements are pushed onto it.
162 /// let mut vec: Vec<i32> = Vec::new();
165 #[stable(feature = "rust1", since = "1.0.0")]
166 pub fn new() -> Vec<T> {
167 // We want ptr to never be NULL so instead we set it to some arbitrary
168 // non-null value which is fine since we never call deallocate on the ptr
169 // if cap is 0. The reason for this is because the pointer of a slice
170 // being NULL would break the null pointer optimization for enums.
171 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
174 /// Constructs a new, empty `Vec<T>` with the specified capacity.
176 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
177 /// `capacity` is 0, the vector will not allocate.
179 /// It is important to note that this function does not specify the *length* of the returned
180 /// vector, but only the *capacity*. (For an explanation of the difference between length and
181 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.)
186 /// let mut vec: Vec<_> = Vec::with_capacity(10);
188 /// // The vector contains no items, even though it has capacity for more
189 /// assert_eq!(vec.len(), 0);
191 /// // These are all done without reallocating...
196 /// // ...but this may make the vector reallocate
200 #[stable(feature = "rust1", since = "1.0.0")]
201 pub fn with_capacity(capacity: usize) -> Vec<T> {
202 if mem::size_of::<T>() == 0 {
203 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
204 } else if capacity == 0 {
207 let size = capacity.checked_mul(mem::size_of::<T>())
208 .expect("capacity overflow");
209 let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
210 if ptr.is_null() { ::alloc::oom() }
211 unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
215 /// Creates a `Vec<T>` directly from the raw components of another vector.
217 /// This is highly unsafe, due to the number of invariants that aren't checked.
226 /// let mut v = vec![1, 2, 3];
228 /// // Pull out the various important pieces of information about `v`
229 /// let p = v.as_mut_ptr();
230 /// let len = v.len();
231 /// let cap = v.capacity();
234 /// // Cast `v` into the void: no destructor run, so we are in
235 /// // complete control of the allocation to which `p` points.
238 /// // Overwrite memory with 4, 5, 6
239 /// for i in 0..len as isize {
240 /// ptr::write(p.offset(i), 4 + i);
243 /// // Put everything back together into a Vec
244 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
245 /// assert_eq!(rebuilt, vec![4, 5, 6]);
249 #[stable(feature = "rust1", since = "1.0.0")]
250 pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
251 capacity: usize) -> Vec<T> {
253 ptr: Unique::new(ptr),
259 /// Creates a vector by copying the elements from a raw pointer.
261 /// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation
262 /// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector
263 /// without cloning, as if `ptr::read()` were called on them.
265 #[unstable(feature = "collections",
266 reason = "may be better expressed via composition")]
267 pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> {
268 let mut dst = Vec::with_capacity(elts);
270 ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
274 /// Returns the number of elements the vector can hold without
280 /// let vec: Vec<i32> = Vec::with_capacity(10);
281 /// assert_eq!(vec.capacity(), 10);
284 #[stable(feature = "rust1", since = "1.0.0")]
285 pub fn capacity(&self) -> usize {
289 /// Reserves capacity for at least `additional` more elements to be inserted in the given
290 /// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations.
294 /// Panics if the new capacity overflows `usize`.
299 /// let mut vec = vec![1];
301 /// assert!(vec.capacity() >= 11);
303 #[stable(feature = "rust1", since = "1.0.0")]
304 pub fn reserve(&mut self, additional: usize) {
305 if self.cap - self.len < additional {
306 let err_msg = "Vec::reserve: `usize` overflow";
307 let new_cap = self.len.checked_add(additional).expect(err_msg)
308 .checked_next_power_of_two().expect(err_msg);
309 self.grow_capacity(new_cap);
313 /// Reserves the minimum capacity for exactly `additional` more elements to
314 /// be inserted in the given `Vec<T>`. Does nothing if the capacity is already
317 /// Note that the allocator may give the collection more space than it
318 /// requests. Therefore capacity can not be relied upon to be precisely
319 /// minimal. Prefer `reserve` if future insertions are expected.
323 /// Panics if the new capacity overflows `usize`.
328 /// let mut vec = vec![1];
329 /// vec.reserve_exact(10);
330 /// assert!(vec.capacity() >= 11);
332 #[stable(feature = "rust1", since = "1.0.0")]
333 pub fn reserve_exact(&mut self, additional: usize) {
334 if self.cap - self.len < additional {
335 match self.len.checked_add(additional) {
336 None => panic!("Vec::reserve: `usize` overflow"),
337 Some(new_cap) => self.grow_capacity(new_cap)
342 /// Shrinks the capacity of the vector as much as possible.
344 /// It will drop down as close as possible to the length but the allocator
345 /// may still inform the vector that there is space for a few more elements.
350 /// let mut vec = Vec::with_capacity(10);
351 /// vec.push_all(&[1, 2, 3]);
352 /// assert_eq!(vec.capacity(), 10);
353 /// vec.shrink_to_fit();
354 /// assert!(vec.capacity() >= 3);
356 #[stable(feature = "rust1", since = "1.0.0")]
357 pub fn shrink_to_fit(&mut self) {
358 if mem::size_of::<T>() == 0 { return }
363 dealloc(*self.ptr, self.cap)
367 } else if self.cap != self.len {
369 // Overflow check is unnecessary as the vector is already at
371 let ptr = reallocate(*self.ptr as *mut u8,
372 self.cap * mem::size_of::<T>(),
373 self.len * mem::size_of::<T>(),
374 mem::min_align_of::<T>()) as *mut T;
375 if ptr.is_null() { ::alloc::oom() }
376 self.ptr = Unique::new(ptr);
382 /// Convert the vector into Box<[T]>.
384 /// Note that this will drop any excess capacity. Calling this and
385 /// converting back to a vector with `into_vec()` is equivalent to calling
386 /// `shrink_to_fit()`.
387 #[unstable(feature = "collections")]
388 pub fn into_boxed_slice(mut self) -> Box<[T]> {
389 self.shrink_to_fit();
391 let xs: Box<[T]> = mem::transmute(&mut *self);
397 /// Shortens a vector, dropping excess elements.
399 /// If `len` is greater than the vector's current length, this has no
405 /// let mut vec = vec![1, 2, 3, 4];
407 /// assert_eq!(vec, vec![1, 2]);
409 #[stable(feature = "rust1", since = "1.0.0")]
410 pub fn truncate(&mut self, len: usize) {
412 // drop any extra elements
413 while len < self.len {
414 // decrement len before the read(), so a panic on Drop doesn't
415 // re-drop the just-failed value.
417 ptr::read(self.get_unchecked(self.len));
422 /// Returns a mutable slice of the elements of `self`.
427 /// fn foo(slice: &mut [i32]) {}
429 /// let mut vec = vec![1, 2];
430 /// foo(vec.as_mut_slice());
433 #[stable(feature = "rust1", since = "1.0.0")]
434 pub fn as_mut_slice(&mut self) -> &mut [T] {
436 mem::transmute(RawSlice {
443 /// Creates a consuming iterator, that is, one that moves each value out of
444 /// the vector (from start to end). The vector cannot be used after calling
450 /// let v = vec!["a".to_string(), "b".to_string()];
451 /// for s in v.into_iter() {
452 /// // s has type String, not &String
453 /// println!("{}", s);
457 #[stable(feature = "rust1", since = "1.0.0")]
458 pub fn into_iter(self) -> IntoIter<T> {
462 let begin = ptr as *const T;
463 let end = if mem::size_of::<T>() == 0 {
464 (ptr as usize + self.len()) as *const T
466 ptr.offset(self.len() as isize) as *const T
469 IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
473 /// Sets the length of a vector.
475 /// This will explicitly set the size of the vector, without actually
476 /// modifying its buffers, so it is up to the caller to ensure that the
477 /// vector is actually the specified size.
482 /// let mut v = vec![1, 2, 3, 4];
488 #[stable(feature = "rust1", since = "1.0.0")]
489 pub unsafe fn set_len(&mut self, len: usize) {
493 /// Removes an element from anywhere in the vector and returns it, replacing
494 /// it with the last element.
496 /// This does not preserve ordering, but is O(1).
500 /// Panics if `index` is out of bounds.
505 /// let mut v = vec!["foo", "bar", "baz", "qux"];
507 /// assert_eq!(v.swap_remove(1), "bar");
508 /// assert_eq!(v, vec!["foo", "qux", "baz"]);
510 /// assert_eq!(v.swap_remove(0), "foo");
511 /// assert_eq!(v, vec!["baz", "qux"]);
514 #[stable(feature = "rust1", since = "1.0.0")]
515 pub fn swap_remove(&mut self, index: usize) -> T {
516 let length = self.len();
517 self.swap(index, length - 1);
521 /// Inserts an element at position `index` within the vector, shifting all
522 /// elements after position `index` one position to the right.
526 /// Panics if `index` is not between `0` and the vector's length (both
527 /// bounds inclusive).
532 /// let mut vec = vec![1, 2, 3];
533 /// vec.insert(1, 4);
534 /// assert_eq!(vec, vec![1, 4, 2, 3]);
535 /// vec.insert(4, 5);
536 /// assert_eq!(vec, vec![1, 4, 2, 3, 5]);
538 #[stable(feature = "rust1", since = "1.0.0")]
539 pub fn insert(&mut self, index: usize, element: T) {
540 let len = self.len();
541 assert!(index <= len);
542 // space for the new element
545 unsafe { // infallible
546 // The spot to put the new value
548 let p = self.as_mut_ptr().offset(index as isize);
549 // Shift everything over to make space. (Duplicating the
550 // `index`th element into two consecutive places.)
551 ptr::copy_memory(p.offset(1), &*p, len - index);
552 // Write it in, overwriting the first copy of the `index`th
554 ptr::write(&mut *p, element);
556 self.set_len(len + 1);
560 /// Removes and returns the element at position `index` within the vector,
561 /// shifting all elements after position `index` one position to the left.
565 /// Panics if `index` is out of bounds.
570 /// let mut v = vec![1, 2, 3];
571 /// assert_eq!(v.remove(1), 2);
572 /// assert_eq!(v, vec![1, 3]);
574 #[stable(feature = "rust1", since = "1.0.0")]
575 pub fn remove(&mut self, index: usize) -> T {
576 let len = self.len();
577 assert!(index < len);
578 unsafe { // infallible
581 // the place we are taking from.
582 let ptr = self.as_mut_ptr().offset(index as isize);
583 // copy it out, unsafely having a copy of the value on
584 // the stack and in the vector at the same time.
585 ret = ptr::read(ptr);
587 // Shift everything down to fill in that spot.
588 ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
590 self.set_len(len - 1);
595 /// Retains only the elements specified by the predicate.
597 /// In other words, remove all elements `e` such that `f(&e)` returns false.
598 /// This method operates in place and preserves the order of the retained
604 /// let mut vec = vec![1, 2, 3, 4];
605 /// vec.retain(|&x| x%2 == 0);
606 /// assert_eq!(vec, vec![2, 4]);
608 #[stable(feature = "rust1", since = "1.0.0")]
609 pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
610 let len = self.len();
624 self.truncate(len - del);
628 /// Appends an element to the back of a collection.
632 /// Panics if the number of elements in the vector overflows a `usize`.
637 /// let mut vec = vec!(1, 2);
639 /// assert_eq!(vec, vec!(1, 2, 3));
642 #[stable(feature = "rust1", since = "1.0.0")]
643 pub fn push(&mut self, value: T) {
644 if mem::size_of::<T>() == 0 {
645 // zero-size types consume no memory, so we can't rely on the
646 // address space running out
647 self.len = self.len.checked_add(1).expect("length overflow");
648 unsafe { mem::forget(value); }
651 if self.len == self.cap {
652 let old_size = self.cap * mem::size_of::<T>();
653 let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
654 if old_size > size { panic!("capacity overflow") }
656 let ptr = alloc_or_realloc(*self.ptr, old_size, size);
657 if ptr.is_null() { ::alloc::oom() }
658 self.ptr = Unique::new(ptr);
660 self.cap = max(self.cap, 2) * 2;
664 let end = (*self.ptr).offset(self.len as isize);
665 ptr::write(&mut *end, value);
670 /// Removes the last element from a vector and returns it, or `None` if it is empty.
675 /// let mut vec = vec![1, 2, 3];
676 /// assert_eq!(vec.pop(), Some(3));
677 /// assert_eq!(vec, vec![1, 2]);
680 #[stable(feature = "rust1", since = "1.0.0")]
681 pub fn pop(&mut self) -> Option<T> {
687 Some(ptr::read(self.get_unchecked(self.len())))
692 /// Moves all the elements of `other` into `Self`, leaving `other` empty.
696 /// Panics if the number of elements in the vector overflows a `usize`.
701 /// let mut vec = vec![1, 2, 3];
702 /// let mut vec2 = vec![4, 5, 6];
703 /// vec.append(&mut vec2);
704 /// assert_eq!(vec, vec![1, 2, 3, 4, 5, 6]);
705 /// assert_eq!(vec2, vec![]);
708 #[unstable(feature = "collections",
709 reason = "new API, waiting for dust to settle")]
710 pub fn append(&mut self, other: &mut Self) {
711 if mem::size_of::<T>() == 0 {
712 // zero-size types consume no memory, so we can't rely on the
713 // address space running out
714 self.len = self.len.checked_add(other.len()).expect("length overflow");
715 unsafe { other.set_len(0) }
718 self.reserve(other.len());
719 let len = self.len();
721 ptr::copy_nonoverlapping_memory(
722 self.get_unchecked_mut(len),
727 self.len += other.len();
728 unsafe { other.set_len(0); }
731 /// Creates a draining iterator that clears the `Vec` and iterates over
732 /// the removed items from start to end.
737 /// let mut v = vec!["a".to_string(), "b".to_string()];
738 /// for s in v.drain() {
739 /// // s has type String, not &String
740 /// println!("{}", s);
742 /// assert!(v.is_empty());
745 #[unstable(feature = "collections",
746 reason = "matches collection reform specification, waiting for dust to settle")]
747 pub fn drain(&mut self) -> Drain<T> {
749 let begin = *self.ptr as *const T;
750 let end = if mem::size_of::<T>() == 0 {
751 (*self.ptr as usize + self.len()) as *const T
753 (*self.ptr).offset(self.len() as isize) as *const T
764 /// Clears the vector, removing all values.
769 /// let mut v = vec![1, 2, 3];
773 /// assert!(v.is_empty());
776 #[stable(feature = "rust1", since = "1.0.0")]
777 pub fn clear(&mut self) {
781 /// Returns the number of elements in the vector.
786 /// let a = vec![1, 2, 3];
787 /// assert_eq!(a.len(), 3);
790 #[stable(feature = "rust1", since = "1.0.0")]
791 pub fn len(&self) -> usize { self.len }
793 /// Returns `true` if the vector contains no elements.
798 /// let mut v = Vec::new();
799 /// assert!(v.is_empty());
802 /// assert!(!v.is_empty());
804 #[stable(feature = "rust1", since = "1.0.0")]
805 pub fn is_empty(&self) -> bool { self.len() == 0 }
807 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
808 /// size and in case they are not zero-sized the same minimal alignment.
812 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
813 /// have differing minimal alignments.
818 /// let v = vec![0, 1, 2];
819 /// let w = v.map_in_place(|i| i + 3);
820 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
822 /// #[derive(PartialEq, Debug)]
823 /// struct Newtype(u8);
824 /// let bytes = vec![0x11, 0x22];
825 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
826 /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice());
828 #[unstable(feature = "collections",
829 reason = "API may change to provide stronger guarantees")]
830 pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
831 // FIXME: Assert statically that the types `T` and `U` have the same
833 assert!(mem::size_of::<T>() == mem::size_of::<U>());
837 if mem::size_of::<T>() != 0 {
838 // FIXME: Assert statically that the types `T` and `U` have the
839 // same minimal alignment in case they are not zero-sized.
841 // These asserts are necessary because the `min_align_of` of the
842 // types are passed to the allocator by `Vec`.
843 assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
845 // This `as isize` cast is safe, because the size of the elements of the
846 // vector is not 0, and:
848 // 1) If the size of the elements in the vector is 1, the `isize` may
849 // overflow, but it has the correct bit pattern so that the
850 // `.offset()` function will work.
853 // Address space 0x0-0xF.
854 // `u8` array at: 0x1.
855 // Size of `u8` array: 0x8.
856 // Calculated `offset`: -0x8.
857 // After `array.offset(offset)`: 0x9.
858 // (0x1 + 0x8 = 0x1 - 0x8)
860 // 2) If the size of the elements in the vector is >1, the `usize` ->
861 // `isize` conversion can't overflow.
862 let offset = vec.len() as isize;
863 let start = vec.as_mut_ptr();
865 let mut pv = PartialVecNonZeroSized {
869 // This points inside the vector, as the vector has length
871 end_t: unsafe { start.offset(offset) },
872 start_u: start as *mut U,
873 end_u: start as *mut U,
875 _marker: PhantomData,
886 while pv.end_u as *mut T != pv.end_t {
890 // +-+-+-+-+-+-+-+-+-+
891 // |U|...|U|T|T|...|T|
892 // +-+-+-+-+-+-+-+-+-+
896 let t = ptr::read(pv.start_t);
899 // +-+-+-+-+-+-+-+-+-+
900 // |U|...|U|X|T|...|T|
901 // +-+-+-+-+-+-+-+-+-+
904 // We must not panic here, one cell is marked as `T`
905 // although it is not `T`.
907 pv.start_t = pv.start_t.offset(1);
910 // +-+-+-+-+-+-+-+-+-+
911 // |U|...|U|X|T|...|T|
912 // +-+-+-+-+-+-+-+-+-+
915 // We may panic again.
917 // The function given by the user might panic.
920 ptr::write(pv.end_u, u);
923 // +-+-+-+-+-+-+-+-+-+
924 // |U|...|U|U|T|...|T|
925 // +-+-+-+-+-+-+-+-+-+
928 // We should not panic here, because that would leak the `U`
929 // pointed to by `end_u`.
931 pv.end_u = pv.end_u.offset(1);
934 // +-+-+-+-+-+-+-+-+-+
935 // |U|...|U|U|T|...|T|
936 // +-+-+-+-+-+-+-+-+-+
939 // We may panic again.
951 // Extract `vec` and prevent the destructor of
952 // `PartialVecNonZeroSized` from running. Note that none of the
953 // function calls can panic, thus no resources can be leaked (as the
954 // `vec` member of `PartialVec` is the only one which holds
955 // allocations -- and it is returned from this function. None of
958 let vec_len = pv.vec.len();
959 let vec_cap = pv.vec.capacity();
960 let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
962 Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
965 // Put the `Vec` into the `PartialVecZeroSized` structure and
966 // prevent the destructor of the `Vec` from running. Since the
967 // `Vec` contained zero-sized objects, it did not allocate, so we
968 // are not leaking memory here.
969 let mut pv = PartialVecZeroSized::<T,U> {
974 unsafe { mem::forget(vec); }
976 while pv.num_t != 0 {
978 // Create a `T` out of thin air and decrement `num_t`. This
979 // must not panic between these steps, as otherwise a
980 // destructor of `T` which doesn't exist runs.
981 let t = mem::uninitialized();
984 // The function given by the user might panic.
987 // Forget the `U` and increment `num_u`. This increment
988 // cannot overflow the `usize` as we only do this for a
989 // number of times that fits into a `usize` (and start with
990 // `0`). Again, we should not panic between these steps.
995 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
996 // destructor of the latter will not run. None of this can panic.
997 let mut result = Vec::new();
999 result.set_len(pv.num_u);
1006 /// Splits the collection into two at the given index.
1008 /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`,
1009 /// and the returned `Self` contains elements `[at, len)`.
1011 /// Note that the capacity of `self` does not change.
1015 /// Panics if `at > len`.
1020 /// let mut vec = vec![1,2,3];
1021 /// let vec2 = vec.split_off(1);
1022 /// assert_eq!(vec, vec![1]);
1023 /// assert_eq!(vec2, vec![2, 3]);
1026 #[unstable(feature = "collections",
1027 reason = "new API, waiting for dust to settle")]
1028 pub fn split_off(&mut self, at: usize) -> Self {
1029 assert!(at <= self.len(), "`at` out of bounds");
1031 let other_len = self.len - at;
1032 let mut other = Vec::with_capacity(other_len);
1034 // Unsafely `set_len` and copy items to `other`.
1037 other.set_len(other_len);
1039 ptr::copy_nonoverlapping_memory(
1041 self.as_ptr().offset(at as isize),
1049 impl<T: Clone> Vec<T> {
1050 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
1052 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
1053 /// is larger than the current value of `len()` or not.
1058 /// let mut vec = vec!["hello"];
1059 /// vec.resize(3, "world");
1060 /// assert_eq!(vec, vec!["hello", "world", "world"]);
1062 /// let mut vec = vec![1, 2, 3, 4];
1063 /// vec.resize(2, 0);
1064 /// assert_eq!(vec, vec![1, 2]);
1066 #[unstable(feature = "collections",
1067 reason = "matches collection reform specification; waiting for dust to settle")]
1068 pub fn resize(&mut self, new_len: usize, value: T) {
1069 let len = self.len();
1072 self.extend(repeat(value).take(new_len - len));
1074 self.truncate(new_len);
1078 /// Appends all elements in a slice to the `Vec`.
1080 /// Iterates over the slice `other`, clones each element, and then appends
1081 /// it to this `Vec`. The `other` vector is traversed in-order.
1086 /// let mut vec = vec![1];
1087 /// vec.push_all(&[2, 3, 4]);
1088 /// assert_eq!(vec, vec![1, 2, 3, 4]);
1091 #[unstable(feature = "collections",
1092 reason = "likely to be replaced by a more optimized extend")]
1093 pub fn push_all(&mut self, other: &[T]) {
1094 self.reserve(other.len());
1096 for i in 0..other.len() {
1097 let len = self.len();
1099 // Unsafe code so this can be optimised to a memcpy (or something similarly
1100 // fast) when T is Copy. LLVM is easily confused, so any extra operations
1101 // during the loop can prevent this optimisation.
1104 self.get_unchecked_mut(len),
1105 other.get_unchecked(i).clone());
1106 self.set_len(len + 1);
1112 impl<T: PartialEq> Vec<T> {
1113 /// Removes consecutive repeated elements in the vector.
1115 /// If the vector is sorted, this removes all duplicates.
1120 /// let mut vec = vec![1, 2, 2, 3, 2];
1124 /// assert_eq!(vec, vec![1, 2, 3, 2]);
1126 #[stable(feature = "rust1", since = "1.0.0")]
1127 pub fn dedup(&mut self) {
1129 // Although we have a mutable reference to `self`, we cannot make
1130 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1131 // must ensure that the vector is in a valid state at all time.
1133 // The way that we handle this is by using swaps; we iterate
1134 // over all the elements, swapping as we go so that at the end
1135 // the elements we wish to keep are in the front, and those we
1136 // wish to reject are at the back. We can then truncate the
1137 // vector. This operation is still O(n).
1139 // Example: We start in this state, where `r` represents "next
1140 // read" and `w` represents "next write".
1143 // +---+---+---+---+---+---+
1144 // | 0 | 1 | 1 | 2 | 3 | 3 |
1145 // +---+---+---+---+---+---+
1148 // Comparing self[r] against self[w-1], this is not a duplicate, so
1149 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1150 // r and w, leaving us with:
1153 // +---+---+---+---+---+---+
1154 // | 0 | 1 | 1 | 2 | 3 | 3 |
1155 // +---+---+---+---+---+---+
1158 // Comparing self[r] against self[w-1], this value is a duplicate,
1159 // so we increment `r` but leave everything else unchanged:
1162 // +---+---+---+---+---+---+
1163 // | 0 | 1 | 1 | 2 | 3 | 3 |
1164 // +---+---+---+---+---+---+
1167 // Comparing self[r] against self[w-1], this is not a duplicate,
1168 // so swap self[r] and self[w] and advance r and w:
1171 // +---+---+---+---+---+---+
1172 // | 0 | 1 | 2 | 1 | 3 | 3 |
1173 // +---+---+---+---+---+---+
1176 // Not a duplicate, repeat:
1179 // +---+---+---+---+---+---+
1180 // | 0 | 1 | 2 | 3 | 1 | 3 |
1181 // +---+---+---+---+---+---+
1184 // Duplicate, advance r. End of vec. Truncate to w.
1186 let ln = self.len();
1187 if ln < 1 { return; }
1189 // Avoid bounds checks by using unsafe pointers.
1190 let p = self.as_mut_ptr();
1195 let p_r = p.offset(r as isize);
1196 let p_wm1 = p.offset((w - 1) as isize);
1199 let p_w = p_wm1.offset(1);
1200 mem::swap(&mut *p_r, &mut *p_w);
1212 ////////////////////////////////////////////////////////////////////////////////
1213 // Internal methods and functions
1214 ////////////////////////////////////////////////////////////////////////////////
1217 /// Reserves capacity for exactly `capacity` elements in the given vector.
1219 /// If the capacity for `self` is already equal to or greater than the
1220 /// requested capacity, then no action is taken.
1221 fn grow_capacity(&mut self, capacity: usize) {
1222 if mem::size_of::<T>() == 0 { return }
1224 if capacity > self.cap {
1225 let size = capacity.checked_mul(mem::size_of::<T>())
1226 .expect("capacity overflow");
1228 let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
1229 if ptr.is_null() { ::alloc::oom() }
1230 self.ptr = Unique::new(ptr);
1232 self.cap = capacity;
1237 // FIXME: #13996: need a way to mark the return value as `noalias`
1239 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
1241 allocate(size, mem::min_align_of::<T>()) as *mut T
1243 reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
1248 unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
1249 if mem::size_of::<T>() != 0 {
1250 deallocate(ptr as *mut u8,
1251 len * mem::size_of::<T>(),
1252 mem::min_align_of::<T>())
1257 #[stable(feature = "rust1", since = "1.0.0")]
1258 pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
1260 let mut v = Vec::with_capacity(n);
1261 let mut ptr = v.as_mut_ptr();
1263 // Write all elements except the last one
1265 ptr::write(ptr, Clone::clone(&elem));
1266 ptr = ptr.offset(1);
1267 v.set_len(i); // Increment the length in every step in case Clone::clone() panics
1271 // We can write the last element directly without cloning needlessly
1272 ptr::write(ptr, elem);
1280 ////////////////////////////////////////////////////////////////////////////////
1281 // Common trait implementations for Vec
1282 ////////////////////////////////////////////////////////////////////////////////
1284 #[unstable(feature = "collections")]
1285 impl<T:Clone> Clone for Vec<T> {
1286 fn clone(&self) -> Vec<T> { ::slice::SliceExt::to_vec(&**self) }
1288 fn clone_from(&mut self, other: &Vec<T>) {
1289 // drop anything in self that will not be overwritten
1290 if self.len() > other.len() {
1291 self.truncate(other.len())
1294 // reuse the contained values' allocations/resources.
1295 for (place, thing) in self.iter_mut().zip(other.iter()) {
1296 place.clone_from(thing)
1299 // self.len <= other.len due to the truncate above, so the
1300 // slice here is always in-bounds.
1301 let slice = &other[self.len()..];
1302 self.push_all(slice);
1307 impl<S: hash::Writer + hash::Hasher, T: Hash<S>> Hash<S> for Vec<T> {
1309 fn hash(&self, state: &mut S) {
1310 Hash::hash(&**self, state)
1313 #[stable(feature = "rust1", since = "1.0.0")]
1315 impl<T: Hash> Hash for Vec<T> {
1317 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1318 Hash::hash(&**self, state)
1322 #[stable(feature = "rust1", since = "1.0.0")]
1323 impl<T> Index<usize> for Vec<T> {
1327 fn index(&self, index: &usize) -> &T {
1328 // NB built-in indexing via `&[T]`
1333 #[stable(feature = "rust1", since = "1.0.0")]
1334 impl<T> IndexMut<usize> for Vec<T> {
1336 fn index_mut(&mut self, index: &usize) -> &mut T {
1337 // NB built-in indexing via `&mut [T]`
1338 &mut (**self)[*index]
1343 #[stable(feature = "rust1", since = "1.0.0")]
1344 impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
1347 fn index(&self, index: &ops::Range<usize>) -> &[T] {
1348 Index::index(&**self, index)
1351 #[stable(feature = "rust1", since = "1.0.0")]
1352 impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
1355 fn index(&self, index: &ops::RangeTo<usize>) -> &[T] {
1356 Index::index(&**self, index)
1359 #[stable(feature = "rust1", since = "1.0.0")]
1360 impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
1363 fn index(&self, index: &ops::RangeFrom<usize>) -> &[T] {
1364 Index::index(&**self, index)
1367 #[stable(feature = "rust1", since = "1.0.0")]
1368 impl<T> ops::Index<ops::RangeFull> for Vec<T> {
1371 fn index(&self, _index: &ops::RangeFull) -> &[T] {
// Mutable range indexing (`&mut vec[a..b]` etc.). Mirrors the shared-range
// impls above, forwarding to IndexMut on [T] via DerefMut (`&mut **self`).
1376 #[stable(feature = "rust1", since = "1.0.0")]
1377 impl<T> ops::IndexMut<ops::Range<usize>> for Vec<T> {
1379 fn index_mut(&mut self, index: &ops::Range<usize>) -> &mut [T] {
1380 IndexMut::index_mut(&mut **self, index)
1383 #[stable(feature = "rust1", since = "1.0.0")]
1384 impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
1386 fn index_mut(&mut self, index: &ops::RangeTo<usize>) -> &mut [T] {
1387 IndexMut::index_mut(&mut **self, index)
1390 #[stable(feature = "rust1", since = "1.0.0")]
1391 impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
1393 fn index_mut(&mut self, index: &ops::RangeFrom<usize>) -> &mut [T] {
1394 IndexMut::index_mut(&mut **self, index)
1397 #[stable(feature = "rust1", since = "1.0.0")]
1398 impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
1400 fn index_mut(&mut self, _index: &ops::RangeFull) -> &mut [T] {
// Deref/DerefMut to [T]: this is what gives Vec<T> every slice method and
// lets `&Vec<T>` coerce to `&[T]` at call sites.
1405 #[stable(feature = "rust1", since = "1.0.0")]
1406 impl<T> ops::Deref for Vec<T> {
1409 fn deref(&self) -> &[T] { self.as_slice() }
1412 #[stable(feature = "rust1", since = "1.0.0")]
1413 impl<T> ops::DerefMut for Vec<T> {
1414 fn deref_mut(&mut self) -> &mut [T] { self.as_mut_slice() }
// Collecting an iterator into a Vec. Pre-allocates from the size_hint lower
// bound, then fills via unchecked writes up to that capacity (fast path),
// falling back to plain `push` for any remaining elements.
1417 #[stable(feature = "rust1", since = "1.0.0")]
1418 impl<T> FromIterator<T> for Vec<T> {
1420 fn from_iter<I: IntoIterator<Item=T>>(iterable: I) -> Vec<T> {
1421 let mut iterator = iterable.into_iter();
1422 let (lower, _) = iterator.size_hint();
1423 let mut vector = Vec::with_capacity(lower);
1425 // This function should be the moral equivalent of:
1427 // for item in iterator {
1428 // vector.push(item);
1431 // This equivalent crucially runs the iterator precisely once. Below we
1432 // actually in theory run the iterator twice (one without bounds checks
1433 // and one with). To achieve the "moral equivalent", we use the `if`
1434 // statement below to break out early.
1436 // If the first loop has terminated, then we have one of two conditions.
1438 // 1. The underlying iterator returned `None`. In this case we are
1439 // guaranteed that less than `vector.capacity()` elements have been
1440 // returned, so we break out early.
1441 // 2. The underlying iterator yielded `vector.capacity()` elements and
1442 // has not yielded `None` yet. In this case we run the iterator to
1444 for element in iterator.by_ref().take(vector.capacity()) {
1445 let len = vector.len();
// SAFETY-relevant: len < capacity inside this loop (take() bounds it),
// so the unchecked write and set_len stay within the allocation.
1447 ptr::write(vector.get_unchecked_mut(len), element);
1448 vector.set_len(len + 1);
1452 if vector.len() == vector.capacity() {
1453 for element in iterator {
1454 vector.push(element);
// IntoIterator in all three forms: by value (consuming IntoIter<T>),
// by shared reference (slice::Iter), and by mutable reference
// (slice::IterMut). The reference forms enable `for x in &vec` loops.
1461 #[stable(feature = "rust1", since = "1.0.0")]
1462 impl<T> IntoIterator for Vec<T> {
1464 type IntoIter = IntoIter<T>;
1466 fn into_iter(self) -> IntoIter<T> {
1471 #[stable(feature = "rust1", since = "1.0.0")]
1472 impl<'a, T> IntoIterator for &'a Vec<T> {
1474 type IntoIter = slice::Iter<'a, T>;
1476 fn into_iter(self) -> slice::Iter<'a, T> {
1481 #[stable(feature = "rust1", since = "1.0.0")]
1482 impl<'a, T> IntoIterator for &'a mut Vec<T> {
1483 type Item = &'a mut T;
1484 type IntoIter = slice::IterMut<'a, T>;
1486 fn into_iter(mut self) -> slice::IterMut<'a, T> {
// Extend: reserve once using the iterator's size_hint lower bound so the
// common case does a single reallocation, then push each element.
1491 #[unstable(feature = "collections", reason = "waiting on Extend stability")]
1492 impl<T> Extend<T> for Vec<T> {
1494 fn extend<I: IntoIterator<Item=T>>(&mut self, iterable: I) {
1495 let iterator = iterable.into_iter();
1496 let (lower, _) = iterator.size_hint();
1497 self.reserve(lower);
1498 for element in iterator {
// Equality: Vec<A> == Vec<B> whenever A: PartialEq<B>, delegating to the
// slice comparison through Deref. The impl_eq! macro then generates the
// symmetric Vec-vs-slice comparisons for `&[B]` and `&mut [B]`.
1504 impl<A, B> PartialEq<Vec<B>> for Vec<A> where A: PartialEq<B> {
1506 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1508 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
1511 macro_rules! impl_eq {
1512 ($lhs:ty, $rhs:ty) => {
1513 impl<'b, A, B> PartialEq<$rhs> for $lhs where A: PartialEq<B> {
1515 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1517 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1520 impl<'b, A, B> PartialEq<$lhs> for $rhs where B: PartialEq<A> {
1522 fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
1524 fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&**self, &**other) }
1529 impl_eq! { Vec<A>, &'b [B] }
1530 impl_eq! { Vec<A>, &'b mut [B] }
// Cow<[A]> vs Vec<B> equality in both directions, plus a macro generating
// the Cow-vs-slice comparisons. All delegate to slice PartialEq via Deref.
1532 impl<'a, A, B> PartialEq<Vec<B>> for Cow<'a, [A]> where A: PartialEq<B> + Clone {
1534 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1536 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
1539 impl<'a, A, B> PartialEq<Cow<'a, [A]>> for Vec<B> where A: Clone, B: PartialEq<A> {
1541 fn eq(&self, other: &Cow<'a, [A]>) -> bool { PartialEq::eq(&**self, &**other) }
1543 fn ne(&self, other: &Cow<'a, [A]>) -> bool { PartialEq::ne(&**self, &**other) }
1546 macro_rules! impl_eq_for_cowvec {
1548 impl<'a, 'b, A, B> PartialEq<$rhs> for Cow<'a, [A]> where A: PartialEq<B> + Clone {
1550 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1552 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1555 impl<'a, 'b, A, B> PartialEq<Cow<'a, [A]>> for $rhs where A: Clone, B: PartialEq<A> {
1557 fn eq(&self, other: &Cow<'a, [A]>) -> bool { PartialEq::eq(&**self, &**other) }
1559 fn ne(&self, other: &Cow<'a, [A]>) -> bool { PartialEq::ne(&**self, &**other) }
1564 impl_eq_for_cowvec! { &'b [B] }
1565 impl_eq_for_cowvec! { &'b mut [B] }
// Ordering: PartialOrd/Eq/Ord all delegate to the slice impls through Deref,
// giving Vec<T> lexicographic comparison semantics identical to [T].
1567 #[stable(feature = "rust1", since = "1.0.0")]
1568 impl<T: PartialOrd> PartialOrd for Vec<T> {
1570 fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
1571 PartialOrd::partial_cmp(&**self, &**other)
1575 #[stable(feature = "rust1", since = "1.0.0")]
1576 impl<T: Eq> Eq for Vec<T> {}
1578 #[stable(feature = "rust1", since = "1.0.0")]
1579 impl<T: Ord> Ord for Vec<T> {
1581 fn cmp(&self, other: &Vec<T>) -> Ordering {
1582 Ord::cmp(&**self, &**other)
// AsSlice: builds a &[T] view of the buffer by transmuting a RawSlice
// (ptr + len pair). The `assume(p != 0)` tells LLVM the data pointer is
// non-null, which lets Option<&[T]>-style niche optimizations and null
// checks be elided; a Vec's pointer is never null (EMPTY sentinel is used
// for zero-capacity vectors).
1586 impl<T> AsSlice<T> for Vec<T> {
1587 /// Returns a slice into `self`.
1592 /// fn foo(slice: &[i32]) {}
1594 /// let vec = vec![1, 2];
1595 /// foo(vec.as_slice());
1598 #[stable(feature = "rust1", since = "1.0.0")]
1599 fn as_slice(&self) -> &[T] {
1602 if cfg!(not(stage0)) { // NOTE remove cfg after next snapshot
1603 assume(p != 0 as *mut T);
1605 mem::transmute(RawSlice {
// `vec + &slice`: consumes self, appends clones of the slice elements,
// and returns the (possibly reallocated) vector.
1613 #[unstable(feature = "collections",
1614 reason = "recent addition, needs more experience")]
1615 impl<'a, T: Clone> Add<&'a [T]> for Vec<T> {
1616 type Output = Vec<T>;
1619 fn add(mut self, rhs: &[T]) -> Vec<T> {
// Destructor: drops the live elements, then frees the backing allocation.
// Must stay a no-op when the fields are zeroed, because #[unsafe_no_drop_flag]
// means drop can run on a moved-from (zeroed) Vec.
1625 #[unsafe_destructor]
1626 #[stable(feature = "rust1", since = "1.0.0")]
1627 impl<T> Drop for Vec<T> {
1628 fn drop(&mut self) {
1629 // This is (and should always remain) a no-op if the fields are
1630 // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
1636 dealloc(*self.ptr, self.cap)
// Default (an empty Vec) and Debug (delegates to the slice formatter via
// Deref, so output matches `[T]`'s `{:?}` form).
1642 #[stable(feature = "rust1", since = "1.0.0")]
1643 impl<T> Default for Vec<T> {
1644 #[stable(feature = "rust1", since = "1.0.0")]
1645 fn default() -> Vec<T> {
1650 #[stable(feature = "rust1", since = "1.0.0")]
1651 impl<T: fmt::Debug> fmt::Debug for Vec<T> {
1652 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1653 fmt::Debug::fmt(&**self, f)
1657 ////////////////////////////////////////////////////////////////////////////////
1659 ////////////////////////////////////////////////////////////////////////////////
1661 /// A clone-on-write vector
// Deprecated alias kept for transition; new code should name Cow<'a, [T]> directly.
1662 #[deprecated(since = "1.0.0", reason = "use Cow<'a, [T]> instead")]
1663 #[unstable(feature = "collections")]
1664 pub type CowVec<'a, T> = Cow<'a, [T]>;
// Collecting into a Cow always produces the Owned variant (a fresh Vec).
1666 #[unstable(feature = "collections")]
1667 impl<'a, T> FromIterator<T> for Cow<'a, [T]> where T: Clone {
1668 fn from_iter<I: IntoIterator<Item=T>>(it: I) -> Cow<'a, [T]> {
1669 Cow::Owned(FromIterator::from_iter(it))
// IntoCow conversions: an owned Vec becomes Cow::Owned (no copy); a borrowed
// slice becomes Cow::Borrowed.
1673 impl<'a, T: 'a> IntoCow<'a, [T]> for Vec<T> where T: Clone {
1674 fn into_cow(self) -> Cow<'a, [T]> {
1679 impl<'a, T> IntoCow<'a, [T]> for &'a [T] where T: Clone {
1680 fn into_cow(self) -> Cow<'a, [T]> {
1685 ////////////////////////////////////////////////////////////////////////////////
1687 ////////////////////////////////////////////////////////////////////////////////
1689 /// An iterator that moves out of a vector.
// Keeps the original allocation pointer/capacity so the buffer can be freed
// (or handed back via into_inner) after iteration.
1690 #[stable(feature = "rust1", since = "1.0.0")]
1691 pub struct IntoIter<T> {
1692 allocation: *mut T, // the block of memory allocated for the vector
1693 cap: usize, // the capacity of the vector
// Send/Sync follow the element type: raw pointers are not auto-Send/Sync,
// so these explicit impls restore thread-safety when T permits it.
1698 unsafe impl<T: Send> Send for IntoIter<T> { }
1699 unsafe impl<T: Sync> Sync for IntoIter<T> { }
1701 impl<T> IntoIter<T> {
1703 /// Drops all items that have not yet been moved and returns the empty vector.
// Exhausts the iterator (dropping remaining elements), then rebuilds a
// length-0 Vec over the same allocation so its capacity is preserved.
1704 #[unstable(feature = "collections")]
1705 pub fn into_inner(mut self) -> Vec<T> {
1707 for _x in self.by_ref() { }
1708 let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
1710 Vec::from_raw_parts(allocation, 0, cap)
// Forward iteration: reads elements out of the buffer with ptr::read and
// advances `ptr`. Zero-sized T is special-cased: the pointer is bumped as an
// integer (offset would be a no-op when size_of::<T>() == 0) and a value is
// conjured by reading from the non-null EMPTY sentinel.
1715 #[stable(feature = "rust1", since = "1.0.0")]
1716 impl<T> Iterator for IntoIter<T> {
1720 fn next(&mut self) -> Option<T> {
1722 if self.ptr == self.end {
1725 if mem::size_of::<T>() == 0 {
1726 // purposefully don't use 'ptr.offset' because for
1727 // vectors with 0-size elements this would return the
1729 self.ptr = mem::transmute(self.ptr as usize + 1);
1731 // Use a non-null pointer value
1732 Some(ptr::read(EMPTY as *mut T))
1735 self.ptr = self.ptr.offset(1);
1737 Some(ptr::read(old))
// Exact size: remaining byte distance divided by element size (guarding the
// zero-size case with a divisor of 1, where the "bytes" are the ZST counter).
1744 fn size_hint(&self) -> (usize, Option<usize>) {
1745 let diff = (self.end as usize) - (self.ptr as usize);
1746 let size = mem::size_of::<T>();
1747 let exact = diff / (if size == 0 {1} else {size});
1748 (exact, Some(exact))
// Reverse iteration: mirrors next(), but decrements `end` and reads the
// element it now points at. Same zero-sized-type handling as the front end.
1752 #[stable(feature = "rust1", since = "1.0.0")]
1753 impl<T> DoubleEndedIterator for IntoIter<T> {
1755 fn next_back(&mut self) -> Option<T> {
1757 if self.end == self.ptr {
1760 if mem::size_of::<T>() == 0 {
1761 // See above for why 'ptr.offset' isn't used
1762 self.end = mem::transmute(self.end as usize - 1);
1764 // Use a non-null pointer value
1765 Some(ptr::read(EMPTY as *mut T))
1767 self.end = self.end.offset(-1);
1769 Some(ptr::read(mem::transmute(self.end)))
// size_hint is exact (see above), so ExactSizeIterator is sound.
1776 #[stable(feature = "rust1", since = "1.0.0")]
1777 impl<T> ExactSizeIterator for IntoIter<T> {}
// Destructor: drop any elements not yet yielded, then free the buffer.
1779 #[unsafe_destructor]
1780 #[stable(feature = "rust1", since = "1.0.0")]
1781 impl<T> Drop for IntoIter<T> {
1782 fn drop(&mut self) {
1783 // destroy the remaining elements
1785 for _x in self.by_ref() {}
1787 dealloc(self.allocation, self.cap);
1793 /// An iterator that drains a vector.
// Borrows the Vec (PhantomData<&'a T>) while moving elements out; the Vec
// itself retains its (now logically empty) buffer.
1794 #[unsafe_no_drop_flag]
1795 #[unstable(feature = "collections",
1796 reason = "recently added as part of collections reform 2")]
1797 pub struct Drain<'a, T:'a> {
1800 marker: PhantomData<&'a T>,
// Forward iteration — same ptr::read walk and zero-sized-type special case
// as IntoIter::next above.
1803 #[stable(feature = "rust1", since = "1.0.0")]
1804 impl<'a, T> Iterator for Drain<'a, T> {
1808 fn next(&mut self) -> Option<T> {
1810 if self.ptr == self.end {
1813 if mem::size_of::<T>() == 0 {
1814 // purposefully don't use 'ptr.offset' because for
1815 // vectors with 0-size elements this would return the
1817 self.ptr = mem::transmute(self.ptr as usize + 1);
1819 // Use a non-null pointer value
1820 Some(ptr::read(EMPTY as *mut T))
1823 self.ptr = self.ptr.offset(1);
1825 Some(ptr::read(old))
// Exact remaining count, guarding division for zero-sized T.
1832 fn size_hint(&self) -> (usize, Option<usize>) {
1833 let diff = (self.end as usize) - (self.ptr as usize);
1834 let size = mem::size_of::<T>();
1835 let exact = diff / (if size == 0 {1} else {size});
1836 (exact, Some(exact))
// Reverse iteration for Drain — mirrors IntoIter::next_back.
1840 #[stable(feature = "rust1", since = "1.0.0")]
1841 impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1843 fn next_back(&mut self) -> Option<T> {
1845 if self.end == self.ptr {
1848 if mem::size_of::<T>() == 0 {
1849 // See above for why 'ptr.offset' isn't used
1850 self.end = mem::transmute(self.end as usize - 1);
1852 // Use a non-null pointer value
1853 Some(ptr::read(EMPTY as *mut T))
1855 self.end = self.end.offset(-1);
1857 Some(ptr::read(self.end))
// size_hint is exact, so ExactSizeIterator holds.
1864 #[stable(feature = "rust1", since = "1.0.0")]
1865 impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
// Destructor: drop any elements the caller didn't consume. No dealloc here —
// the buffer still belongs to the borrowed Vec.
1867 #[unsafe_destructor]
1868 #[stable(feature = "rust1", since = "1.0.0")]
1869 impl<'a, T> Drop for Drain<'a, T> {
1870 fn drop(&mut self) {
1871 // self.ptr == self.end == null if drop has already been called,
1872 // so we can use #[unsafe_no_drop_flag].
1874 // destroy the remaining elements
1875 for _x in self.by_ref() {}
1879 ////////////////////////////////////////////////////////////////////////////////
1880 // Conversion from &[T] to &Vec<T>
1881 ////////////////////////////////////////////////////////////////////////////////
1883 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
// Holds a Vec built over borrowed memory; PhantomData ties it to the
// source slice's lifetime.
1884 #[unstable(feature = "collections")]
1885 pub struct DerefVec<'a, T:'a> {
1887 l: PhantomData<&'a T>,
1890 #[unstable(feature = "collections")]
1891 impl<'a, T> Deref for DerefVec<'a, T> {
1892 type Target = Vec<T>;
1894 fn deref<'b>(&'b self) -> &'b Vec<T> {
1899 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
// The inner Vec aliases memory owned by the original slice, so its normal
// destructor must never run; this Drop impl defuses it.
1900 #[unsafe_destructor]
1901 #[stable(feature = "rust1", since = "1.0.0")]
1902 impl<'a, T> Drop for DerefVec<'a, T> {
1903 fn drop(&mut self) {
1909 /// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
// Unsafe in spirit: fabricates a Vec over the slice's memory (len == cap),
// relying on the Drop impl above to avoid a double free.
1910 #[unstable(feature = "collections")]
1911 pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
1914 x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
1920 ////////////////////////////////////////////////////////////////////////////////
1921 // Partial vec, used for map_in_place
1922 ////////////////////////////////////////////////////////////////////////////////
1924 /// An owned, partially type-converted vector of elements with non-zero size.
1926 /// `T` and `U` must have the same, non-zero size. They must also have the same
1929 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1930 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1931 /// destructed. Additionally the underlying storage of `vec` will be freed.
1932 struct PartialVecNonZeroSized<T,U> {
1940 _marker: PhantomData<U>,
1943 /// An owned, partially type-converted vector of zero-sized elements.
1945 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
// The Cell<(T,U)> marker makes the type invariant in T and U and claims
// drop-responsibility for both element types.
1947 struct PartialVecZeroSized<T,U> {
1950 marker: PhantomData<::core::cell::Cell<(T,U)>>,
// Panic-safety destructor for an in-place T -> U conversion: runs the
// destructors of the already-converted `U`s and the not-yet-converted `T`s,
// then lets `vec` (emptied first) free the raw storage.
1953 #[unsafe_destructor]
1954 impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
1955 fn drop(&mut self) {
1957 // `vec` hasn't been modified until now. As it has a length
1958 // currently, this would run destructors of `T`s which might not be
1959 // there. So at first, set `vec`s length to `0`. This must be done
1960 // at first to remain memory-safe as the destructors of `U` or `T`
1961 // might cause unwinding where `vec`s destructor would be executed.
1962 self.vec.set_len(0);
1964 // We have instances of `U`s and `T`s in `vec`. Destruct them.
1965 while self.start_u != self.end_u {
1966 let _ = ptr::read(self.start_u); // Run a `U` destructor.
1967 self.start_u = self.start_u.offset(1);
1969 while self.start_t != self.end_t {
1970 let _ = ptr::read(self.start_t); // Run a `T` destructor.
1971 self.start_t = self.start_t.offset(1);
1973 // After this destructor ran, the destructor of `vec` will run,
1974 // deallocating the underlying memory.
// Zero-sized counterpart: ZSTs occupy no storage, so "dropping" one is just
// materializing an uninitialized value and letting its destructor run,
// once per outstanding count.
1979 #[unsafe_destructor]
1980 impl<T,U> Drop for PartialVecZeroSized<T,U> {
1981 fn drop(&mut self) {
1983 // Destruct the instances of `T` and `U` this struct owns.
1984 while self.num_t != 0 {
1985 let _: T = mem::uninitialized(); // Run a `T` destructor.
1988 while self.num_u != 0 {
1989 let _: U = mem::uninitialized(); // Run a `U` destructor.
// Unit tests: construction, drop counting, cloning, and zero-sized values.
// NOTE(review): this region is heavily elided — many `#[test]` attributes,
// fn headers, and closing braces are not visible here; verify against the
// complete file before editing.
1999 use core::mem::size_of;
2000 use core::iter::repeat;
// Helper that bumps a shared counter on drop, used to verify destructor runs.
2004 struct DropCounter<'a> {
2008 #[unsafe_destructor]
2009 impl<'a> Drop for DropCounter<'a> {
2010 fn drop(&mut self) {
// as_vec: the wrapper must deref-compare equal to the source slice.
2017 let xs = [1u8, 2u8, 3u8];
2018 assert_eq!(&**as_vec(&xs), xs);
// as_vec must not drop the borrowed elements; each DropCounter fires exactly
// once, when the original slice goes away.
2022 fn test_as_vec_dtor() {
2023 let (mut count_x, mut count_y) = (0, 0);
2025 let xs = &[DropCounter { count: &mut count_x }, DropCounter { count: &mut count_y }];
2026 assert_eq!(as_vec(xs).len(), 2);
2028 assert_eq!(count_x, 1);
2029 assert_eq!(count_y, 1);
// Layout check: Vec is exactly three words (ptr, len, cap).
2033 fn test_small_vec_struct() {
2034 assert!(size_of::<Vec<u8>>() == size_of::<usize>() * 3);
// Dropping a Vec twice (via unsafe_no_drop_flag zeroing) must not
// double-drop elements.
2038 fn test_double_drop() {
2044 let (mut count_x, mut count_y) = (0, 0);
2046 let mut tv = TwoVec {
2050 tv.x.push(DropCounter {count: &mut count_x});
2051 tv.y.push(DropCounter {count: &mut count_y});
2053 // If Vec had a drop flag, here is where it would be zeroed.
2054 // Instead, it should rely on its internal state to prevent
2055 // doing anything significant when dropped multiple times.
2058 // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
2061 assert_eq!(count_x, 1);
2062 assert_eq!(count_y, 1);
// Capacity growth: reserve must produce at least the requested capacity.
2067 let mut v = Vec::new();
2068 assert_eq!(v.capacity(), 0);
2071 assert!(v.capacity() >= 2);
2077 assert!(v.capacity() >= 16);
2079 assert!(v.capacity() >= 32);
2084 assert!(v.capacity() >= 33)
// Extension/equality across incrementally built vectors.
2089 let mut v = Vec::new();
2090 let mut w = Vec::new();
2093 for i in 0..3 { w.push(i) }
2098 for i in 3..10 { w.push(i) }
2104 fn test_slice_from_mut() {
2105 let mut values = vec![1, 2, 3, 4, 5];
2107 let slice = &mut values[2 ..];
2108 assert!(slice == [3, 4, 5]);
2114 assert!(values == [1, 2, 5, 6, 7]);
2118 fn test_slice_to_mut() {
2119 let mut values = vec![1, 2, 3, 4, 5];
2121 let slice = &mut values[.. 2];
2122 assert!(slice == [1, 2]);
2128 assert!(values == [2, 3, 3, 4, 5]);
2132 fn test_split_at_mut() {
2133 let mut values = vec![1, 2, 3, 4, 5];
2135 let (left, right) = values.split_at_mut(2);
2137 let left: &[_] = left;
2138 assert!(&left[..left.len()] == &[1, 2][]);
2145 let right: &[_] = right;
2146 assert!(&right[..right.len()] == &[3, 4, 5][]);
2153 assert!(values == vec![2, 3, 5, 6, 7]);
// Clone must deep-copy: equal contents, disjoint allocations.
2158 let v: Vec<i32> = vec![];
2159 let w = vec!(1, 2, 3);
2161 assert_eq!(v, v.clone());
2165 // they should be disjoint in memory.
2166 assert!(w.as_ptr() != z.as_ptr())
2170 fn test_clone_from() {
2172 let three = vec!(box 1, box 2, box 3);
2173 let two = vec!(box 4, box 5);
2175 v.clone_from(&three);
2176 assert_eq!(v, three);
2179 v.clone_from(&three);
2180 assert_eq!(v, three);
2187 v.clone_from(&three);
2188 assert_eq!(v, three)
2193 let mut vec = vec![1, 2, 3, 4];
2194 vec.retain(|&x| x % 2 == 0);
2195 assert!(vec == vec![2, 4]);
// Zero-sized element handling: len/pop/iteration must all behave normally.
2199 fn zero_sized_values() {
2200 let mut v = Vec::new();
2201 assert_eq!(v.len(), 0);
2203 assert_eq!(v.len(), 1);
2205 assert_eq!(v.len(), 2);
2206 assert_eq!(v.pop(), Some(()));
2207 assert_eq!(v.pop(), Some(()));
2208 assert_eq!(v.pop(), None);
2210 assert_eq!(v.iter().count(), 0);
2212 assert_eq!(v.iter().count(), 1);
2214 assert_eq!(v.iter().count(), 2);
2218 assert_eq!(v.iter_mut().count(), 2);
2220 assert_eq!(v.iter_mut().count(), 3);
2222 assert_eq!(v.iter_mut().count(), 4);
2224 for &mut () in &mut v {}
2225 unsafe { v.set_len(0); }
2226 assert_eq!(v.iter_mut().count(), 0);
// Tests for iterator adapters over Vec, raw-pointer constructors, truncate
// drop behavior, indexing/slicing panics, and map_in_place.
// NOTE(review): fragmentary region — attributes (#[test], #[should_panic])
// and some fn headers are elided from this view.
2230 fn test_partition() {
2231 assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![]));
2232 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![]));
2233 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3]));
2234 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3]));
2238 fn test_zip_unzip() {
2239 let z1 = vec![(1, 4), (2, 5), (3, 6)];
2241 let (left, right): (Vec<_>, Vec<_>) = z1.iter().cloned().unzip();
2243 assert_eq!((1, 4), (left[0], right[0]));
2244 assert_eq!((2, 5), (left[1], right[1]));
2245 assert_eq!((3, 6), (left[2], right[2]));
// from_raw_buf must copy out of both stack and heap buffers.
2249 fn test_unsafe_ptrs() {
2251 // Test on-stack copy-from-buf.
2253 let ptr = a.as_ptr();
2254 let b = Vec::from_raw_buf(ptr, 3);
2255 assert_eq!(b, vec![1, 2, 3]);
2257 // Test on-heap copy-from-buf.
2258 let c = vec![1, 2, 3, 4, 5];
2259 let ptr = c.as_ptr();
2260 let d = Vec::from_raw_buf(ptr, 5);
2261 assert_eq!(d, vec![1, 2, 3, 4, 5]);
// truncate must drop exactly the removed tail elements, and the final drop
// must account for the rest.
2266 fn test_vec_truncate_drop() {
2267 static mut drops: u32 = 0;
2269 impl Drop for Elem {
2270 fn drop(&mut self) {
2271 unsafe { drops += 1; }
2275 let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
2276 assert_eq!(unsafe { drops }, 0);
2278 assert_eq!(unsafe { drops }, 2);
2280 assert_eq!(unsafe { drops }, 5);
// A panicking destructor during truncate must not cause unsoundness.
2285 fn test_vec_truncate_fail() {
2286 struct BadElem(i32);
2287 impl Drop for BadElem {
2288 fn drop(&mut self) {
2289 let BadElem(ref mut x) = *self;
2290 if *x == 0xbadbeef {
2291 panic!("BadElem panic: 0xbadbeef")
2296 let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
2302 let vec = vec![1, 2, 3];
2303 assert!(vec[1] == 2);
// Out-of-bounds indexing and slicing: each of these should panic.
2308 fn test_index_out_of_bounds() {
2309 let vec = vec![1, 2, 3];
2315 fn test_slice_out_of_bounds_1() {
2316 let x = vec![1, 2, 3, 4, 5];
2322 fn test_slice_out_of_bounds_2() {
2323 let x = vec![1, 2, 3, 4, 5];
2329 fn test_slice_out_of_bounds_3() {
2330 let x = vec![1, 2, 3, 4, 5];
2336 fn test_slice_out_of_bounds_4() {
2337 let x = vec![1, 2, 3, 4, 5];
2343 fn test_slice_out_of_bounds_5() {
2344 let x = vec![1, 2, 3, 4, 5];
2350 fn test_swap_remove_empty() {
2351 let mut vec= Vec::<i32>::new();
// IntoIter::into_inner must return the same allocation with len 0.
2356 fn test_move_iter_unwrap() {
2357 let mut vec = Vec::with_capacity(7);
2360 let ptr = vec.as_ptr();
2361 vec = vec.into_iter().into_inner();
2362 assert_eq!(vec.as_ptr(), ptr);
2363 assert_eq!(vec.capacity(), 7);
2364 assert_eq!(vec.len(), 0);
// map_in_place requires same-size source/target types; mismatch should panic.
2369 fn test_map_in_place_incompatible_types_fail() {
2370 let v = vec![0, 1, 2];
2371 v.map_in_place(|_| ());
2375 fn test_map_in_place() {
2376 let v = vec![0, 1, 2];
2377 assert_eq!(v.map_in_place(|i: u32| i as i32 - 1), [-1, 0, 1]);
2381 fn test_map_in_place_zero_sized() {
2382 let v = vec![(), ()];
2383 #[derive(PartialEq, Debug)]
2385 assert_eq!(v.map_in_place(|_| ZeroSized), [ZeroSized, ZeroSized]);
// map_in_place must not run spurious destructors during conversion; drops
// happen only when the resulting vector is dropped.
2389 fn test_map_in_place_zero_drop_count() {
2390 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
2392 #[derive(Clone, PartialEq, Debug)]
2394 impl Drop for Nothing { fn drop(&mut self) { } }
2396 #[derive(Clone, PartialEq, Debug)]
2398 impl Drop for ZeroSized {
2399 fn drop(&mut self) {
2400 DROP_COUNTER.fetch_add(1, Ordering::Relaxed);
2403 const NUM_ELEMENTS: usize = 2;
2404 static DROP_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
2406 let v = repeat(Nothing).take(NUM_ELEMENTS).collect::<Vec<_>>();
2408 DROP_COUNTER.store(0, Ordering::Relaxed);
2410 let v = v.map_in_place(|_| ZeroSized);
2411 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), 0);
2413 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), NUM_ELEMENTS);
2418 let vec = vec![1, 2, 3];
2419 let mut vec2 = vec![];
2423 assert!(vec2 == vec![1, 2, 3]);
2427 fn test_move_items_reverse() {
2428 let vec = vec![1, 2, 3];
2429 let mut vec2 = vec![];
2430 for i in vec.into_iter().rev() {
2433 assert!(vec2 == vec![3, 2, 1]);
2437 fn test_move_items_zero_sized() {
2438 let vec = vec![(), (), ()];
2439 let mut vec2 = vec![];
2443 assert!(vec2 == vec![(), (), ()]);
2447 fn test_drain_items() {
2448 let mut vec = vec![1, 2, 3];
2449 let mut vec2 = vec![];
2450 for i in vec.drain() {
2453 assert_eq!(vec, []);
2454 assert_eq!(vec2, [ 1, 2, 3 ]);
2458 fn test_drain_items_reverse() {
2459 let mut vec = vec![1, 2, 3];
2460 let mut vec2 = vec![];
2461 for i in vec.drain().rev() {
2464 assert_eq!(vec, []);
2465 assert_eq!(vec2, [3, 2, 1]);
2469 fn test_drain_items_zero_sized() {
2470 let mut vec = vec![(), (), ()];
2471 let mut vec2 = vec![];
2472 for i in vec.drain() {
2475 assert_eq!(vec, []);
2476 assert_eq!(vec2, [(), (), ()]);
2480 fn test_into_boxed_slice() {
2481 let xs = vec![1, 2, 3];
2482 let ys = xs.into_boxed_slice();
2483 assert_eq!(ys, [1, 2, 3]);
2488 let mut vec = vec![1, 2, 3];
2489 let mut vec2 = vec![4, 5, 6];
2490 vec.append(&mut vec2);
2491 assert_eq!(vec, vec![1, 2, 3, 4, 5, 6]);
2492 assert_eq!(vec2, vec![]);
2496 fn test_split_off() {
2497 let mut vec = vec![1, 2, 3, 4, 5, 6];
2498 let vec2 = vec.split_off(4);
2499 assert_eq!(vec, vec![1, 2, 3, 4]);
2500 assert_eq!(vec2, vec![5, 6]);
// Benchmarks for Vec construction: new, with_capacity, collect-from-range
// (from_fn), repeat().take() (from_elem), to_vec (from_slice), and
// FromIterator (from_iter), each across 0/10/100/1000 element sizes.
// The asserts inside b.iter closures keep the work from being optimized out.
// NOTE(review): #[bench] attributes and `b.iter(|| { ... })` wrappers are
// elided in this view.
2504 fn bench_new(b: &mut Bencher) {
2506 let v: Vec<u32> = Vec::new();
2507 assert_eq!(v.len(), 0);
2508 assert_eq!(v.capacity(), 0);
2512 fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) {
2513 b.bytes = src_len as u64;
2516 let v: Vec<u32> = Vec::with_capacity(src_len);
2517 assert_eq!(v.len(), 0);
2518 assert_eq!(v.capacity(), src_len);
2523 fn bench_with_capacity_0000(b: &mut Bencher) {
2524 do_bench_with_capacity(b, 0)
2528 fn bench_with_capacity_0010(b: &mut Bencher) {
2529 do_bench_with_capacity(b, 10)
2533 fn bench_with_capacity_0100(b: &mut Bencher) {
2534 do_bench_with_capacity(b, 100)
2538 fn bench_with_capacity_1000(b: &mut Bencher) {
2539 do_bench_with_capacity(b, 1000)
2542 fn do_bench_from_fn(b: &mut Bencher, src_len: usize) {
2543 b.bytes = src_len as u64;
2546 let dst = (0..src_len).collect::<Vec<_>>();
2547 assert_eq!(dst.len(), src_len);
2548 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2553 fn bench_from_fn_0000(b: &mut Bencher) {
2554 do_bench_from_fn(b, 0)
2558 fn bench_from_fn_0010(b: &mut Bencher) {
2559 do_bench_from_fn(b, 10)
2563 fn bench_from_fn_0100(b: &mut Bencher) {
2564 do_bench_from_fn(b, 100)
2568 fn bench_from_fn_1000(b: &mut Bencher) {
2569 do_bench_from_fn(b, 1000)
2572 fn do_bench_from_elem(b: &mut Bencher, src_len: usize) {
2573 b.bytes = src_len as u64;
2576 let dst: Vec<usize> = repeat(5).take(src_len).collect();
2577 assert_eq!(dst.len(), src_len);
2578 assert!(dst.iter().all(|x| *x == 5));
2583 fn bench_from_elem_0000(b: &mut Bencher) {
2584 do_bench_from_elem(b, 0)
2588 fn bench_from_elem_0010(b: &mut Bencher) {
2589 do_bench_from_elem(b, 10)
2593 fn bench_from_elem_0100(b: &mut Bencher) {
2594 do_bench_from_elem(b, 100)
2598 fn bench_from_elem_1000(b: &mut Bencher) {
2599 do_bench_from_elem(b, 1000)
2602 fn do_bench_from_slice(b: &mut Bencher, src_len: usize) {
2603 let src: Vec<_> = FromIterator::from_iter(0..src_len);
2605 b.bytes = src_len as u64;
2608 let dst = src.clone()[..].to_vec();
2609 assert_eq!(dst.len(), src_len);
2610 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2615 fn bench_from_slice_0000(b: &mut Bencher) {
2616 do_bench_from_slice(b, 0)
2620 fn bench_from_slice_0010(b: &mut Bencher) {
2621 do_bench_from_slice(b, 10)
2625 fn bench_from_slice_0100(b: &mut Bencher) {
2626 do_bench_from_slice(b, 100)
2630 fn bench_from_slice_1000(b: &mut Bencher) {
2631 do_bench_from_slice(b, 1000)
2634 fn do_bench_from_iter(b: &mut Bencher, src_len: usize) {
2635 let src: Vec<_> = FromIterator::from_iter(0..src_len);
2637 b.bytes = src_len as u64;
2640 let dst: Vec<_> = FromIterator::from_iter(src.clone().into_iter());
2641 assert_eq!(dst.len(), src_len);
2642 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2647 fn bench_from_iter_0000(b: &mut Bencher) {
2648 do_bench_from_iter(b, 0)
2652 fn bench_from_iter_0010(b: &mut Bencher) {
2653 do_bench_from_iter(b, 10)
2657 fn bench_from_iter_0100(b: &mut Bencher) {
2658 do_bench_from_iter(b, 100)
2662 fn bench_from_iter_1000(b: &mut Bencher) {
2663 do_bench_from_iter(b, 1000)
// Benchmarks for growing an existing Vec: extend, push_all (copy from
// slice), and "push_all_move" (extend from a consuming iterator), across
// (dst_len, src_len) combinations. Asserts verify correctness of each run.
// NOTE(review): #[bench] attributes and b.iter wrappers are elided here.
2666 fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) {
2667 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2668 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2670 b.bytes = src_len as u64;
2673 let mut dst = dst.clone();
2674 dst.extend(src.clone().into_iter());
2675 assert_eq!(dst.len(), dst_len + src_len);
2676 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2681 fn bench_extend_0000_0000(b: &mut Bencher) {
2682 do_bench_extend(b, 0, 0)
2686 fn bench_extend_0000_0010(b: &mut Bencher) {
2687 do_bench_extend(b, 0, 10)
2691 fn bench_extend_0000_0100(b: &mut Bencher) {
2692 do_bench_extend(b, 0, 100)
2696 fn bench_extend_0000_1000(b: &mut Bencher) {
2697 do_bench_extend(b, 0, 1000)
2701 fn bench_extend_0010_0010(b: &mut Bencher) {
2702 do_bench_extend(b, 10, 10)
2706 fn bench_extend_0100_0100(b: &mut Bencher) {
2707 do_bench_extend(b, 100, 100)
2711 fn bench_extend_1000_1000(b: &mut Bencher) {
2712 do_bench_extend(b, 1000, 1000)
2715 fn do_bench_push_all(b: &mut Bencher, dst_len: usize, src_len: usize) {
2716 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2717 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2719 b.bytes = src_len as u64;
2722 let mut dst = dst.clone();
2724 assert_eq!(dst.len(), dst_len + src_len);
2725 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2730 fn bench_push_all_0000_0000(b: &mut Bencher) {
2731 do_bench_push_all(b, 0, 0)
2735 fn bench_push_all_0000_0010(b: &mut Bencher) {
2736 do_bench_push_all(b, 0, 10)
2740 fn bench_push_all_0000_0100(b: &mut Bencher) {
2741 do_bench_push_all(b, 0, 100)
2745 fn bench_push_all_0000_1000(b: &mut Bencher) {
2746 do_bench_push_all(b, 0, 1000)
2750 fn bench_push_all_0010_0010(b: &mut Bencher) {
2751 do_bench_push_all(b, 10, 10)
2755 fn bench_push_all_0100_0100(b: &mut Bencher) {
2756 do_bench_push_all(b, 100, 100)
2760 fn bench_push_all_1000_1000(b: &mut Bencher) {
2761 do_bench_push_all(b, 1000, 1000)
2764 fn do_bench_push_all_move(b: &mut Bencher, dst_len: usize, src_len: usize) {
2765 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2766 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2768 b.bytes = src_len as u64;
2771 let mut dst = dst.clone();
2772 dst.extend(src.clone().into_iter());
2773 assert_eq!(dst.len(), dst_len + src_len);
2774 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2779 fn bench_push_all_move_0000_0000(b: &mut Bencher) {
2780 do_bench_push_all_move(b, 0, 0)
2784 fn bench_push_all_move_0000_0010(b: &mut Bencher) {
2785 do_bench_push_all_move(b, 0, 10)
2789 fn bench_push_all_move_0000_0100(b: &mut Bencher) {
2790 do_bench_push_all_move(b, 0, 100)
2794 fn bench_push_all_move_0000_1000(b: &mut Bencher) {
2795 do_bench_push_all_move(b, 0, 1000)
2799 fn bench_push_all_move_0010_0010(b: &mut Bencher) {
2800 do_bench_push_all_move(b, 10, 10)
2804 fn bench_push_all_move_0100_0100(b: &mut Bencher) {
2805 do_bench_push_all_move(b, 100, 100)
2809 fn bench_push_all_move_1000_1000(b: &mut Bencher) {
2810 do_bench_push_all_move(b, 1000, 1000)
// Benchmarks for clone and clone_from. clone_from is run `times` times per
// iteration across (dst_len, src_len) pairs, covering growth, shrink, and
// equal-size reuse of the destination allocation.
// NOTE(review): #[bench] attributes, b.iter wrappers, and the final bench's
// closing braces are elided from this view.
2813 fn do_bench_clone(b: &mut Bencher, src_len: usize) {
2814 let src: Vec<usize> = FromIterator::from_iter(0..src_len);
2816 b.bytes = src_len as u64;
2819 let dst = src.clone();
2820 assert_eq!(dst.len(), src_len);
2821 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2826 fn bench_clone_0000(b: &mut Bencher) {
2827 do_bench_clone(b, 0)
2831 fn bench_clone_0010(b: &mut Bencher) {
2832 do_bench_clone(b, 10)
2836 fn bench_clone_0100(b: &mut Bencher) {
2837 do_bench_clone(b, 100)
2841 fn bench_clone_1000(b: &mut Bencher) {
2842 do_bench_clone(b, 1000)
2845 fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) {
2846 let dst: Vec<_> = FromIterator::from_iter(0..src_len);
2847 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2849 b.bytes = (times * src_len) as u64;
2852 let mut dst = dst.clone();
2855 dst.clone_from(&src);
2857 assert_eq!(dst.len(), src_len);
2858 assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x));
2864 fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
2865 do_bench_clone_from(b, 1, 0, 0)
2869 fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
2870 do_bench_clone_from(b, 1, 0, 10)
2874 fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
2875 do_bench_clone_from(b, 1, 0, 100)
2879 fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
2880 do_bench_clone_from(b, 1, 0, 1000)
2884 fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
2885 do_bench_clone_from(b, 1, 10, 10)
2889 fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
2890 do_bench_clone_from(b, 1, 100, 100)
2894 fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
2895 do_bench_clone_from(b, 1, 1000, 1000)
2899 fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
2900 do_bench_clone_from(b, 1, 10, 100)
2904 fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
2905 do_bench_clone_from(b, 1, 100, 1000)
2909 fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
2910 do_bench_clone_from(b, 1, 10, 0)
2914 fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
2915 do_bench_clone_from(b, 1, 100, 10)
2919 fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
2920 do_bench_clone_from(b, 1, 1000, 100)
2924 fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
2925 do_bench_clone_from(b, 10, 0, 0)
2929 fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
2930 do_bench_clone_from(b, 10, 0, 10)
2934 fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
2935 do_bench_clone_from(b, 10, 0, 100)
2939 fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
2940 do_bench_clone_from(b, 10, 0, 1000)
2944 fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
2945 do_bench_clone_from(b, 10, 10, 10)
2949 fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
2950 do_bench_clone_from(b, 10, 100, 100)
2954 fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
2955 do_bench_clone_from(b, 10, 1000, 1000)
2959 fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
2960 do_bench_clone_from(b, 10, 10, 100)
2964 fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
2965 do_bench_clone_from(b, 10, 100, 1000)
2969 fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
2970 do_bench_clone_from(b, 10, 10, 0)
2974 fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
2975 do_bench_clone_from(b, 10, 100, 10)
2979 fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
2980 do_bench_clone_from(b, 10, 1000, 100)