1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type with heap-allocated contents, written `Vec<T>` but pronounced 'vector.'
13 //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end).
17 //! Explicitly creating a `Vec<T>` with `new()`:
20 //! let xs: Vec<i32> = Vec::new();
23 //! Using the `vec!` macro:
26 //! let ys: Vec<i32> = vec![];
28 //! let zs = vec![1i32, 2, 3, 4, 5];
34 //! let mut xs = vec![1i32, 2];
42 //! let mut xs = vec![1i32, 2];
44 //! let two = xs.pop();
47 #![stable(feature = "rust1", since = "1.0.0")]
51 use alloc::boxed::Box;
52 use alloc::heap::{EMPTY, allocate, reallocate, deallocate};
54 use core::cmp::{Ordering};
55 use core::default::Default;
57 use core::hash::{self, Hash};
58 use core::intrinsics::assume;
59 use core::iter::{repeat, FromIterator, IntoIterator};
60 use core::marker::PhantomData;
62 use core::num::{Int, UnsignedInt};
63 use core::ops::{Index, IndexMut, Deref, Add};
66 use core::ptr::Unique;
67 use core::raw::Slice as RawSlice;
71 use borrow::{Cow, IntoCow};
73 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
78 /// let mut vec = Vec::new();
82 /// assert_eq!(vec.len(), 2);
83 /// assert_eq!(vec[0], 1);
85 /// assert_eq!(vec.pop(), Some(2));
86 /// assert_eq!(vec.len(), 1);
89 /// assert_eq!(vec[0], 7);
91 /// vec.push_all(&[1, 2, 3]);
93 /// for x in vec.iter() {
94 /// println!("{}", x);
96 /// assert_eq!(vec, [7, 1, 2, 3]);
99 /// The `vec!` macro is provided to make initialization more convenient:
102 /// let mut vec = vec![1, 2, 3];
104 /// assert_eq!(vec, [1, 2, 3, 4]);
107 /// Use a `Vec<T>` as an efficient stack:
110 /// let mut stack = Vec::new();
117 /// let top = match stack.pop() {
118 /// None => break, // empty
121 /// // Prints 3, 2, 1
122 /// println!("{}", top);
126 /// # Capacity and reallocation
128 /// The capacity of a vector is the amount of space allocated for any future elements that will be
129 /// added onto the vector. This is not to be confused with the *length* of a vector, which
130 /// specifies the number of actual elements within the vector. If a vector's length exceeds its
131 /// capacity, its capacity will automatically be increased, but its elements will have to be
134 /// For example, a vector with capacity 10 and length 0 would be an empty vector with space for 10
135 /// more elements. Pushing 10 or fewer elements onto the vector will not change its capacity or
136 /// cause reallocation to occur. However, if the vector's length is increased to 11, it will have
137 /// to reallocate, which can be slow. For this reason, it is recommended to use
138 /// `Vec::with_capacity` whenever possible to specify how big the vector is expected to get.
139 #[unsafe_no_drop_flag]
140 #[stable(feature = "rust1", since = "1.0.0")]
// SAFETY: Vec<T> uniquely owns its heap buffer, so sending/sharing a Vec
// across threads is sound exactly when sending/sharing its elements is.
147 unsafe impl<T: Send> Send for Vec<T> { }
148 unsafe impl<T: Sync> Sync for Vec<T> { }
150 ////////////////////////////////////////////////////////////////////////////////
152 ////////////////////////////////////////////////////////////////////////////////
155 /// Constructs a new, empty `Vec<T>`.
157 /// The vector will not allocate until elements are pushed onto it.
162 /// let mut vec: Vec<i32> = Vec::new();
165 #[stable(feature = "rust1", since = "1.0.0")]
166 pub fn new() -> Vec<T> {
167 // We want ptr to never be NULL so instead we set it to some arbitrary
168 // non-null value which is fine since we never call deallocate on the ptr
169 // if cap is 0. The reason for this is because the pointer of a slice
170 // being NULL would break the null pointer optimization for enums.
171 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
174 /// Constructs a new, empty `Vec<T>` with the specified capacity.
176 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
177 /// `capacity` is 0, the vector will not allocate.
179 /// It is important to note that this function does not specify the *length* of the returned
180 /// vector, but only the *capacity*. (For an explanation of the difference between length and
181 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.)
186 /// let mut vec: Vec<_> = Vec::with_capacity(10);
188 /// // The vector contains no items, even though it has capacity for more
189 /// assert_eq!(vec.len(), 0);
191 /// // These are all done without reallocating...
196 /// // ...but this may make the vector reallocate
200 #[stable(feature = "rust1", since = "1.0.0")]
201 pub fn with_capacity(capacity: usize) -> Vec<T> {
// Zero-sized element types never touch the allocator; capacity is
// reported as usize::MAX since any number of them "fits".
202 if mem::size_of::<T>() == 0 {
203 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
204 } else if capacity == 0 {
// checked_mul guards the byte-size computation against overflow
// before the request reaches the allocator.
207 let size = capacity.checked_mul(mem::size_of::<T>())
208 .expect("capacity overflow")
// A null return from allocate() signals out-of-memory: abort.
209 let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
210 if ptr.is_null() { ::alloc::oom() }
211 unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
215 /// Creates a `Vec<T>` directly from the raw components of another vector.
217 /// This is highly unsafe, due to the number of invariants that aren't checked.
226 /// let mut v = vec![1, 2, 3];
228 /// // Pull out the various important pieces of information about `v`
229 /// let p = v.as_mut_ptr();
230 /// let len = v.len();
231 /// let cap = v.capacity();
234 /// // Cast `v` into the void: no destructor run, so we are in
235 /// // complete control of the allocation to which `p` points.
238 /// // Overwrite memory with 4, 5, 6
239 /// for i in 0..len as isize {
240 /// ptr::write(p.offset(i), 4 + i);
243 /// // Put everything back together into a Vec
244 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
245 /// assert_eq!(rebuilt, [4, 5, 6]);
249 #[stable(feature = "rust1", since = "1.0.0")]
250 pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
251 capacity: usize) -> Vec<T> {
// Wrap the raw pointer in Unique to reclaim ownership semantics.
// NOTE(review): the len/cap fields of this struct literal are elided
// from this excerpt (original lines omitted).
253 ptr: Unique::new(ptr),
259 /// Creates a vector by copying the elements from a raw pointer.
261 /// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation
262 /// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector
263 /// without cloning, as if `ptr::read()` were called on them.
265 #[unstable(feature = "collections",
266 reason = "may be better expressed via composition")]
267 pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> {
268 let mut dst = Vec::with_capacity(elts);
// NOTE(review): this era's ptr::copy_nonoverlapping takes
// (dst, src, len) — destination first; verify against the toolchain.
270 ptr::copy_nonoverlapping(dst.as_mut_ptr(), ptr, elts);
274 /// Returns the number of elements the vector can hold without
280 /// let vec: Vec<i32> = Vec::with_capacity(10);
281 /// assert_eq!(vec.capacity(), 10);
284 #[stable(feature = "rust1", since = "1.0.0")]
285 pub fn capacity(&self) -> usize {
289 /// Reserves capacity for at least `additional` more elements to be inserted in the given
290 /// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations.
294 /// Panics if the new capacity overflows `usize`.
299 /// let mut vec = vec![1];
301 /// assert!(vec.capacity() >= 11);
303 #[stable(feature = "rust1", since = "1.0.0")]
304 pub fn reserve(&mut self, additional: usize) {
// Only grow when the spare capacity (cap - len) is insufficient.
305 if self.cap - self.len < additional {
306 let err_msg = "Vec::reserve: `usize` overflow";
// Amortized growth: round the required capacity up to the next
// power of two; both the add and the rounding are overflow-checked.
307 let new_cap = self.len.checked_add(additional).expect(err_msg)
308 .checked_next_power_of_two().expect(err_msg);
309 self.grow_capacity(new_cap);
313 /// Reserves the minimum capacity for exactly `additional` more elements to
314 /// be inserted in the given `Vec<T>`. Does nothing if the capacity is already
317 /// Note that the allocator may give the collection more space than it
318 /// requests. Therefore capacity can not be relied upon to be precisely
319 /// minimal. Prefer `reserve` if future insertions are expected.
323 /// Panics if the new capacity overflows `usize`.
328 /// let mut vec = vec![1];
329 /// vec.reserve_exact(10);
330 /// assert!(vec.capacity() >= 11);
332 #[stable(feature = "rust1", since = "1.0.0")]
333 pub fn reserve_exact(&mut self, additional: usize) {
334 if self.cap - self.len < additional {
// Unlike `reserve`, no power-of-two rounding: grow to exactly
// len + additional (overflow still checked).
335 match self.len.checked_add(additional) {
336 None => panic!("Vec::reserve: `usize` overflow"),
337 Some(new_cap) => self.grow_capacity(new_cap)
342 /// Shrinks the capacity of the vector as much as possible.
344 /// It will drop down as close as possible to the length but the allocator
345 /// may still inform the vector that there is space for a few more elements.
350 /// let mut vec = Vec::with_capacity(10);
351 /// vec.push_all(&[1, 2, 3]);
352 /// assert_eq!(vec.capacity(), 10);
353 /// vec.shrink_to_fit();
354 /// assert!(vec.capacity() >= 3);
356 #[stable(feature = "rust1", since = "1.0.0")]
357 pub fn shrink_to_fit(&mut self) {
// Zero-sized types own no allocation, so there is nothing to shrink.
358 if mem::size_of::<T>() == 0 { return }
// NOTE(review): the branch condition guarding this full deallocation
// (presumably the `self.len == 0` path) is elided from this excerpt.
363 dealloc(*self.ptr, self.cap)
367 } else if self.cap != self.len {
369 // Overflow check is unnecessary as the vector is already at
371 let ptr = reallocate(*self.ptr as *mut u8,
372 self.cap * mem::size_of::<T>(),
373 self.len * mem::size_of::<T>(),
374 mem::min_align_of::<T>()) as *mut T;
375 if ptr.is_null() { ::alloc::oom() }
376 self.ptr = Unique::new(ptr);
382 /// Convert the vector into Box<[T]>.
384 /// Note that this will drop any excess capacity. Calling this and
385 /// converting back to a vector with `into_vec()` is equivalent to calling
386 /// `shrink_to_fit()`.
387 #[unstable(feature = "collections")]
388 pub fn into_boxed_slice(mut self) -> Box<[T]> {
// After shrinking, cap == len, so the slice view covers the entire
// allocation and its ownership can be handed to the Box.
389 self.shrink_to_fit();
391 let xs: Box<[T]> = Box::from_raw(&mut *self);
397 /// Shorten a vector, dropping excess elements.
399 /// If `len` is greater than the vector's current length, this has no
405 /// let mut vec = vec![1, 2, 3, 4];
407 /// assert_eq!(vec, [1, 2]);
409 #[stable(feature = "rust1", since = "1.0.0")]
410 pub fn truncate(&mut self, len: usize) {
412 // drop any extra elements
413 while len < self.len {
414 // decrement len before the read(), so a panic on Drop doesn't
415 // re-drop the just-failed value.
// ptr::read moves the tail element into a temporary whose drop
// runs the element's destructor.
417 ptr::read(self.get_unchecked(self.len));
422 /// Returns a mutable slice of the elements of `self`.
427 /// fn foo(slice: &mut [i32]) {}
429 /// let mut vec = vec![1, 2];
430 /// foo(vec.as_mut_slice());
433 #[stable(feature = "rust1", since = "1.0.0")]
434 pub fn as_mut_slice(&mut self) -> &mut [T] {
// The buffer pointer is never null (see Vec::new); telling the
// optimizer so lets it elide downstream null checks.
437 assume(!ptr.is_null());
438 mem::transmute(RawSlice {
445 /// Creates a consuming iterator, that is, one that moves each value out of
446 /// the vector (from start to end). The vector cannot be used after calling
452 /// let v = vec!["a".to_string(), "b".to_string()];
453 /// for s in v.into_iter() {
454 /// // s has type String, not &String
455 /// println!("{}", s);
459 #[stable(feature = "rust1", since = "1.0.0")]
460 pub fn into_iter(self) -> IntoIter<T> {
463 assume(!ptr.is_null());
465 let begin = ptr as *const T;
// For zero-sized T the "end" pointer is fabricated by integer
// arithmetic (address + count), since offsetting a pointer to a
// zero-sized type would not advance it.
466 let end = if mem::size_of::<T>() == 0 {
467 (ptr as usize + self.len()) as *const T
469 ptr.offset(self.len() as isize) as *const T
472 IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
476 /// Sets the length of a vector.
478 /// This will explicitly set the size of the vector, without actually
479 /// modifying its buffers, so it is up to the caller to ensure that the
480 /// vector is actually the specified size.
485 /// let mut v = vec![1, 2, 3, 4];
491 #[stable(feature = "rust1", since = "1.0.0")]
492 pub unsafe fn set_len(&mut self, len: usize) {
496 /// Removes an element from anywhere in the vector and return it, replacing
497 /// it with the last element.
499 /// This does not preserve ordering, but is O(1).
503 /// Panics if `index` is out of bounds.
508 /// let mut v = vec!["foo", "bar", "baz", "qux"];
510 /// assert_eq!(v.swap_remove(1), "bar");
511 /// assert_eq!(v, ["foo", "qux", "baz"]);
513 /// assert_eq!(v.swap_remove(0), "foo");
514 /// assert_eq!(v, ["baz", "qux"]);
517 #[stable(feature = "rust1", since = "1.0.0")]
518 pub fn swap_remove(&mut self, index: usize) -> T {
519 let length = self.len();
// Move the victim to the end, then pop it off; the slice `swap`
// performs the bounds check that makes an out-of-range index panic.
520 self.swap(index, length - 1);
524 /// Inserts an element at position `index` within the vector, shifting all
525 /// elements after position `i` one position to the right.
529 /// Panics if `index` is not between `0` and the vector's length (both
530 /// bounds inclusive).
535 /// let mut vec = vec![1, 2, 3];
536 /// vec.insert(1, 4);
537 /// assert_eq!(vec, [1, 4, 2, 3]);
538 /// vec.insert(4, 5);
539 /// assert_eq!(vec, [1, 4, 2, 3, 5]);
541 #[stable(feature = "rust1", since = "1.0.0")]
542 pub fn insert(&mut self, index: usize, element: T) {
543 let len = self.len();
544 assert!(index <= len);
545 // space for the new element
548 unsafe { // infallible
549 // The spot to put the new value
551 let p = self.as_mut_ptr().offset(index as isize);
552 // Shift everything over to make space. (Duplicating the
553 // `index`th element into two consecutive places.)
// NOTE(review): this era's ptr::copy takes (dst, src, len) —
// destination first; verify against the toolchain in use.
554 ptr::copy(p.offset(1), &*p, len - index);
555 // Write it in, overwriting the first copy of the `index`th
557 ptr::write(&mut *p, element);
// Only now does the new element logically join the vector.
559 self.set_len(len + 1);
563 /// Removes and returns the element at position `index` within the vector,
564 /// shifting all elements after position `index` one position to the left.
568 /// Panics if `i` is out of bounds.
573 /// let mut v = vec![1, 2, 3];
574 /// assert_eq!(v.remove(1), 2);
575 /// assert_eq!(v, [1, 3]);
577 #[stable(feature = "rust1", since = "1.0.0")]
578 pub fn remove(&mut self, index: usize) -> T {
579 let len = self.len();
580 assert!(index < len);
581 unsafe { // infallible
584 // the place we are taking from.
585 let ptr = self.as_mut_ptr().offset(index as isize);
586 // copy it out, unsafely having a copy of the value on
587 // the stack and in the vector at the same time.
588 ret = ptr::read(ptr);
590 // Shift everything down to fill in that spot.
591 ptr::copy(ptr, &*ptr.offset(1), len - index - 1);
// Shrinking len makes the now-duplicate tail slot unreachable.
593 self.set_len(len - 1);
598 /// Retains only the elements specified by the predicate.
600 /// In other words, remove all elements `e` such that `f(&e)` returns false.
601 /// This method operates in place and preserves the order of the retained
607 /// let mut vec = vec![1, 2, 3, 4];
608 /// vec.retain(|&x| x%2 == 0);
609 /// assert_eq!(vec, [2, 4]);
611 #[stable(feature = "rust1", since = "1.0.0")]
612 pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
613 let len = self.len();
// NOTE(review): the compaction loop computing `del` (the count of
// rejected elements) is elided from this excerpt.
627 self.truncate(len - del);
631 /// Appends an element to the back of a collection.
635 /// Panics if the number of elements in the vector overflows a `usize`.
640 /// let mut vec = vec!(1, 2);
642 /// assert_eq!(vec, [1, 2, 3]);
645 #[stable(feature = "rust1", since = "1.0.0")]
646 pub fn push(&mut self, value: T) {
647 if mem::size_of::<T>() == 0 {
648 // zero-size types consume no memory, so we can't rely on the
649 // address space running out
650 self.len = self.len.checked_add(1).expect("length overflow");
651 unsafe { mem::forget(value); }
// Full buffer: grow geometrically (doubling), starting from room
// for at least two elements; overflow of the byte size panics.
654 if self.len == self.cap {
655 let old_size = self.cap * mem::size_of::<T>();
656 let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
657 if old_size > size { panic!("capacity overflow") }
659 let ptr = alloc_or_realloc(*self.ptr, old_size, size);
660 if ptr.is_null() { ::alloc::oom() }
661 self.ptr = Unique::new(ptr);
663 self.cap = max(self.cap, 2) * 2;
667 let end = (*self.ptr).offset(self.len as isize);
668 ptr::write(&mut *end, value);
673 /// Removes the last element from a vector and returns it, or `None` if it is empty.
678 /// let mut vec = vec![1, 2, 3];
679 /// assert_eq!(vec.pop(), Some(3));
680 /// assert_eq!(vec, [1, 2]);
683 #[stable(feature = "rust1", since = "1.0.0")]
684 pub fn pop(&mut self) -> Option<T> {
// Move the last element out with a raw read; the (elided) code
// around this presumably decrements len first — TODO confirm.
690 Some(ptr::read(self.get_unchecked(self.len())))
695 /// Moves all the elements of `other` into `Self`, leaving `other` empty.
699 /// Panics if the number of elements in the vector overflows a `usize`.
704 /// let mut vec = vec![1, 2, 3];
705 /// let mut vec2 = vec![4, 5, 6];
706 /// vec.append(&mut vec2);
707 /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
708 /// assert_eq!(vec2, []);
711 #[unstable(feature = "collections",
712 reason = "new API, waiting for dust to settle")]
713 pub fn append(&mut self, other: &mut Self) {
714 if mem::size_of::<T>() == 0 {
715 // zero-size types consume no memory, so we can't rely on the
716 // address space running out
717 self.len = self.len.checked_add(other.len()).expect("length overflow");
// Zeroing other's len "moves" its (zero-sized) contents without
// ever running a destructor twice.
718 unsafe { other.set_len(0) }
721 self.reserve(other.len());
722 let len = self.len();
// Bulk-move the elements with a memcpy; logical ownership transfers
// via the len adjustments below.
724 ptr::copy_nonoverlapping(
725 self.get_unchecked_mut(len),
730 self.len += other.len();
731 unsafe { other.set_len(0); }
734 /// Creates a draining iterator that clears the `Vec` and iterates over
735 /// the removed items from start to end.
740 /// let mut v = vec!["a".to_string(), "b".to_string()];
741 /// for s in v.drain() {
742 /// // s has type String, not &String
743 /// println!("{}", s);
745 /// assert!(v.is_empty());
748 #[unstable(feature = "collections",
749 reason = "matches collection reform specification, waiting for dust to settle")]
750 pub fn drain(&mut self) -> Drain<T> {
752 let begin = *self.ptr as *const T;
// Same zero-sized-type trick as into_iter: fabricate `end` with
// integer arithmetic because offsetting a ZST pointer is a no-op.
753 let end = if mem::size_of::<T>() == 0 {
754 (*self.ptr as usize + self.len()) as *const T
756 (*self.ptr).offset(self.len() as isize) as *const T
767 /// Clears the vector, removing all values.
772 /// let mut v = vec![1, 2, 3];
776 /// assert!(v.is_empty());
779 #[stable(feature = "rust1", since = "1.0.0")]
780 pub fn clear(&mut self) {
// Trivial accessors over the cached length field; O(1), no allocation.
784 /// Returns the number of elements in the vector.
789 /// let a = vec![1, 2, 3];
790 /// assert_eq!(a.len(), 3);
793 #[stable(feature = "rust1", since = "1.0.0")]
794 pub fn len(&self) -> usize { self.len }
796 /// Returns `true` if the vector contains no elements.
801 /// let mut v = Vec::new();
802 /// assert!(v.is_empty());
805 /// assert!(!v.is_empty());
807 #[stable(feature = "rust1", since = "1.0.0")]
808 pub fn is_empty(&self) -> bool { self.len() == 0 }
810 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
811 /// size and in case they are not zero-sized the same minimal alignment.
815 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
816 /// have differing minimal alignments.
821 /// let v = vec![0, 1, 2];
822 /// let w = v.map_in_place(|i| i + 3);
823 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
825 /// #[derive(PartialEq, Debug)]
826 /// struct Newtype(u8);
827 /// let bytes = vec![0x11, 0x22];
828 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
829 /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice());
831 #[unstable(feature = "collections",
832 reason = "API may change to provide stronger guarantees")]
833 pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
// Converts elements in place, reusing the original allocation; the
// PartialVec guard types below keep the buffer consistent if the
// user's closure panics mid-conversion. NOTE(review): numerous lines
// of this function are elided from this excerpt.
834 // FIXME: Assert statically that the types `T` and `U` have the same
836 assert!(mem::size_of::<T>() == mem::size_of::<U>());
840 if mem::size_of::<T>() != 0 {
841 // FIXME: Assert statically that the types `T` and `U` have the
842 // same minimal alignment in case they are not zero-sized.
844 // These asserts are necessary because the `min_align_of` of the
845 // types are passed to the allocator by `Vec`.
846 assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
848 // This `as isize` cast is safe, because the size of the elements of the
849 // vector is not 0, and:
851 // 1) If the size of the elements in the vector is 1, the `isize` may
852 // overflow, but it has the correct bit pattern so that the
853 // `.offset()` function will work.
856 // Address space 0x0-0xF.
857 // `u8` array at: 0x1.
858 // Size of `u8` array: 0x8.
859 // Calculated `offset`: -0x8.
860 // After `array.offset(offset)`: 0x9.
861 // (0x1 + 0x8 = 0x1 - 0x8)
863 // 2) If the size of the elements in the vector is >1, the `usize` ->
864 // `isize` conversion can't overflow.
865 let offset = vec.len() as isize;
866 let start = vec.as_mut_ptr();
868 let mut pv = PartialVecNonZeroSized {
872 // This points inside the vector, as the vector has length
874 end_t: unsafe { start.offset(offset) },
875 start_u: start as *mut U,
876 end_u: start as *mut U,
878 _marker: PhantomData,
// Invariant maintained by the loop: [start_u, end_u) holds
// converted `U`s, [start_t, end_t) still holds unconverted `T`s.
889 while pv.end_u as *mut T != pv.end_t {
893 // +-+-+-+-+-+-+-+-+-+
894 // |U|...|U|T|T|...|T|
895 // +-+-+-+-+-+-+-+-+-+
899 let t = ptr::read(pv.start_t);
902 // +-+-+-+-+-+-+-+-+-+
903 // |U|...|U|X|T|...|T|
904 // +-+-+-+-+-+-+-+-+-+
907 // We must not panic here, one cell is marked as `T`
908 // although it is not `T`.
910 pv.start_t = pv.start_t.offset(1);
913 // +-+-+-+-+-+-+-+-+-+
914 // |U|...|U|X|T|...|T|
915 // +-+-+-+-+-+-+-+-+-+
918 // We may panic again.
920 // The function given by the user might panic.
923 ptr::write(pv.end_u, u);
926 // +-+-+-+-+-+-+-+-+-+
927 // |U|...|U|U|T|...|T|
928 // +-+-+-+-+-+-+-+-+-+
931 // We should not panic here, because that would leak the `U`
932 // pointed to by `end_u`.
934 pv.end_u = pv.end_u.offset(1);
937 // +-+-+-+-+-+-+-+-+-+
938 // |U|...|U|U|T|...|T|
939 // +-+-+-+-+-+-+-+-+-+
942 // We may panic again.
954 // Extract `vec` and prevent the destructor of
955 // `PartialVecNonZeroSized` from running. Note that none of the
956 // function calls can panic, thus no resources can be leaked (as the
957 // `vec` member of `PartialVec` is the only one which holds
958 // allocations -- and it is returned from this function. None of
961 let vec_len = pv.vec.len();
962 let vec_cap = pv.vec.capacity();
963 let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
965 Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
968 // Put the `Vec` into the `PartialVecZeroSized` structure and
969 // prevent the destructor of the `Vec` from running. Since the
970 // `Vec` contained zero-sized objects, it did not allocate, so we
971 // are not leaking memory here.
972 let mut pv = PartialVecZeroSized::<T,U> {
977 unsafe { mem::forget(vec); }
// Zero-sized path: no memory exists; just count conversions.
979 while pv.num_t != 0 {
981 // Create a `T` out of thin air and decrement `num_t`. This
982 // must not panic between these steps, as otherwise a
983 // destructor of `T` which doesn't exist runs.
984 let t = mem::uninitialized();
987 // The function given by the user might panic.
990 // Forget the `U` and increment `num_u`. This increment
991 // cannot overflow the `usize` as we only do this for a
992 // number of times that fits into a `usize` (and start with
993 // `0`). Again, we should not panic between these steps.
998 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
999 // destructor of the latter will not run. None of this can panic.
1000 let mut result = Vec::new();
1002 result.set_len(pv.num_u);
1009 /// Splits the collection into two at the given index.
1011 /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`,
1012 /// and the returned `Self` contains elements `[at, len)`.
1014 /// Note that the capacity of `self` does not change.
1018 /// Panics if `at > len`.
1023 /// let mut vec = vec![1,2,3];
1024 /// let vec2 = vec.split_off(1);
1025 /// assert_eq!(vec, [1]);
1026 /// assert_eq!(vec2, [2, 3]);
1029 #[unstable(feature = "collections",
1030 reason = "new API, waiting for dust to settle")]
1031 pub fn split_off(&mut self, at: usize) -> Self {
1032 assert!(at <= self.len(), "`at` out of bounds");
1034 let other_len = self.len - at;
1035 let mut other = Vec::with_capacity(other_len);
1037 // Unsafely `set_len` and copy items to `other`.
// The tail [at, len) is memcpy'd into the fresh allocation; the
// elided code presumably also shrinks self.len to `at` so that no
// element is owned by both vectors — TODO confirm.
1040 other.set_len(other_len);
1042 ptr::copy_nonoverlapping(
1044 self.as_ptr().offset(at as isize),
1052 impl<T: Clone> Vec<T> {
1053 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
1055 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
1056 /// is larger than the current value of `len()` or not.
1061 /// let mut vec = vec!["hello"];
1062 /// vec.resize(3, "world");
1063 /// assert_eq!(vec, ["hello", "world", "world"]);
1065 /// let mut vec = vec![1, 2, 3, 4];
1066 /// vec.resize(2, 0);
1067 /// assert_eq!(vec, [1, 2]);
1069 #[unstable(feature = "collections",
1070 reason = "matches collection reform specification; waiting for dust to settle")]
1071 pub fn resize(&mut self, new_len: usize, value: T) {
1072 let len = self.len();
// Grow by cloning `value` the required number of times...
1075 self.extend(repeat(value).take(new_len - len));
// ...or shrink by dropping the tail.
1077 self.truncate(new_len);
1081 /// Appends all elements in a slice to the `Vec`.
1083 /// Iterates over the slice `other`, clones each element, and then appends
1084 /// it to this `Vec`. The `other` vector is traversed in-order.
1089 /// let mut vec = vec![1];
1090 /// vec.push_all(&[2, 3, 4]);
1091 /// assert_eq!(vec, [1, 2, 3, 4]);
1094 #[unstable(feature = "collections",
1095 reason = "likely to be replaced by a more optimized extend")]
1096 pub fn push_all(&mut self, other: &[T]) {
1097 self.reserve(other.len());
1099 for i in 0..other.len() {
1100 let len = self.len();
1102 // Unsafe code so this can be optimised to a memcpy (or something similarly
1103 // fast) when T is Copy. LLVM is easily confused, so any extra operations
1104 // during the loop can prevent this optimisation.
// Each iteration writes one clone then bumps len, so a panicking
// Clone impl leaves the vector with only fully-initialized elements.
1107 self.get_unchecked_mut(len),
1108 other.get_unchecked(i).clone());
1109 self.set_len(len + 1);
1115 impl<T: PartialEq> Vec<T> {
1116 /// Removes consecutive repeated elements in the vector.
1118 /// If the vector is sorted, this removes all duplicates.
1123 /// let mut vec = vec![1, 2, 2, 3, 2];
1127 /// assert_eq!(vec, [1, 2, 3, 2]);
1129 #[stable(feature = "rust1", since = "1.0.0")]
1130 pub fn dedup(&mut self) {
1132 // Although we have a mutable reference to `self`, we cannot make
1133 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1134 // must ensure that the vector is in a valid state at all time.
1136 // The way that we handle this is by using swaps; we iterate
1137 // over all the elements, swapping as we go so that at the end
1138 // the elements we wish to keep are in the front, and those we
1139 // wish to reject are at the back. We can then truncate the
1140 // vector. This operation is still O(n).
1142 // Example: We start in this state, where `r` represents "next
1143 // read" and `w` represents "next_write`.
1146 // +---+---+---+---+---+---+
1147 // | 0 | 1 | 1 | 2 | 3 | 3 |
1148 // +---+---+---+---+---+---+
1151 // Comparing self[r] against self[w-1], this is not a duplicate, so
1152 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1153 // r and w, leaving us with:
1156 // +---+---+---+---+---+---+
1157 // | 0 | 1 | 1 | 2 | 3 | 3 |
1158 // +---+---+---+---+---+---+
1161 // Comparing self[r] against self[w-1], this value is a duplicate,
1162 // so we increment `r` but leave everything else unchanged:
1165 // +---+---+---+---+---+---+
1166 // | 0 | 1 | 1 | 2 | 3 | 3 |
1167 // +---+---+---+---+---+---+
1170 // Comparing self[r] against self[w-1], this is not a duplicate,
1171 // so swap self[r] and self[w] and advance r and w:
1174 // +---+---+---+---+---+---+
1175 // | 0 | 1 | 2 | 1 | 3 | 3 |
1176 // +---+---+---+---+---+---+
1179 // Not a duplicate, repeat:
1182 // +---+---+---+---+---+---+
1183 // | 0 | 1 | 2 | 3 | 1 | 3 |
1184 // +---+---+---+---+---+---+
1187 // Duplicate, advance r. End of vec. Truncate to w.
1189 let ln = self.len();
// Zero or one element can contain no consecutive duplicates.
1190 if ln < 1 { return; }
1192 // Avoid bounds checks by using unsafe pointers.
1193 let p = self.as_mut_ptr();
// NOTE(review): the read/write-cursor loop and the `r`/`w` index
// bookkeeping are elided from this excerpt.
1198 let p_r = p.offset(r as isize);
1199 let p_wm1 = p.offset((w - 1) as isize);
1202 let p_w = p_wm1.offset(1);
1203 mem::swap(&mut *p_r, &mut *p_w);
1215 ////////////////////////////////////////////////////////////////////////////////
1216 // Internal methods and functions
1217 ////////////////////////////////////////////////////////////////////////////////
1220 /// Reserves capacity for exactly `capacity` elements in the given vector.
1222 /// If the capacity for `self` is already equal to or greater than the
1223 /// requested capacity, then no action is taken.
1224 fn grow_capacity(&mut self, capacity: usize) {
// Zero-sized types never allocate, so there is nothing to grow.
1225 if mem::size_of::<T>() == 0 { return }
1227 if capacity > self.cap {
// Overflow-checked byte-size computation before hitting the allocator.
1228 let size = capacity.checked_mul(mem::size_of::<T>())
1229 .expect("capacity overflow");
1231 let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
1232 if ptr.is_null() { ::alloc::oom() }
1233 self.ptr = Unique::new(ptr);
1235 self.cap = capacity;
1240 // FIXME: #13996: need a way to mark the return value as `noalias`
// Picks fresh allocation vs. in-place reallocation; the branch condition
// (presumably `old_size == 0`) is elided from this excerpt.
1242 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
1244 allocate(size, mem::min_align_of::<T>()) as *mut T
1246 reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
// Frees `len * size_of::<T>()` bytes; zero-sized types never allocated,
// so there is nothing to free for them.
1251 unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
1252 if mem::size_of::<T>() != 0 {
1253 deallocate(ptr as *mut u8,
1254 len * mem::size_of::<T>(),
1255 mem::min_align_of::<T>())
1260 #[stable(feature = "rust1", since = "1.0.0")]
1261 pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
1263 let mut v = Vec::with_capacity(n);
1264 let mut ptr = v.as_mut_ptr();
1266 // Write all elements except the last one
1268 ptr::write(ptr, Clone::clone(&elem));
1269 ptr = ptr.offset(1);
1270 v.set_len(i); // Increment the length in every step in case Clone::clone() panics
1274 // We can write the last element directly without cloning needlessly
1275 ptr::write(ptr, elem);
1283 ////////////////////////////////////////////////////////////////////////////////
1284 // Common trait implementations for Vec
1285 ////////////////////////////////////////////////////////////////////////////////
1287 #[unstable(feature = "collections")]
1288 impl<T:Clone> Clone for Vec<T> {
1289 fn clone(&self) -> Vec<T> { ::slice::SliceExt::to_vec(&**self) }
1291 fn clone_from(&mut self, other: &Vec<T>) {
1292 // drop anything in self that will not be overwritten
1293 if self.len() > other.len() {
1294 self.truncate(other.len())
1297 // reuse the contained values' allocations/resources.
1298 for (place, thing) in self.iter_mut().zip(other.iter()) {
1299 place.clone_from(thing)
1302 // self.len <= other.len due to the truncate above, so the
1303 // slice here is always in-bounds.
1304 let slice = &other[self.len()..];
1305 self.push_all(slice);
1309 #[stable(feature = "rust1", since = "1.0.0")]
1310 impl<T: Hash> Hash for Vec<T> {
1312 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1313 Hash::hash(&**self, state)
1317 #[stable(feature = "rust1", since = "1.0.0")]
1318 impl<T> Index<usize> for Vec<T> {
1322 fn index(&self, index: &usize) -> &T {
1323 // NB built-in indexing via `&[T]`
1328 #[stable(feature = "rust1", since = "1.0.0")]
1329 impl<T> IndexMut<usize> for Vec<T> {
1331 fn index_mut(&mut self, index: &usize) -> &mut T {
1332 // NB built-in indexing via `&mut [T]`
1333 &mut (**self)[*index]
1338 #[stable(feature = "rust1", since = "1.0.0")]
1339 impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
1342 fn index(&self, index: &ops::Range<usize>) -> &[T] {
1343 Index::index(&**self, index)
1346 #[stable(feature = "rust1", since = "1.0.0")]
1347 impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
1350 fn index(&self, index: &ops::RangeTo<usize>) -> &[T] {
1351 Index::index(&**self, index)
1354 #[stable(feature = "rust1", since = "1.0.0")]
1355 impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
1358 fn index(&self, index: &ops::RangeFrom<usize>) -> &[T] {
1359 Index::index(&**self, index)
1362 #[stable(feature = "rust1", since = "1.0.0")]
1363 impl<T> ops::Index<ops::RangeFull> for Vec<T> {
1366 fn index(&self, _index: &ops::RangeFull) -> &[T] {
// Mutable slicing `&mut vec[a..b]` etc. — mirror of the `Index` range
// impls above, forwarding to the slice `IndexMut` impls via `&mut **self`.
1371 #[stable(feature = "rust1", since = "1.0.0")]
1372 impl<T> ops::IndexMut<ops::Range<usize>> for Vec<T> {
1374 fn index_mut(&mut self, index: &ops::Range<usize>) -> &mut [T] {
1375 IndexMut::index_mut(&mut **self, index)
1378 #[stable(feature = "rust1", since = "1.0.0")]
1379 impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
1381 fn index_mut(&mut self, index: &ops::RangeTo<usize>) -> &mut [T] {
1382 IndexMut::index_mut(&mut **self, index)
1385 #[stable(feature = "rust1", since = "1.0.0")]
1386 impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
1388 fn index_mut(&mut self, index: &ops::RangeFrom<usize>) -> &mut [T] {
1389 IndexMut::index_mut(&mut **self, index)
1392 #[stable(feature = "rust1", since = "1.0.0")]
1393 impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
// As with `Index<RangeFull>`, the index value is unused.
1395 fn index_mut(&mut self, _index: &ops::RangeFull) -> &mut [T] {
// `Deref`/`DerefMut` to `[T]`: the coercion that makes every slice method
// available on `Vec<T>` and enables the `&**self` delegation used by the
// impls above.
1400 #[stable(feature = "rust1", since = "1.0.0")]
1401 impl<T> ops::Deref for Vec<T> {
1404 fn deref(&self) -> &[T] { self.as_slice() }
1407 #[stable(feature = "rust1", since = "1.0.0")]
1408 impl<T> ops::DerefMut for Vec<T> {
1409 fn deref_mut(&mut self) -> &mut [T] { self.as_mut_slice() }
// Collect an iterator into a `Vec<T>`. Pre-sizes from the size_hint lower
// bound, then fills the reserved capacity with an unchecked fast path
// (raw `ptr::write` + `set_len`, no per-element bounds/capacity checks),
// and finally falls back to plain `push` for any elements beyond the
// hint. Several interior lines (incl. the `unsafe` block opener around
// the fast path) are missing from this extract.
1412 #[stable(feature = "rust1", since = "1.0.0")]
1413 impl<T> FromIterator<T> for Vec<T> {
1415 fn from_iter<I: IntoIterator<Item=T>>(iterable: I) -> Vec<T> {
1416 let mut iterator = iterable.into_iter();
1417 let (lower, _) = iterator.size_hint();
1418 let mut vector = Vec::with_capacity(lower);
1420 // This function should be the moral equivalent of:
1422 // for item in iterator {
1423 // vector.push(item);
1426 // This equivalent crucially runs the iterator precisely once. Below we
1427 // actually in theory run the iterator twice (one without bounds checks
1428 // and one with). To achieve the "moral equivalent", we use the `if`
1429 // statement below to break out early.
1431 // If the first loop has terminated, then we have one of two conditions.
1433 // 1. The underlying iterator returned `None`. In this case we are
1434 // guaranteed that less than `vector.capacity()` elements have been
1435 // returned, so we break out early.
1436 // 2. The underlying iterator yielded `vector.capacity()` elements and
1437 // has not yielded `None` yet. In this case we run the iterator to
1439 for element in iterator.by_ref().take(vector.capacity()) {
1440 let len = vector.len();
// Write directly into reserved-but-uninitialized capacity; `take`
// above guarantees we never exceed `capacity()`, making the
// unchecked write and the `set_len` bump sound.
1442 ptr::write(vector.get_unchecked_mut(len), element);
1443 vector.set_len(len + 1);
1447 if vector.len() == vector.capacity() {
// size_hint was exhausted but the iterator may not be: drain the
// rest through the checked (reallocating) `push` path.
1448 for element in iterator {
1449 vector.push(element);
// The three standard `IntoIterator` flavors: by value (consuming
// `IntoIter<T>`), by shared reference (`slice::Iter`), and by mutable
// reference (`slice::IterMut`). Bodies of the by-value and by-ref
// `into_iter` fns are not visible in this extract.
1456 #[stable(feature = "rust1", since = "1.0.0")]
1457 impl<T> IntoIterator for Vec<T> {
1459 type IntoIter = IntoIter<T>;
1461 fn into_iter(self) -> IntoIter<T> {
1466 #[stable(feature = "rust1", since = "1.0.0")]
1467 impl<'a, T> IntoIterator for &'a Vec<T> {
1469 type IntoIter = slice::Iter<'a, T>;
1471 fn into_iter(self) -> slice::Iter<'a, T> {
1476 #[stable(feature = "rust1", since = "1.0.0")]
1477 impl<'a, T> IntoIterator for &'a mut Vec<T> {
1478 type Item = &'a mut T;
1479 type IntoIter = slice::IterMut<'a, T>;
1481 fn into_iter(mut self) -> slice::IterMut<'a, T> {
// Append all items of an iterator. Reserves the size_hint lower bound up
// front so the common case does a single reallocation, then pushes each
// element (push body line not visible here).
1486 #[unstable(feature = "collections", reason = "waiting on Extend stability")]
1487 impl<T> Extend<T> for Vec<T> {
1489 fn extend<I: IntoIterator<Item=T>>(&mut self, iterable: I) {
1490 let iterator = iterable.into_iter();
1491 let (lower, _) = iterator.size_hint();
1492 self.reserve(lower);
1493 for element in iterator {
// `PartialEq` impls between Vec, slices, Cow<[T]> and fixed-size arrays,
// stamped out by the shared `__impl_slice_eq1!`/`__impl_slice_eq2!`
// macros (defined elsewhere in the crate). The `array_impls!` macro is
// invoked (invocation lines not visible) for array lengths 0..=32 — the
// bare number rows below are its argument list.
1499 __impl_slice_eq1! { Vec<A>, Vec<B> }
1500 __impl_slice_eq2! { Vec<A>, &'b [B] }
1501 __impl_slice_eq2! { Vec<A>, &'b mut [B] }
1502 __impl_slice_eq2! { CowVec<'a, A>, &'b [B], Clone }
1503 __impl_slice_eq2! { CowVec<'a, A>, &'b mut [B], Clone }
1504 __impl_slice_eq2! { CowVec<'a, A>, Vec<B>, Clone }
1506 macro_rules! array_impls {
1509 // NOTE: some less important impls are omitted to reduce code bloat
1510 __impl_slice_eq2! { Vec<A>, [B; $N] }
1511 __impl_slice_eq2! { Vec<A>, &'b [B; $N] }
1512 // __impl_slice_eq2! { Vec<A>, &'b mut [B; $N] }
1513 // __impl_slice_eq2! { CowVec<'a, A>, [B; $N], Clone }
1514 // __impl_slice_eq2! { CowVec<'a, A>, &'b [B; $N], Clone }
1515 // __impl_slice_eq2! { CowVec<'a, A>, &'b mut [B; $N], Clone }
1522 10 11 12 13 14 15 16 17 18 19
1523 20 21 22 23 24 25 26 27 28 29
// Ordering for `Vec<T>`: delegate to the slice impls (lexicographic
// element-wise comparison) via `&**self`, same pattern as `Hash` above.
1527 #[stable(feature = "rust1", since = "1.0.0")]
1528 impl<T: PartialOrd> PartialOrd for Vec<T> {
1530 fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
1531 PartialOrd::partial_cmp(&**self, &**other)
1535 #[stable(feature = "rust1", since = "1.0.0")]
1536 impl<T: Eq> Eq for Vec<T> {}
1538 #[stable(feature = "rust1", since = "1.0.0")]
1539 impl<T: Ord> Ord for Vec<T> {
1541 fn cmp(&self, other: &Vec<T>) -> Ordering {
1542 Ord::cmp(&**self, &**other)
// View the Vec as a `&[T]` by hand-building a raw slice from the data
// pointer. The `assume(p != 0)` hint tells the optimizer the pointer is
// non-null so null checks can be elided. Interior lines (the `unsafe`
// opener and the RawSlice field initializers) are missing from this
// extract.
1546 impl<T> AsSlice<T> for Vec<T> {
1547 /// Returns a slice into `self`.
1552 /// fn foo(slice: &[i32]) {}
1554 /// let vec = vec![1, 2];
1555 /// foo(vec.as_slice());
1558 #[stable(feature = "rust1", since = "1.0.0")]
1559 fn as_slice(&self) -> &[T] {
1562 assume(p != 0 as *mut T);
1563 mem::transmute(RawSlice {
// `vec + &slice` concatenation: consumes `self`, appends a clone of each
// element of `rhs`, and returns the (possibly reallocated) vector. The
// body (presumably a `push_all`-style append — not visible here) is
// missing from this extract.
1571 #[unstable(feature = "collections",
1572 reason = "recent addition, needs more experience")]
1573 impl<'a, T: Clone> Add<&'a [T]> for Vec<T> {
1574 type Output = Vec<T>;
1577 fn add(mut self, rhs: &[T]) -> Vec<T> {
// Destructor: drops the live elements (those lines are missing from this
// extract) and then frees the backing buffer via `dealloc`. Must stay a
// no-op when the struct has been zeroed by a move-out, because `Vec`
// uses `#[unsafe_no_drop_flag]` and may be dropped twice.
1583 #[unsafe_destructor]
1584 #[stable(feature = "rust1", since = "1.0.0")]
1585 impl<T> Drop for Vec<T> {
1586 fn drop(&mut self) {
1587 // This is (and should always remain) a no-op if the fields are
1588 // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
1594 dealloc(*self.ptr, self.cap)
// `Default` (an empty vector — the `Vec::new()` body line is not visible
// here) and `Debug` (delegates to the slice formatter via `&**self`).
1600 #[stable(feature = "rust1", since = "1.0.0")]
1601 impl<T> Default for Vec<T> {
1602 #[stable(feature = "rust1", since = "1.0.0")]
1603 fn default() -> Vec<T> {
1608 #[stable(feature = "rust1", since = "1.0.0")]
1609 impl<T: fmt::Debug> fmt::Debug for Vec<T> {
1610 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1611 fmt::Debug::fmt(&**self, f)
1615 ////////////////////////////////////////////////////////////////////////////////
1617 ////////////////////////////////////////////////////////////////////////////////
// Deprecated `CowVec` alias plus the clone-on-write conversions:
// collecting into `Cow<[T]>` always produces the owned variant, and
// `into_cow` wraps a Vec as `Cow::Owned` / a slice as `Cow::Borrowed`
// (those two bodies are not visible in this extract).
1619 /// A clone-on-write vector
1620 #[deprecated(since = "1.0.0", reason = "use Cow<'a, [T]> instead")]
1621 #[unstable(feature = "collections")]
1622 pub type CowVec<'a, T> = Cow<'a, [T]>;
1624 #[unstable(feature = "collections")]
1625 impl<'a, T> FromIterator<T> for Cow<'a, [T]> where T: Clone {
1626 fn from_iter<I: IntoIterator<Item=T>>(it: I) -> Cow<'a, [T]> {
1627 Cow::Owned(FromIterator::from_iter(it))
1631 impl<'a, T: 'a> IntoCow<'a, [T]> for Vec<T> where T: Clone {
1632 fn into_cow(self) -> Cow<'a, [T]> {
1637 impl<'a, T> IntoCow<'a, [T]> for &'a [T] where T: Clone {
1638 fn into_cow(self) -> Cow<'a, [T]> {
// Owning iterator over a Vec. `allocation`/`cap` remember the original
// buffer so `Drop` (or `into_inner`) can free/reuse it; the `ptr`/`end`
// cursor fields are not visible in this extract.
1647 /// An iterator that moves out of a vector.
1648 #[stable(feature = "rust1", since = "1.0.0")]
1649 pub struct IntoIter<T> {
1650 allocation: *mut T, // the block of memory allocated for the vector
1651 cap: usize, // the capacity of the vector
// Raw pointers are not Send/Sync by default; the iterator owns its
// elements, so it is as Send/Sync as `T` itself.
1656 unsafe impl<T: Send> Send for IntoIter<T> { }
1657 unsafe impl<T: Sync> Sync for IntoIter<T> { }
1659 impl<T> IntoIter<T> {
1661 /// Drops all items that have not yet been moved and returns the empty vector.
1662 #[unstable(feature = "collections")]
1663 pub fn into_inner(mut self) -> Vec<T> {
// Exhaust (and thus drop) the remaining elements, then rebuild an
// empty Vec over the same allocation so its capacity is reused.
1665 for _x in self.by_ref() { }
1666 let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
1668 Vec::from_raw_parts(allocation, 0, cap)
// Forward iteration: advance `ptr` toward `end`, reading each element
// out by value. Zero-sized types get special handling: the "pointer" is
// really a counter (bumped by 1 via transmute, since `offset` on a ZST
// pointer would not move it) and the value is conjured by reading from
// the non-null EMPTY sentinel. Some interior lines (the `unsafe` opener,
// `let old = self.ptr;`) are missing from this extract.
1673 #[stable(feature = "rust1", since = "1.0.0")]
1674 impl<T> Iterator for IntoIter<T> {
1678 fn next(&mut self) -> Option<T> {
1680 if self.ptr == self.end {
1683 if mem::size_of::<T>() == 0 {
1684 // purposefully don't use 'ptr.offset' because for
1685 // vectors with 0-size elements this would return the
1687 self.ptr = mem::transmute(self.ptr as usize + 1);
1689 // Use a non-null pointer value
1690 Some(ptr::read(EMPTY as *mut T))
1693 self.ptr = self.ptr.offset(1);
1695 Some(ptr::read(old))
// Exact size: pointer difference in bytes divided by element size
// (element size forced to 1 for ZSTs, where the byte difference is
// itself the count).
1702 fn size_hint(&self) -> (usize, Option<usize>) {
1703 let diff = (self.end as usize) - (self.ptr as usize);
1704 let size = mem::size_of::<T>();
1705 let exact = diff / (if size == 0 {1} else {size});
1706 (exact, Some(exact))
// Back-to-front iteration: mirror of `next`, but decrementing `end`
// instead of incrementing `ptr`, with the same ZST counter trick.
// `size_hint` returns an exact count, which justifies the
// `ExactSizeIterator` impl below.
1710 #[stable(feature = "rust1", since = "1.0.0")]
1711 impl<T> DoubleEndedIterator for IntoIter<T> {
1713 fn next_back(&mut self) -> Option<T> {
1715 if self.end == self.ptr {
1718 if mem::size_of::<T>() == 0 {
1719 // See above for why 'ptr.offset' isn't used
1720 self.end = mem::transmute(self.end as usize - 1);
1722 // Use a non-null pointer value
1723 Some(ptr::read(EMPTY as *mut T))
1725 self.end = self.end.offset(-1);
// `end` now points at the last unread element; read it out.
1727 Some(ptr::read(mem::transmute(self.end)))
1734 #[stable(feature = "rust1", since = "1.0.0")]
1735 impl<T> ExactSizeIterator for IntoIter<T> {}
// Destructor for a partially-consumed IntoIter: drop whatever elements
// remain by running the iterator to completion, then free the original
// allocation. A guard against `cap == 0` / already-freed state is
// presumably on the missing lines — TODO confirm against full source.
1737 #[unsafe_destructor]
1738 #[stable(feature = "rust1", since = "1.0.0")]
1739 impl<T> Drop for IntoIter<T> {
1740 fn drop(&mut self) {
1741 // destroy the remaining elements
1743 for _x in self.by_ref() {}
1745 dealloc(self.allocation, self.cap);
// Draining iterator: moves elements out of a Vec that it borrows (the
// `ptr`/`end` cursor fields are on lines missing from this extract; the
// PhantomData ties the borrow's lifetime). `#[unsafe_no_drop_flag]`
// means Drop may run on a zeroed value, which its impl must tolerate.
1751 /// An iterator that drains a vector.
1752 #[unsafe_no_drop_flag]
1753 #[unstable(feature = "collections",
1754 reason = "recently added as part of collections reform 2")]
1755 pub struct Drain<'a, T:'a> {
1758 marker: PhantomData<&'a T>,
// Same justification as IntoIter: raw pointers suppress the auto
// traits, but Drain is as thread-safe as the `T`s it yields.
1761 unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
1762 unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
// Forward iteration for Drain — structurally identical to
// `Iterator for IntoIter` above, including the zero-sized-type counter
// trick and the EMPTY sentinel read. Interior lines (the `unsafe`
// opener, `let old = self.ptr;`) are missing from this extract.
1764 #[stable(feature = "rust1", since = "1.0.0")]
1765 impl<'a, T> Iterator for Drain<'a, T> {
1769 fn next(&mut self) -> Option<T> {
1771 if self.ptr == self.end {
1774 if mem::size_of::<T>() == 0 {
1775 // purposefully don't use 'ptr.offset' because for
1776 // vectors with 0-size elements this would return the
1778 self.ptr = mem::transmute(self.ptr as usize + 1);
1780 // Use a non-null pointer value
1781 Some(ptr::read(EMPTY as *mut T))
1784 self.ptr = self.ptr.offset(1);
1786 Some(ptr::read(old))
// Exact remaining count, byte-distance over element size (ZSTs use
// size 1 so the distance is the count itself).
1793 fn size_hint(&self) -> (usize, Option<usize>) {
1794 let diff = (self.end as usize) - (self.ptr as usize);
1795 let size = mem::size_of::<T>();
1796 let exact = diff / (if size == 0 {1} else {size});
1797 (exact, Some(exact))
// Reverse iteration for Drain — mirror of `next`, decrementing `end`,
// same ZST handling. Exact `size_hint` backs the `ExactSizeIterator`
// impl below.
1801 #[stable(feature = "rust1", since = "1.0.0")]
1802 impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1804 fn next_back(&mut self) -> Option<T> {
1806 if self.end == self.ptr {
1809 if mem::size_of::<T>() == 0 {
1810 // See above for why 'ptr.offset' isn't used
1811 self.end = mem::transmute(self.end as usize - 1);
1813 // Use a non-null pointer value
1814 Some(ptr::read(EMPTY as *mut T))
1816 self.end = self.end.offset(-1);
1818 Some(ptr::read(self.end))
1825 #[stable(feature = "rust1", since = "1.0.0")]
1826 impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
// Destructor for a partially-consumed Drain: drop the elements that were
// never yielded. No deallocation here — the backing buffer still belongs
// to the drained Vec. Tolerates a zeroed self (ptr == end == null) per
// the `#[unsafe_no_drop_flag]` contract.
1828 #[unsafe_destructor]
1829 #[stable(feature = "rust1", since = "1.0.0")]
1830 impl<'a, T> Drop for Drain<'a, T> {
1831 fn drop(&mut self) {
1832 // self.ptr == self.end == null if drop has already been called,
1833 // so we can use #[unsafe_no_drop_flag].
1835 // destroy the remaining elements
1836 for _x in self.by_ref() {}
1840 ////////////////////////////////////////////////////////////////////////////////
1841 // Conversion from &[T] to &Vec<T>
1842 ////////////////////////////////////////////////////////////////////////////////
// `&[T]` -> `&Vec<T>` adapter: `as_vec` builds a Vec aliasing the slice's
// memory (via `from_raw_parts` on a borrowed pointer) and wraps it in
// `DerefVec`, whose Drop must prevent that aliasing Vec from ever
// freeing memory it does not own (the neutralizing statements are on
// lines missing from this extract — presumably a `set_len(0)`/forget;
// TODO confirm against full source).
1844 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
1845 #[unstable(feature = "collections")]
1846 pub struct DerefVec<'a, T:'a> {
1848 l: PhantomData<&'a T>,
1851 #[unstable(feature = "collections")]
1852 impl<'a, T> Deref for DerefVec<'a, T> {
1853 type Target = Vec<T>;
1855 fn deref<'b>(&'b self) -> &'b Vec<T> {
1860 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
1861 #[unsafe_destructor]
1862 #[stable(feature = "rust1", since = "1.0.0")]
1863 impl<'a, T> Drop for DerefVec<'a, T> {
1864 fn drop(&mut self) {
1870 /// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
1871 #[unstable(feature = "collections")]
1872 pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
// The constructed Vec aliases the borrowed slice; `len == cap` so it
// looks fully initialized, and DerefVec's Drop keeps it from freeing.
1875 x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
1881 ////////////////////////////////////////////////////////////////////////////////
1882 // Partial vec, used for map_in_place
1883 ////////////////////////////////////////////////////////////////////////////////
// Support types for `map_in_place`: vectors caught mid-conversion from
// element type `T` to `U`. Their Drop impls make an in-place type
// conversion panic-safe by destroying exactly the elements that are
// still live on each side of the conversion frontier.
1885 /// An owned, partially type-converted vector of elements with non-zero size.
1887 /// `T` and `U` must have the same, non-zero size. They must also have the same
1890 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1891 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1892 /// destructed. Additionally the underlying storage of `vec` will be freed.
1893 struct PartialVecNonZeroSized<T,U> {
1901 _marker: PhantomData<U>,
1904 /// An owned, partially type-converted vector of zero-sized elements.
1906 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
1908 struct PartialVecZeroSized<T,U> {
// Cell<(T,U)> in the marker makes the type invariant over T and U;
// counts of live ZST values are tracked in the (not visible) fields.
1911 marker: PhantomData<::core::cell::Cell<(T,U)>>,
1914 #[unsafe_destructor]
1915 impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
1916 fn drop(&mut self) {
1918 // `vec` hasn't been modified until now. As it has a length
1919 // currently, this would run destructors of `T`s which might not be
1920 // there. So at first, set `vec`s length to `0`. This must be done
1921 // at first to remain memory-safe as the destructors of `U` or `T`
1922 // might cause unwinding where `vec`s destructor would be executed.
1923 self.vec.set_len(0);
1925 // We have instances of `U`s and `T`s in `vec`. Destruct them.
1926 while self.start_u != self.end_u {
1927 let _ = ptr::read(self.start_u); // Run a `U` destructor.
1928 self.start_u = self.start_u.offset(1);
1930 while self.start_t != self.end_t {
1931 let _ = ptr::read(self.start_t); // Run a `T` destructor.
1932 self.start_t = self.start_t.offset(1);
1934 // After this destructor ran, the destructor of `vec` will run,
1935 // deallocating the underlying memory.
1940 #[unsafe_destructor]
1941 impl<T,U> Drop for PartialVecZeroSized<T,U> {
1942 fn drop(&mut self) {
// Zero-sized values occupy no storage, so "dropping" one is just
// materializing a value (mem::uninitialized is harmless for a ZST)
// and letting it fall out of scope; the counter decrements are on
// lines missing from this extract.
1944 // Destruct the instances of `T` and `U` this struct owns.
1945 while self.num_t != 0 {
1946 let _: T = mem::uninitialized(); // Run a `T` destructor.
1949 while self.num_u != 0 {
1950 let _: U = mem::uninitialized(); // Run a `U` destructor.
1960 use core::mem::size_of;
1961 use core::iter::repeat;
1965 struct DropCounter<'a> {
1969 #[unsafe_destructor]
1970 impl<'a> Drop for DropCounter<'a> {
1971 fn drop(&mut self) {
1978 let xs = [1u8, 2u8, 3u8];
1979 assert_eq!(&**as_vec(&xs), xs);
1983 fn test_as_vec_dtor() {
1984 let (mut count_x, mut count_y) = (0, 0);
1986 let xs = &[DropCounter { count: &mut count_x }, DropCounter { count: &mut count_y }];
1987 assert_eq!(as_vec(xs).len(), 2);
1989 assert_eq!(count_x, 1);
1990 assert_eq!(count_y, 1);
1994 fn test_small_vec_struct() {
1995 assert!(size_of::<Vec<u8>>() == size_of::<usize>() * 3);
1999 fn test_double_drop() {
2005 let (mut count_x, mut count_y) = (0, 0);
2007 let mut tv = TwoVec {
2011 tv.x.push(DropCounter {count: &mut count_x});
2012 tv.y.push(DropCounter {count: &mut count_y});
2014 // If Vec had a drop flag, here is where it would be zeroed.
2015 // Instead, it should rely on its internal state to prevent
2016 // doing anything significant when dropped multiple times.
2019 // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
2022 assert_eq!(count_x, 1);
2023 assert_eq!(count_y, 1);
2028 let mut v = Vec::new();
2029 assert_eq!(v.capacity(), 0);
2032 assert!(v.capacity() >= 2);
2038 assert!(v.capacity() >= 16);
2040 assert!(v.capacity() >= 32);
2045 assert!(v.capacity() >= 33)
2050 let mut v = Vec::new();
2051 let mut w = Vec::new();
2054 for i in 0..3 { w.push(i) }
2059 for i in 3..10 { w.push(i) }
2065 fn test_slice_from_mut() {
2066 let mut values = vec![1, 2, 3, 4, 5];
2068 let slice = &mut values[2 ..];
2069 assert!(slice == [3, 4, 5]);
2075 assert!(values == [1, 2, 5, 6, 7]);
2079 fn test_slice_to_mut() {
2080 let mut values = vec![1, 2, 3, 4, 5];
2082 let slice = &mut values[.. 2];
2083 assert!(slice == [1, 2]);
2089 assert!(values == [2, 3, 3, 4, 5]);
2093 fn test_split_at_mut() {
2094 let mut values = vec![1, 2, 3, 4, 5];
2096 let (left, right) = values.split_at_mut(2);
2098 let left: &[_] = left;
2099 assert!(&left[..left.len()] == &[1, 2]);
2106 let right: &[_] = right;
2107 assert!(&right[..right.len()] == &[3, 4, 5]);
2114 assert_eq!(values, [2, 3, 5, 6, 7]);
2119 let v: Vec<i32> = vec![];
2120 let w = vec!(1, 2, 3);
2122 assert_eq!(v, v.clone());
2126 // they should be disjoint in memory.
2127 assert!(w.as_ptr() != z.as_ptr())
2131 fn test_clone_from() {
2133 let three = vec!(box 1, box 2, box 3);
2134 let two = vec!(box 4, box 5);
2136 v.clone_from(&three);
2137 assert_eq!(v, three);
2140 v.clone_from(&three);
2141 assert_eq!(v, three);
2148 v.clone_from(&three);
2149 assert_eq!(v, three)
2154 let mut vec = vec![1, 2, 3, 4];
2155 vec.retain(|&x| x % 2 == 0);
2156 assert_eq!(vec, [2, 4]);
2160 fn zero_sized_values() {
2161 let mut v = Vec::new();
2162 assert_eq!(v.len(), 0);
2164 assert_eq!(v.len(), 1);
2166 assert_eq!(v.len(), 2);
2167 assert_eq!(v.pop(), Some(()));
2168 assert_eq!(v.pop(), Some(()));
2169 assert_eq!(v.pop(), None);
2171 assert_eq!(v.iter().count(), 0);
2173 assert_eq!(v.iter().count(), 1);
2175 assert_eq!(v.iter().count(), 2);
2179 assert_eq!(v.iter_mut().count(), 2);
2181 assert_eq!(v.iter_mut().count(), 3);
2183 assert_eq!(v.iter_mut().count(), 4);
2185 for &mut () in &mut v {}
2186 unsafe { v.set_len(0); }
2187 assert_eq!(v.iter_mut().count(), 0);
2191 fn test_partition() {
2192 assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![]));
2193 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![]));
2194 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3]));
2195 assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3]));
2199 fn test_zip_unzip() {
2200 let z1 = vec![(1, 4), (2, 5), (3, 6)];
2202 let (left, right): (Vec<_>, Vec<_>) = z1.iter().cloned().unzip();
2204 assert_eq!((1, 4), (left[0], right[0]));
2205 assert_eq!((2, 5), (left[1], right[1]));
2206 assert_eq!((3, 6), (left[2], right[2]));
2210 fn test_unsafe_ptrs() {
2212 // Test on-stack copy-from-buf.
2214 let ptr = a.as_ptr();
2215 let b = Vec::from_raw_buf(ptr, 3);
2216 assert_eq!(b, [1, 2, 3]);
2218 // Test on-heap copy-from-buf.
2219 let c = vec![1, 2, 3, 4, 5];
2220 let ptr = c.as_ptr();
2221 let d = Vec::from_raw_buf(ptr, 5);
2222 assert_eq!(d, [1, 2, 3, 4, 5]);
2227 fn test_vec_truncate_drop() {
2228 static mut drops: u32 = 0;
2230 impl Drop for Elem {
2231 fn drop(&mut self) {
2232 unsafe { drops += 1; }
2236 let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
2237 assert_eq!(unsafe { drops }, 0);
2239 assert_eq!(unsafe { drops }, 2);
2241 assert_eq!(unsafe { drops }, 5);
2246 fn test_vec_truncate_fail() {
2247 struct BadElem(i32);
2248 impl Drop for BadElem {
2249 fn drop(&mut self) {
2250 let BadElem(ref mut x) = *self;
2251 if *x == 0xbadbeef {
2252 panic!("BadElem panic: 0xbadbeef")
2257 let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
2263 let vec = vec![1, 2, 3];
2264 assert!(vec[1] == 2);
2269 fn test_index_out_of_bounds() {
2270 let vec = vec![1, 2, 3];
2276 fn test_slice_out_of_bounds_1() {
2277 let x = vec![1, 2, 3, 4, 5];
2283 fn test_slice_out_of_bounds_2() {
2284 let x = vec![1, 2, 3, 4, 5];
2290 fn test_slice_out_of_bounds_3() {
2291 let x = vec![1, 2, 3, 4, 5];
2297 fn test_slice_out_of_bounds_4() {
2298 let x = vec![1, 2, 3, 4, 5];
2304 fn test_slice_out_of_bounds_5() {
2305 let x = vec![1, 2, 3, 4, 5];
2311 fn test_swap_remove_empty() {
2312 let mut vec= Vec::<i32>::new();
2317 fn test_move_iter_unwrap() {
2318 let mut vec = Vec::with_capacity(7);
2321 let ptr = vec.as_ptr();
2322 vec = vec.into_iter().into_inner();
2323 assert_eq!(vec.as_ptr(), ptr);
2324 assert_eq!(vec.capacity(), 7);
2325 assert_eq!(vec.len(), 0);
2330 fn test_map_in_place_incompatible_types_fail() {
2331 let v = vec![0, 1, 2];
2332 v.map_in_place(|_| ());
2336 fn test_map_in_place() {
2337 let v = vec![0, 1, 2];
2338 assert_eq!(v.map_in_place(|i: u32| i as i32 - 1), [-1, 0, 1]);
2342 fn test_map_in_place_zero_sized() {
2343 let v = vec![(), ()];
2344 #[derive(PartialEq, Debug)]
2346 assert_eq!(v.map_in_place(|_| ZeroSized), [ZeroSized, ZeroSized]);
2350 fn test_map_in_place_zero_drop_count() {
2351 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
2353 #[derive(Clone, PartialEq, Debug)]
2355 impl Drop for Nothing { fn drop(&mut self) { } }
2357 #[derive(Clone, PartialEq, Debug)]
2359 impl Drop for ZeroSized {
2360 fn drop(&mut self) {
2361 DROP_COUNTER.fetch_add(1, Ordering::Relaxed);
2364 const NUM_ELEMENTS: usize = 2;
2365 static DROP_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
2367 let v = repeat(Nothing).take(NUM_ELEMENTS).collect::<Vec<_>>();
2369 DROP_COUNTER.store(0, Ordering::Relaxed);
2371 let v = v.map_in_place(|_| ZeroSized);
2372 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), 0);
2374 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), NUM_ELEMENTS);
2378 fn test_move_items() {
2379 let vec = vec![1, 2, 3];
2380 let mut vec2 = vec![];
2384 assert_eq!(vec2, [1, 2, 3]);
2388 fn test_move_items_reverse() {
2389 let vec = vec![1, 2, 3];
2390 let mut vec2 = vec![];
2391 for i in vec.into_iter().rev() {
2394 assert_eq!(vec2, [3, 2, 1]);
2398 fn test_move_items_zero_sized() {
2399 let vec = vec![(), (), ()];
2400 let mut vec2 = vec![];
2404 assert_eq!(vec2, [(), (), ()]);
2408 fn test_drain_items() {
2409 let mut vec = vec![1, 2, 3];
2410 let mut vec2 = vec![];
2411 for i in vec.drain() {
2414 assert_eq!(vec, []);
2415 assert_eq!(vec2, [ 1, 2, 3 ]);
2419 fn test_drain_items_reverse() {
2420 let mut vec = vec![1, 2, 3];
2421 let mut vec2 = vec![];
2422 for i in vec.drain().rev() {
2425 assert_eq!(vec, []);
2426 assert_eq!(vec2, [3, 2, 1]);
2430 fn test_drain_items_zero_sized() {
2431 let mut vec = vec![(), (), ()];
2432 let mut vec2 = vec![];
2433 for i in vec.drain() {
2436 assert_eq!(vec, []);
2437 assert_eq!(vec2, [(), (), ()]);
2441 fn test_into_boxed_slice() {
2442 let xs = vec![1, 2, 3];
2443 let ys = xs.into_boxed_slice();
2444 assert_eq!(&*ys, [1, 2, 3]);
2449 let mut vec = vec![1, 2, 3];
2450 let mut vec2 = vec![4, 5, 6];
2451 vec.append(&mut vec2);
2452 assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
2453 assert_eq!(vec2, []);
2457 fn test_split_off() {
2458 let mut vec = vec![1, 2, 3, 4, 5, 6];
2459 let vec2 = vec.split_off(4);
2460 assert_eq!(vec, [1, 2, 3, 4]);
2461 assert_eq!(vec2, [5, 6]);
2465 fn bench_new(b: &mut Bencher) {
2467 let v: Vec<u32> = Vec::new();
2468 assert_eq!(v.len(), 0);
2469 assert_eq!(v.capacity(), 0);
2473 fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) {
2474 b.bytes = src_len as u64;
2477 let v: Vec<u32> = Vec::with_capacity(src_len);
2478 assert_eq!(v.len(), 0);
2479 assert_eq!(v.capacity(), src_len);
2484 fn bench_with_capacity_0000(b: &mut Bencher) {
2485 do_bench_with_capacity(b, 0)
2489 fn bench_with_capacity_0010(b: &mut Bencher) {
2490 do_bench_with_capacity(b, 10)
2494 fn bench_with_capacity_0100(b: &mut Bencher) {
2495 do_bench_with_capacity(b, 100)
2499 fn bench_with_capacity_1000(b: &mut Bencher) {
2500 do_bench_with_capacity(b, 1000)
2503 fn do_bench_from_fn(b: &mut Bencher, src_len: usize) {
2504 b.bytes = src_len as u64;
2507 let dst = (0..src_len).collect::<Vec<_>>();
2508 assert_eq!(dst.len(), src_len);
2509 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2514 fn bench_from_fn_0000(b: &mut Bencher) {
2515 do_bench_from_fn(b, 0)
2519 fn bench_from_fn_0010(b: &mut Bencher) {
2520 do_bench_from_fn(b, 10)
2524 fn bench_from_fn_0100(b: &mut Bencher) {
2525 do_bench_from_fn(b, 100)
2529 fn bench_from_fn_1000(b: &mut Bencher) {
2530 do_bench_from_fn(b, 1000)
2533 fn do_bench_from_elem(b: &mut Bencher, src_len: usize) {
2534 b.bytes = src_len as u64;
2537 let dst: Vec<usize> = repeat(5).take(src_len).collect();
2538 assert_eq!(dst.len(), src_len);
2539 assert!(dst.iter().all(|x| *x == 5));
2544 fn bench_from_elem_0000(b: &mut Bencher) {
2545 do_bench_from_elem(b, 0)
2549 fn bench_from_elem_0010(b: &mut Bencher) {
2550 do_bench_from_elem(b, 10)
2554 fn bench_from_elem_0100(b: &mut Bencher) {
2555 do_bench_from_elem(b, 100)
2559 fn bench_from_elem_1000(b: &mut Bencher) {
2560 do_bench_from_elem(b, 1000)
2563 fn do_bench_from_slice(b: &mut Bencher, src_len: usize) {
2564 let src: Vec<_> = FromIterator::from_iter(0..src_len);
2566 b.bytes = src_len as u64;
2569 let dst = src.clone()[..].to_vec();
2570 assert_eq!(dst.len(), src_len);
2571 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2576 fn bench_from_slice_0000(b: &mut Bencher) {
2577 do_bench_from_slice(b, 0)
2581 fn bench_from_slice_0010(b: &mut Bencher) {
2582 do_bench_from_slice(b, 10)
2586 fn bench_from_slice_0100(b: &mut Bencher) {
2587 do_bench_from_slice(b, 100)
2591 fn bench_from_slice_1000(b: &mut Bencher) {
2592 do_bench_from_slice(b, 1000)
2595 fn do_bench_from_iter(b: &mut Bencher, src_len: usize) {
2596 let src: Vec<_> = FromIterator::from_iter(0..src_len);
2598 b.bytes = src_len as u64;
2601 let dst: Vec<_> = FromIterator::from_iter(src.clone().into_iter());
2602 assert_eq!(dst.len(), src_len);
2603 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2608 fn bench_from_iter_0000(b: &mut Bencher) {
2609 do_bench_from_iter(b, 0)
2613 fn bench_from_iter_0010(b: &mut Bencher) {
2614 do_bench_from_iter(b, 10)
2618 fn bench_from_iter_0100(b: &mut Bencher) {
2619 do_bench_from_iter(b, 100)
2623 fn bench_from_iter_1000(b: &mut Bencher) {
2624 do_bench_from_iter(b, 1000)
2627 fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) {
2628 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2629 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2631 b.bytes = src_len as u64;
2634 let mut dst = dst.clone();
2635 dst.extend(src.clone().into_iter());
2636 assert_eq!(dst.len(), dst_len + src_len);
2637 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2642 fn bench_extend_0000_0000(b: &mut Bencher) {
2643 do_bench_extend(b, 0, 0)
2647 fn bench_extend_0000_0010(b: &mut Bencher) {
2648 do_bench_extend(b, 0, 10)
2652 fn bench_extend_0000_0100(b: &mut Bencher) {
2653 do_bench_extend(b, 0, 100)
2657 fn bench_extend_0000_1000(b: &mut Bencher) {
2658 do_bench_extend(b, 0, 1000)
2662 fn bench_extend_0010_0010(b: &mut Bencher) {
2663 do_bench_extend(b, 10, 10)
2667 fn bench_extend_0100_0100(b: &mut Bencher) {
2668 do_bench_extend(b, 100, 100)
2672 fn bench_extend_1000_1000(b: &mut Bencher) {
2673 do_bench_extend(b, 1000, 1000)
2676 fn do_bench_push_all(b: &mut Bencher, dst_len: usize, src_len: usize) {
2677 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2678 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2680 b.bytes = src_len as u64;
2683 let mut dst = dst.clone();
2685 assert_eq!(dst.len(), dst_len + src_len);
2686 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2691 fn bench_push_all_0000_0000(b: &mut Bencher) {
2692 do_bench_push_all(b, 0, 0)
2696 fn bench_push_all_0000_0010(b: &mut Bencher) {
2697 do_bench_push_all(b, 0, 10)
2701 fn bench_push_all_0000_0100(b: &mut Bencher) {
2702 do_bench_push_all(b, 0, 100)
2706 fn bench_push_all_0000_1000(b: &mut Bencher) {
2707 do_bench_push_all(b, 0, 1000)
2711 fn bench_push_all_0010_0010(b: &mut Bencher) {
2712 do_bench_push_all(b, 10, 10)
2716 fn bench_push_all_0100_0100(b: &mut Bencher) {
2717 do_bench_push_all(b, 100, 100)
2721 fn bench_push_all_1000_1000(b: &mut Bencher) {
2722 do_bench_push_all(b, 1000, 1000)
2725 fn do_bench_push_all_move(b: &mut Bencher, dst_len: usize, src_len: usize) {
2726 let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
2727 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2729 b.bytes = src_len as u64;
2732 let mut dst = dst.clone();
2733 dst.extend(src.clone().into_iter());
2734 assert_eq!(dst.len(), dst_len + src_len);
2735 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2740 fn bench_push_all_move_0000_0000(b: &mut Bencher) {
2741 do_bench_push_all_move(b, 0, 0)
2745 fn bench_push_all_move_0000_0010(b: &mut Bencher) {
2746 do_bench_push_all_move(b, 0, 10)
2750 fn bench_push_all_move_0000_0100(b: &mut Bencher) {
2751 do_bench_push_all_move(b, 0, 100)
2755 fn bench_push_all_move_0000_1000(b: &mut Bencher) {
2756 do_bench_push_all_move(b, 0, 1000)
2760 fn bench_push_all_move_0010_0010(b: &mut Bencher) {
2761 do_bench_push_all_move(b, 10, 10)
2765 fn bench_push_all_move_0100_0100(b: &mut Bencher) {
2766 do_bench_push_all_move(b, 100, 100)
2770 fn bench_push_all_move_1000_1000(b: &mut Bencher) {
2771 do_bench_push_all_move(b, 1000, 1000)
2774 fn do_bench_clone(b: &mut Bencher, src_len: usize) {
2775 let src: Vec<usize> = FromIterator::from_iter(0..src_len);
2777 b.bytes = src_len as u64;
2780 let dst = src.clone();
2781 assert_eq!(dst.len(), src_len);
2782 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2787 fn bench_clone_0000(b: &mut Bencher) {
2788 do_bench_clone(b, 0)
2792 fn bench_clone_0010(b: &mut Bencher) {
2793 do_bench_clone(b, 10)
2797 fn bench_clone_0100(b: &mut Bencher) {
2798 do_bench_clone(b, 100)
2802 fn bench_clone_1000(b: &mut Bencher) {
2803 do_bench_clone(b, 1000)
2806 fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) {
2807 let dst: Vec<_> = FromIterator::from_iter(0..src_len);
2808 let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
2810 b.bytes = (times * src_len) as u64;
2813 let mut dst = dst.clone();
2816 dst.clone_from(&src);
2818 assert_eq!(dst.len(), src_len);
2819 assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x));
2825 fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
2826 do_bench_clone_from(b, 1, 0, 0)
2830 fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
2831 do_bench_clone_from(b, 1, 0, 10)
2835 fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
2836 do_bench_clone_from(b, 1, 0, 100)
2840 fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
2841 do_bench_clone_from(b, 1, 0, 1000)
2845 fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
2846 do_bench_clone_from(b, 1, 10, 10)
2850 fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
2851 do_bench_clone_from(b, 1, 100, 100)
2855 fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
2856 do_bench_clone_from(b, 1, 1000, 1000)
2860 fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
2861 do_bench_clone_from(b, 1, 10, 100)
2865 fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
2866 do_bench_clone_from(b, 1, 100, 1000)
2870 fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
2871 do_bench_clone_from(b, 1, 10, 0)
2875 fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
2876 do_bench_clone_from(b, 1, 100, 10)
2880 fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
2881 do_bench_clone_from(b, 1, 1000, 100)
2885 fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
2886 do_bench_clone_from(b, 10, 0, 0)
2890 fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
2891 do_bench_clone_from(b, 10, 0, 10)
2895 fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
2896 do_bench_clone_from(b, 10, 0, 100)
2900 fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
2901 do_bench_clone_from(b, 10, 0, 1000)
2905 fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
2906 do_bench_clone_from(b, 10, 10, 10)
2910 fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
2911 do_bench_clone_from(b, 10, 100, 100)
2915 fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
2916 do_bench_clone_from(b, 10, 1000, 1000)
2920 fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
2921 do_bench_clone_from(b, 10, 10, 100)
2925 fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
2926 do_bench_clone_from(b, 10, 100, 1000)
2930 fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
2931 do_bench_clone_from(b, 10, 10, 0)
2935 fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
2936 do_bench_clone_from(b, 10, 100, 10)
2940 fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
2941 do_bench_clone_from(b, 10, 1000, 100)