1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type, written `Vec<T>` but pronounced 'vector.'
13 //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end).
17 //! Explicitly creating a `Vec<T>` with `new()`:
20 //! let xs: Vec<i32> = Vec::new();
23 //! Using the `vec!` macro:
26 //! let ys: Vec<i32> = vec![];
28 //! let zs = vec![1i32, 2, 3, 4, 5];
34 //! let mut xs = vec![1i32, 2];
42 //! let mut xs = vec![1i32, 2];
44 //! let two = xs.pop();
49 use alloc::boxed::Box;
50 use alloc::heap::{EMPTY, allocate, reallocate, deallocate};
51 use core::borrow::{Cow, IntoCow};
53 use core::default::Default;
55 use core::hash::{mod, Hash};
56 use core::iter::repeat;
57 use core::kinds::marker::{ContravariantLifetime, InvariantType};
59 use core::nonzero::NonZero;
60 use core::num::{Int, UnsignedInt};
63 use core::raw::Slice as RawSlice;
66 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
71 /// let mut vec = Vec::new();
75 /// assert_eq!(vec.len(), 2);
76 /// assert_eq!(vec[0], 1);
78 /// assert_eq!(vec.pop(), Some(2));
79 /// assert_eq!(vec.len(), 1);
82 /// assert_eq!(vec[0], 7);
84 /// vec.push_all(&[1, 2, 3]);
86 /// for x in vec.iter() {
87 /// println!("{}", x);
89 /// assert_eq!(vec, vec![7i, 1, 2, 3]);
92 /// The `vec!` macro is provided to make initialization more convenient:
95 /// let mut vec = vec![1i, 2i, 3i];
97 /// assert_eq!(vec, vec![1, 2, 3, 4]);
100 /// Use a `Vec<T>` as an efficient stack:
103 /// let mut stack = Vec::new();
110 /// let top = match stack.pop() {
111 /// None => break, // empty
114 /// // Prints 3, 2, 1
115 /// println!("{}", top);
119 /// # Capacity and reallocation
121 /// The capacity of a vector is the amount of space allocated for any future elements that will be
122 /// added onto the vector. This is not to be confused with the *length* of a vector, which
123 /// specifies the number of actual elements within the vector. If a vector's length exceeds its
124 /// capacity, its capacity will automatically be increased, but its elements will have to be
127 /// For example, a vector with capacity 10 and length 0 would be an empty vector with space for 10
128 /// more elements. Pushing 10 or fewer elements onto the vector will not change its capacity or
129 /// cause reallocation to occur. However, if the vector's length is increased to 11, it will have
130 /// to reallocate, which can be slow. For this reason, it is recommended to use
131 /// `Vec::with_capacity` whenever possible to specify how big the vector is expected to get.
132 #[unsafe_no_drop_flag]
// Never null: `new()` stores the `heap::EMPTY` sentinel while unallocated,
// so the pointer can live in `NonZero` and `Option<Vec<T>>` keeps the
// null-pointer enum optimization.
135 ptr: NonZero<*mut T>,
// A `Vec<T>` owns its elements, so it is `Send`/`Sync` exactly when `T` is.
140 unsafe impl<T: Send> Send for Vec<T> { }
141 unsafe impl<T: Sync> Sync for Vec<T> { }
143 ////////////////////////////////////////////////////////////////////////////////
145 ////////////////////////////////////////////////////////////////////////////////
148 /// Constructs a new, empty `Vec<T>`.
150 /// The vector will not allocate until elements are pushed onto it.
155 /// let mut vec: Vec<int> = Vec::new();
159 pub fn new() -> Vec<T> {
160 // We want ptr to never be NULL so instead we set it to some arbitrary
161 // non-null value which is fine since we never call deallocate on the ptr
162 // if cap is 0. The reason for this is because the pointer of a slice
163 // being NULL would break the null pointer optimization for enums.
// `EMPTY` is the non-null sentinel address exported by `alloc::heap`
// (see the imports at the top of the file).
164 Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
167 /// Constructs a new, empty `Vec<T>` with the specified capacity.
169 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
170 /// `capacity` is 0, the vector will not allocate.
172 /// It is important to note that this function does not specify the *length* of the returned
173 /// vector, but only the *capacity*. (For an explanation of the difference between length and
174 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.)
179 /// let mut vec: Vec<int> = Vec::with_capacity(10);
181 /// // The vector contains no items, even though it has capacity for more
182 /// assert_eq!(vec.len(), 0);
184 /// // These are all done without reallocating...
185 /// for i in range(0i, 10) {
189 /// // ...but this may make the vector reallocate
194 pub fn with_capacity(capacity: uint) -> Vec<T> {
195 if mem::size_of::<T>() == 0 {
// Zero-sized elements occupy no memory, so no allocation is ever
// needed; advertise the maximum representable capacity.
196 Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
197 } else if capacity == 0 {
// Byte size is capacity * size_of::<T>(); panic on arithmetic overflow.
200 let size = capacity.checked_mul(mem::size_of::<T>())
201 .expect("capacity overflow");
202 let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
// A null return from the allocator means out-of-memory: abort.
203 if ptr.is_null() { ::alloc::oom() }
204 Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
208 /// Deprecated: use `iter::range(0, length).map(op).collect()` instead
210 #[deprecated = "use iter::range(0, length).map(op).collect() instead"]
// Deprecated shim kept for backwards compatibility; simply delegates to the
// iterator-based form recommended by the attribute above.
211 pub fn from_fn<F>(length: uint, op: F) -> Vec<T> where F: FnMut(uint) -> T {
212 range(0, length).map(op).collect()
215 /// Creates a `Vec<T>` directly from the raw components of another vector.
217 /// This is highly unsafe, due to the number of invariants that aren't checked.
226 /// let mut v = vec![1i, 2, 3];
228 /// // Pull out the various important pieces of information about `v`
229 /// let p = v.as_mut_ptr();
230 /// let len = v.len();
231 /// let cap = v.capacity();
234 /// // Cast `v` into the void: no destructor run, so we are in
235 /// // complete control of the allocation to which `p` points.
238 /// // Overwrite memory with 4, 5, 6
239 /// for i in range(0, len as int) {
240 /// ptr::write(p.offset(i), 4 + i);
243 /// // Put everything back together into a Vec
244 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
245 /// assert_eq!(rebuilt, vec![4i, 5i, 6i]);
250 pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
251 capacity: uint) -> Vec<T> {
// No validation: the caller must guarantee `ptr` is non-null and that
// `length`/`capacity` accurately describe the allocation it points to.
252 Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
255 /// Creates a vector by copying the elements from a raw pointer.
257 /// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation
258 /// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector
259 /// without cloning, as if `ptr::read()` were called on them.
261 #[unstable = "may be better expressed via composition"]
262 pub unsafe fn from_raw_buf(ptr: *const T, elts: uint) -> Vec<T> {
263 let mut dst = Vec::with_capacity(elts);
// Bitwise copy (no `Clone`); caller must ensure the source does not
// overlap the freshly allocated destination.
265 ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
269 /// Deprecated: use `into_iter().partition(f)` instead.
271 #[deprecated = "use into_iter().partition(f) instead"]
// Deprecated shim: consumes the vector and delegates to the iterator method.
272 pub fn partition<F>(self, f: F) -> (Vec<T>, Vec<T>) where F: FnMut(&T) -> bool {
273 self.into_iter().partition(f)
276 /// Returns the number of elements the vector can hold without
282 /// let vec: Vec<int> = Vec::with_capacity(10);
283 /// assert_eq!(vec.capacity(), 10);
// Reallocation only happens once `len` would exceed this value (see the
// 'Capacity and reallocation' notes on the type docs).
287 pub fn capacity(&self) -> uint {
291 /// Deprecated: Renamed to `reserve`.
292 #[deprecated = "Renamed to `reserve`"]
// Deprecated alias retained for backwards compatibility.
293 pub fn reserve_additional(&mut self, extra: uint) {
297 /// Reserves capacity for at least `additional` more elements to be inserted in the given
298 /// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations.
302 /// Panics if the new capacity overflows `uint`.
307 /// let mut vec: Vec<int> = vec![1];
309 /// assert!(vec.capacity() >= 11);
312 pub fn reserve(&mut self, additional: uint) {
// Only grow when the spare capacity is insufficient.
313 if self.cap - self.len < additional {
314 let err_msg = "Vec::reserve: `uint` overflow";
// Round the required capacity up to the next power of two for
// amortized O(1) growth; both steps can overflow, hence checked ops.
315 let new_cap = self.len.checked_add(additional).expect(err_msg)
316 .checked_next_power_of_two().expect(err_msg);
317 self.grow_capacity(new_cap);
321 /// Reserves the minimum capacity for exactly `additional` more elements to
322 /// be inserted in the given `Vec<T>`. Does nothing if the capacity is already
325 /// Note that the allocator may give the collection more space than it
326 /// requests. Therefore capacity can not be relied upon to be precisely
327 /// minimal. Prefer `reserve` if future insertions are expected.
331 /// Panics if the new capacity overflows `uint`.
336 /// let mut vec: Vec<int> = vec![1];
337 /// vec.reserve_exact(10);
338 /// assert!(vec.capacity() >= 11);
341 pub fn reserve_exact(&mut self, additional: uint) {
342 if self.cap - self.len < additional {
// Unlike `reserve`, no power-of-two rounding: grow to exactly
// `len + additional` (overflow still checked).
343 match self.len.checked_add(additional) {
344 None => panic!("Vec::reserve: `uint` overflow"),
345 Some(new_cap) => self.grow_capacity(new_cap)
350 /// Shrinks the capacity of the vector as much as possible.
352 /// It will drop down as close as possible to the length but the allocator
353 /// may still inform the vector that there is space for a few more elements.
358 /// let mut vec: Vec<int> = Vec::with_capacity(10);
359 /// vec.push_all(&[1, 2, 3]);
360 /// assert_eq!(vec.capacity(), 10);
361 /// vec.shrink_to_fit();
362 /// assert!(vec.capacity() >= 3);
365 pub fn shrink_to_fit(&mut self) {
// Zero-sized element types never allocate, so there is nothing to shrink.
366 if mem::size_of::<T>() == 0 { return }
// Empty case: release the whole allocation outright.
371 dealloc(*self.ptr, self.cap)
377 // Overflow check is unnecessary as the vector is already at
// Shrink the block to `len` elements; `reallocate` may move it, so the
// (checked-non-null) result is stored back below.
379 let ptr = reallocate(*self.ptr as *mut u8,
380 self.cap * mem::size_of::<T>(),
381 self.len * mem::size_of::<T>(),
382 mem::min_align_of::<T>()) as *mut T;
383 if ptr.is_null() { ::alloc::oom() }
384 self.ptr = NonZero::new(ptr);
390 /// Convert the vector into Box<[T]>.
392 /// Note that this will drop any excess capacity. Calling this and
393 /// converting back to a vector with `into_vec()` is equivalent to calling
394 /// `shrink_to_fit()`.
396 pub fn into_boxed_slice(mut self) -> Box<[T]> {
// After shrinking, len == cap, so a slice over the elements covers the
// whole allocation and can safely take ownership of it as a Box<[T]>.
397 self.shrink_to_fit();
399 let xs: Box<[T]> = mem::transmute(self.as_mut_slice());
405 /// Shorten a vector, dropping excess elements.
407 /// If `len` is greater than the vector's current length, this has no
413 /// let mut vec = vec![1i, 2, 3, 4];
415 /// assert_eq!(vec, vec![1, 2]);
418 pub fn truncate(&mut self, len: uint) {
420 // drop any extra elements
421 while len < self.len {
422 // decrement len before the read(), so a panic on Drop doesn't
423 // re-drop the just-failed value.
// `read` moves the element out; its destructor runs when the
// temporary value is dropped.
425 ptr::read(self.get_unchecked(self.len));
430 /// Returns a mutable slice of the elements of `self`.
435 /// fn foo(slice: &mut [int]) {}
437 /// let mut vec = vec![1i, 2];
438 /// foo(vec.as_mut_slice());
442 pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
// Build the slice manually from (ptr, len) via the raw slice
// representation; sound because the first `len` elements are initialized.
444 mem::transmute(RawSlice {
445 data: *self.ptr as *const T,
451 /// Creates a consuming iterator, that is, one that moves each value out of
452 /// the vector (from start to end). The vector cannot be used after calling
458 /// let v = vec!["a".to_string(), "b".to_string()];
459 /// for s in v.into_iter() {
460 /// // s has type String, not &String
461 /// println!("{}", s);
466 pub fn into_iter(self) -> IntoIter<T> {
470 let begin = ptr as *const T;
// For zero-sized T, pointer offsets are no-ops, so the element count is
// encoded directly into the "end" address instead of a real offset.
471 let end = if mem::size_of::<T>() == 0 {
472 (ptr as uint + self.len()) as *const T
474 ptr.offset(self.len() as int) as *const T
// `allocation`/`cap` are kept so the iterator can free the buffer when done.
477 IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
481 /// Sets the length of a vector.
483 /// This will explicitly set the size of the vector, without actually
484 /// modifying its buffers, so it is up to the caller to ensure that the
485 /// vector is actually the specified size.
490 /// let mut v = vec![1u, 2, 3, 4];
// Unsafe by design: no checks are performed here (see the doc above).
497 pub unsafe fn set_len(&mut self, len: uint) {
501 /// Removes an element from anywhere in the vector and return it, replacing
502 /// it with the last element.
504 /// This does not preserve ordering, but is O(1).
508 /// Panics if `index` is out of bounds.
513 /// let mut v = vec!["foo", "bar", "baz", "qux"];
515 /// assert_eq!(v.swap_remove(1), "bar");
516 /// assert_eq!(v, vec!["foo", "qux", "baz"]);
518 /// assert_eq!(v.swap_remove(0), "foo");
519 /// assert_eq!(v, vec!["baz", "qux"]);
523 pub fn swap_remove(&mut self, index: uint) -> T {
524 let length = self.len();
// Move the target element to the end (the slice swap also bounds-checks
// `index`), so the actual removal can then happen at the back in O(1).
525 self.swap(index, length - 1);
529 /// Inserts an element at position `index` within the vector, shifting all
530 /// elements after position `i` one position to the right.
534 /// Panics if `index` is not between `0` and the vector's length (both
535 /// bounds inclusive).
540 /// let mut vec = vec![1i, 2, 3];
541 /// vec.insert(1, 4);
542 /// assert_eq!(vec, vec![1, 4, 2, 3]);
543 /// vec.insert(4, 5);
544 /// assert_eq!(vec, vec![1, 4, 2, 3, 5]);
547 pub fn insert(&mut self, index: uint, element: T) {
548 let len = self.len();
549 assert!(index <= len);
550 // space for the new element
553 unsafe { // infallible
554 // The spot to put the new value
556 let p = self.as_mut_ptr().offset(index as int);
557 // Shift everything over to make space. (Duplicating the
558 // `index`th element into two consecutive places.)
// Tail shifted right by one; source and destination ranges overlap.
559 ptr::copy_memory(p.offset(1), &*p, len - index);
560 // Write it in, overwriting the first copy of the `index`th
// `write` does not read/drop the old bytes, which still belong to
// the duplicated element one slot to the right.
562 ptr::write(&mut *p, element);
// Length is bumped only after the buffer is fully consistent.
564 self.set_len(len + 1);
568 /// Removes and returns the element at position `index` within the vector,
569 /// shifting all elements after position `index` one position to the left.
573 /// Panics if `i` is out of bounds.
578 /// let mut v = vec![1i, 2, 3];
579 /// assert_eq!(v.remove(1), 2);
580 /// assert_eq!(v, vec![1, 3]);
583 pub fn remove(&mut self, index: uint) -> T {
584 let len = self.len();
585 assert!(index < len);
586 unsafe { // infallible
589 // the place we are taking from.
590 let ptr = self.as_mut_ptr().offset(index as int);
591 // copy it out, unsafely having a copy of the value on
592 // the stack and in the vector at the same time.
593 ret = ptr::read(ptr as *const T);
595 // Shift everything down to fill in that spot.
// Tail shifted left by one to close the gap; ranges overlap.
596 ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
// Shrinking the length makes the duplicated last slot unreachable.
598 self.set_len(len - 1);
603 /// Retains only the elements specified by the predicate.
605 /// In other words, remove all elements `e` such that `f(&e)` returns false.
606 /// This method operates in place and preserves the order of the retained
612 /// let mut vec = vec![1i, 2, 3, 4];
613 /// vec.retain(|&x| x%2 == 0);
614 /// assert_eq!(vec, vec![2, 4]);
617 pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
618 let len = self.len();
621 let v = self.as_mut_slice();
623 for i in range(0u, len) {
// `del` counts rejected elements; survivors have been compacted toward
// the front of the buffer, so dropping the trailing `del` slots via
// `truncate` completes the operation.
632 self.truncate(len - del);
636 /// Deprecated: use `extend(range(0, n).map(f))` instead.
637 #[deprecated = "use extend(range(0, n).map(f)) instead"]
// Deprecated shim: delegates to the iterator-based `extend`.
638 pub fn grow_fn<F>(&mut self, n: uint, f: F) where F: FnMut(uint) -> T {
639 self.extend(range(0, n).map(f));
642 /// Appends an element to the back of a collection.
646 /// Panics if the number of elements in the vector overflows a `uint`.
651 /// let mut vec = vec!(1i, 2);
653 /// assert_eq!(vec, vec!(1, 2, 3));
657 pub fn push(&mut self, value: T) {
658 if mem::size_of::<T>() == 0 {
659 // zero-size types consume no memory, so we can't rely on the
660 // address space running out
661 self.len = self.len.checked_add(1).expect("length overflow");
// The value still "exists" conceptually; forget it so no drop runs.
662 unsafe { mem::forget(value); }
// Doubling growth policy (minimum two elements' worth of space) keeps
// push amortized O(1).
665 if self.len == self.cap {
666 let old_size = self.cap * mem::size_of::<T>();
667 let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
668 if old_size > size { panic!("capacity overflow") }
670 let ptr = alloc_or_realloc(*self.ptr, old_size, size);
671 if ptr.is_null() { ::alloc::oom() }
672 self.ptr = NonZero::new(ptr);
674 self.cap = max(self.cap, 2) * 2;
// Write into the uninitialized slot one past the end; `write` must not
// read or drop the old bytes there.
678 let end = (*self.ptr).offset(self.len as int);
679 ptr::write(&mut *end, value);
684 /// Removes the last element from a vector and returns it, or `None` if it is empty.
689 /// let mut vec = vec![1i, 2, 3];
690 /// assert_eq!(vec.pop(), Some(3));
691 /// assert_eq!(vec, vec![1, 2]);
695 pub fn pop(&mut self) -> Option<T> {
// The length is decremented before this read, so `self.len()` now
// indexes the popped slot; `read` moves the value out without dropping
// it in place.
701 Some(ptr::read(self.get_unchecked(self.len())))
706 /// Creates a draining iterator that clears the `Vec` and iterates over
707 /// the removed items from start to end.
712 /// let mut v = vec!["a".to_string(), "b".to_string()];
713 /// for s in v.drain() {
714 /// // s has type String, not &String
715 /// println!("{}", s);
717 /// assert!(v.is_empty());
720 #[unstable = "matches collection reform specification, waiting for dust to settle"]
721 pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
723 let begin = *self.ptr as *const T;
// Same trick as `into_iter`: for zero-sized T the element count is
// encoded into the "end" address, since offsets would be no-ops.
724 let end = if mem::size_of::<T>() == 0 {
725 (*self.ptr as uint + self.len()) as *const T
727 (*self.ptr).offset(self.len() as int) as *const T
// The lifetime marker ties the iterator to the mutable borrow of `self`.
733 marker: ContravariantLifetime,
738 /// Clears the vector, removing all values.
743 /// let mut v = vec![1i, 2, 3];
747 /// assert!(v.is_empty());
751 pub fn clear(&mut self) {
755 /// Returns the number of elements in the vector.
760 /// let a = vec![1i, 2, 3];
761 /// assert_eq!(a.len(), 3);
// Plain field read; O(1).
765 pub fn len(&self) -> uint { self.len }
767 /// Returns `true` if the vector contains no elements.
772 /// let mut v = Vec::new();
773 /// assert!(v.is_empty());
776 /// assert!(!v.is_empty());
779 pub fn is_empty(&self) -> bool { self.len() == 0 }
781 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
782 /// size and in case they are not zero-sized the same minimal alignment.
786 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
787 /// have differing minimal alignments.
792 /// let v = vec![0u, 1, 2];
793 /// let w = v.map_in_place(|i| i + 3);
794 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
796 /// #[deriving(PartialEq, Show)]
797 /// struct Newtype(u8);
798 /// let bytes = vec![0x11, 0x22];
799 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
800 /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice());
802 #[experimental = "API may change to provide stronger guarantees"]
803 pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
804 // FIXME: Assert statically that the types `T` and `U` have the same
// Equal element sizes are what make reusing the same allocation sound.
806 assert!(mem::size_of::<T>() == mem::size_of::<U>());
810 if mem::size_of::<T>() != 0 {
811 // FIXME: Assert statically that the types `T` and `U` have the
812 // same minimal alignment in case they are not zero-sized.
814 // These asserts are necessary because the `min_align_of` of the
815 // types are passed to the allocator by `Vec`.
816 assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
818 // This `as int` cast is safe, because the size of the elements of the
819 // vector is not 0, and:
821 // 1) If the size of the elements in the vector is 1, the `int` may
822 // overflow, but it has the correct bit pattern so that the
823 // `.offset()` function will work.
826 // Address space 0x0-0xF.
827 // `u8` array at: 0x1.
828 // Size of `u8` array: 0x8.
829 // Calculated `offset`: -0x8.
830 // After `array.offset(offset)`: 0x9.
831 // (0x1 + 0x8 = 0x1 - 0x8)
833 // 2) If the size of the elements in the vector is >1, the `uint` ->
834 // `int` conversion can't overflow.
835 let offset = vec.len() as int;
836 let start = vec.as_mut_ptr();
// The PartialVec tracks how far the in-place conversion has gotten so
// that its destructor can clean up correctly if a panic occurs mid-way.
838 let mut pv = PartialVecNonZeroSized {
842 // This points inside the vector, as the vector has length
844 end_t: unsafe { start.offset(offset) },
845 start_u: start as *mut U,
846 end_u: start as *mut U,
857 while pv.end_u as *mut T != pv.end_t {
861 // +-+-+-+-+-+-+-+-+-+
862 // |U|...|U|T|T|...|T|
863 // +-+-+-+-+-+-+-+-+-+
867 let t = ptr::read(pv.start_t as *const T);
870 // +-+-+-+-+-+-+-+-+-+
871 // |U|...|U|X|T|...|T|
872 // +-+-+-+-+-+-+-+-+-+
875 // We must not panic here, one cell is marked as `T`
876 // although it is not `T`.
878 pv.start_t = pv.start_t.offset(1);
881 // +-+-+-+-+-+-+-+-+-+
882 // |U|...|U|X|T|...|T|
883 // +-+-+-+-+-+-+-+-+-+
886 // We may panic again.
888 // The function given by the user might panic.
891 ptr::write(pv.end_u, u);
894 // +-+-+-+-+-+-+-+-+-+
895 // |U|...|U|U|T|...|T|
896 // +-+-+-+-+-+-+-+-+-+
899 // We should not panic here, because that would leak the `U`
900 // pointed to by `end_u`.
902 pv.end_u = pv.end_u.offset(1);
905 // +-+-+-+-+-+-+-+-+-+
906 // |U|...|U|U|T|...|T|
907 // +-+-+-+-+-+-+-+-+-+
910 // We may panic again.
922 // Extract `vec` and prevent the destructor of
923 // `PartialVecNonZeroSized` from running. Note that none of the
924 // function calls can panic, thus no resources can be leaked (as the
925 // `vec` member of `PartialVec` is the only one which holds
926 // allocations -- and it is returned from this function. None of
929 let vec_len = pv.vec.len();
930 let vec_cap = pv.vec.capacity();
931 let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
933 Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
936 // Put the `Vec` into the `PartialVecZeroSized` structure and
937 // prevent the destructor of the `Vec` from running. Since the
938 // `Vec` contained zero-sized objects, it did not allocate, so we
939 // are not leaking memory here.
940 let mut pv = PartialVecZeroSized::<T,U> {
943 marker_t: InvariantType,
944 marker_u: InvariantType,
946 unsafe { mem::forget(vec); }
948 while pv.num_t != 0 {
950 // Create a `T` out of thin air and decrement `num_t`. This
951 // must not panic between these steps, as otherwise a
952 // destructor of `T` which doesn't exist runs.
953 let t = mem::uninitialized();
956 // The function given by the user might panic.
959 // Forget the `U` and increment `num_u`. This increment
960 // cannot overflow the `uint` as we only do this for a
961 // number of times that fits into a `uint` (and start with
962 // `0`). Again, we should not panic between these steps.
967 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
968 // destructor of the latter will not run. None of this can panic.
// For zero-sized U no allocation is needed: an empty Vec plus a raw
// `set_len` is a complete representation of `num_u` elements.
969 let mut result = Vec::new();
971 result.set_len(pv.num_u);
979 impl<T: Clone> Vec<T> {
980 /// Deprecated: use `repeat(value).take(length).collect()` instead.
982 #[deprecated = "use repeat(value).take(length).collect() instead"]
983 pub fn from_elem(length: uint, value: T) -> Vec<T> {
984 repeat(value).take(length).collect()
987 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
989 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
990 /// is larger than the current value of `len()` or not.
995 /// let mut vec = vec!["hello"];
996 /// vec.resize(3, "world");
997 /// assert_eq!(vec, vec!["hello", "world", "world"]);
999 /// let mut vec = vec![1i, 2, 3, 4];
1000 /// vec.resize(2, 0);
1001 /// assert_eq!(vec, vec![1, 2]);
1003 #[unstable = "matches collection reform specification; waiting for dust to settle"]
1004 pub fn resize(&mut self, new_len: uint, value: T) {
1005 let len = self.len();
// Growing: append clones of `value` until the new length is reached.
1008 self.extend(repeat(value).take(new_len - len));
// Shrinking: drop the excess tail elements.
1010 self.truncate(new_len);
1014 /// Appends all elements in a slice to the `Vec`.
1016 /// Iterates over the slice `other`, clones each element, and then appends
1017 /// it to this `Vec`. The `other` vector is traversed in-order.
1022 /// let mut vec = vec![1i];
1023 /// vec.push_all(&[2i, 3, 4]);
1024 /// assert_eq!(vec, vec![1, 2, 3, 4]);
1027 #[experimental = "likely to be replaced by a more optimized extend"]
1028 pub fn push_all(&mut self, other: &[T]) {
// One up-front reservation, so the loop never reallocates.
1029 self.reserve(other.len());
1031 for i in range(0, other.len()) {
1032 let len = self.len();
1034 // Unsafe code so this can be optimised to a memcpy (or something similarly
1035 // fast) when T is Copy. LLVM is easily confused, so any extra operations
1036 // during the loop can prevent this optimisation.
1039 self.get_unchecked_mut(len),
1040 other.get_unchecked(i).clone());
// The length is bumped only after the slot is written, so a
// panicking `clone()` can never expose an uninitialized element.
1041 self.set_len(len + 1);
1046 /// Deprecated: use `extend(repeat(value).take(n))` instead
1047 #[deprecated = "use extend(repeat(value).take(n)) instead"]
1048 pub fn grow(&mut self, n: uint, value: T) {
1049 self.extend(repeat(value).take(n))
1052 /// Deprecated: use `iter().cloned().partition(f)` instead.
1053 #[deprecated = "use iter().cloned().partition(f) instead"]
1054 pub fn partitioned<F>(&self, f: F) -> (Vec<T>, Vec<T>) where F: FnMut(&T) -> bool {
1055 self.iter().cloned().partition(f)
1059 impl<T: PartialEq> Vec<T> {
1060 /// Removes consecutive repeated elements in the vector.
1062 /// If the vector is sorted, this removes all duplicates.
1067 /// let mut vec = vec![1i, 2, 2, 3, 2];
1071 /// assert_eq!(vec, vec![1i, 2, 3, 2]);
1074 pub fn dedup(&mut self) {
1076 // Although we have a mutable reference to `self`, we cannot make
1077 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1078 // must ensure that the vector is in a valid state at all time.
1080 // The way that we handle this is by using swaps; we iterate
1081 // over all the elements, swapping as we go so that at the end
1082 // the elements we wish to keep are in the front, and those we
1083 // wish to reject are at the back. We can then truncate the
1084 // vector. This operation is still O(n).
1086 // Example: We start in this state, where `r` represents "next
1087 // read" and `w` represents "next_write`.
1090 // +---+---+---+---+---+---+
1091 // | 0 | 1 | 1 | 2 | 3 | 3 |
1092 // +---+---+---+---+---+---+
1095 // Comparing self[r] against self[w-1], this is not a duplicate, so
1096 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1097 // r and w, leaving us with:
1100 // +---+---+---+---+---+---+
1101 // | 0 | 1 | 1 | 2 | 3 | 3 |
1102 // +---+---+---+---+---+---+
1105 // Comparing self[r] against self[w-1], this value is a duplicate,
1106 // so we increment `r` but leave everything else unchanged:
1109 // +---+---+---+---+---+---+
1110 // | 0 | 1 | 1 | 2 | 3 | 3 |
1111 // +---+---+---+---+---+---+
1114 // Comparing self[r] against self[w-1], this is not a duplicate,
1115 // so swap self[r] and self[w] and advance r and w:
1118 // +---+---+---+---+---+---+
1119 // | 0 | 1 | 2 | 1 | 3 | 3 |
1120 // +---+---+---+---+---+---+
1123 // Not a duplicate, repeat:
1126 // +---+---+---+---+---+---+
1127 // | 0 | 1 | 2 | 3 | 1 | 3 |
1128 // +---+---+---+---+---+---+
1131 // Duplicate, advance r. End of vec. Truncate to w.
1133 let ln = self.len();
// An empty vector has no consecutive pairs to compare.
1134 if ln < 1 { return; }
1136 // Avoid bounds checks by using unsafe pointers.
1137 let p = self.as_mut_ptr();
1142 let p_r = p.offset(r as int);
1143 let p_wm1 = p.offset((w - 1) as int);
1146 let p_w = p_wm1.offset(1);
// Move the kept element forward into the next write position.
1147 mem::swap(&mut *p_r, &mut *p_w);
1159 ////////////////////////////////////////////////////////////////////////////////
1161 ////////////////////////////////////////////////////////////////////////////////
1163 /// Deprecated: use `unzip` directly on the iterator instead.
1164 #[deprecated = "use unzip directly on the iterator instead"]
// Deprecated free-function shim retained for backwards compatibility.
1165 pub fn unzip<T, U, V: Iterator<(T, U)>>(iter: V) -> (Vec<T>, Vec<U>) {
1169 ////////////////////////////////////////////////////////////////////////////////
1170 // Internal methods and functions
1171 ////////////////////////////////////////////////////////////////////////////////
1174 /// Reserves capacity for exactly `capacity` elements in the given vector.
1176 /// If the capacity for `self` is already equal to or greater than the
1177 /// requested capacity, then no action is taken.
1178 fn grow_capacity(&mut self, capacity: uint) {
// Zero-sized types need no storage; capacity is conceptually unlimited.
1179 if mem::size_of::<T>() == 0 { return }
1181 if capacity > self.cap {
// Requested byte size; panic on arithmetic overflow.
1182 let size = capacity.checked_mul(mem::size_of::<T>())
1183 .expect("capacity overflow");
// `alloc_or_realloc` picks between a fresh allocation and a realloc
// of the existing block.
1185 let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
1186 if ptr.is_null() { ::alloc::oom() }
1187 self.ptr = NonZero::new(ptr);
1189 self.cap = capacity;
1194 // FIXME: #13996: need a way to mark the return value as `noalias`
1196 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: uint, size: uint) -> *mut T {
// Fresh allocation when there is no existing block; otherwise grow (and
// possibly move) the block via `reallocate`. May return null on failure —
// callers check for that.
1198 allocate(size, mem::min_align_of::<T>()) as *mut T
1200 reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
1205 unsafe fn dealloc<T>(ptr: *mut T, len: uint) {
// Zero-sized types were never actually allocated, so never deallocate them.
1206 if mem::size_of::<T>() != 0 {
1207 deallocate(ptr as *mut u8,
1208 len * mem::size_of::<T>(),
1209 mem::min_align_of::<T>())
1213 ////////////////////////////////////////////////////////////////////////////////
1214 // Common trait implementations for Vec
1215 ////////////////////////////////////////////////////////////////////////////////
1218 impl<T:Clone> Clone for Vec<T> {
// A fresh clone copies the slice contents into a brand-new vector.
1219 fn clone(&self) -> Vec<T> { ::slice::SliceExt::to_vec(self.as_slice()) }
1221 fn clone_from(&mut self, other: &Vec<T>) {
1222 // drop anything in self that will not be overwritten
1223 if self.len() > other.len() {
1224 self.truncate(other.len())
1227 // reuse the contained values' allocations/resources.
1228 for (place, thing) in self.iter_mut().zip(other.iter()) {
1229 place.clone_from(thing)
1232 // self.len <= other.len due to the truncate above, so the
1233 // slice here is always in-bounds.
// Clone any remaining elements of `other` that had no slot to reuse.
1234 let slice = other[self.len()..];
1235 self.push_all(slice);
1239 impl<S: hash::Writer, T: Hash<S>> Hash<S> for Vec<T> {
// Hashing delegates to the slice view, so a Vec hashes identically to &[T].
1241 fn hash(&self, state: &mut S) {
1242 self.as_slice().hash(state);
1246 #[experimental = "waiting on Index stability"]
1247 impl<T> Index<uint,T> for Vec<T> {
// Indexing forwards to the slice, inheriting its bounds checking (panics
// on out-of-range indices).
1249 fn index<'a>(&'a self, index: &uint) -> &'a T {
1250 &self.as_slice()[*index]
1254 impl<T> IndexMut<uint,T> for Vec<T> {
1256 fn index_mut<'a>(&'a mut self, index: &uint) -> &'a mut T {
1257 &mut self.as_mut_slice()[*index]
1261 impl<T> ops::Slice<uint, [T]> for Vec<T> {
// All slicing operators forward to the underlying slice implementation.
1263 fn as_slice_<'a>(&'a self) -> &'a [T] {
1268 fn slice_from_or_fail<'a>(&'a self, start: &uint) -> &'a [T] {
1269 self.as_slice().slice_from_or_fail(start)
1273 fn slice_to_or_fail<'a>(&'a self, end: &uint) -> &'a [T] {
1274 self.as_slice().slice_to_or_fail(end)
1277 fn slice_or_fail<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] {
1278 self.as_slice().slice_or_fail(start, end)
// Mutable counterparts of the impls above.
1282 impl<T> ops::SliceMut<uint, [T]> for Vec<T> {
1284 fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] {
1289 fn slice_from_or_fail_mut<'a>(&'a mut self, start: &uint) -> &'a mut [T] {
1290 self.as_mut_slice().slice_from_or_fail_mut(start)
1294 fn slice_to_or_fail_mut<'a>(&'a mut self, end: &uint) -> &'a mut [T] {
1295 self.as_mut_slice().slice_to_or_fail_mut(end)
1298 fn slice_or_fail_mut<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] {
1299 self.as_mut_slice().slice_or_fail_mut(start, end)
1303 #[experimental = "waiting on Deref stability"]
// Deref to [T] lets every slice method be called directly on a Vec.
1304 impl<T> ops::Deref<[T]> for Vec<T> {
1305 fn deref<'a>(&'a self) -> &'a [T] { self.as_slice() }
1308 #[experimental = "waiting on DerefMut stability"]
1309 impl<T> ops::DerefMut<[T]> for Vec<T> {
1310 fn deref_mut<'a>(&'a mut self) -> &'a mut [T] { self.as_mut_slice() }
1313 #[experimental = "waiting on FromIterator stability"]
1314 impl<T> FromIterator<T> for Vec<T> {
1316 fn from_iter<I:Iterator<T>>(mut iterator: I) -> Vec<T> {
// Preallocate from the iterator's size_hint lower bound to cut down
// on reallocations while collecting.
1317 let (lower, _) = iterator.size_hint();
1318 let mut vector = Vec::with_capacity(lower);
1319 for element in iterator {
1320 vector.push(element)
1326 #[experimental = "waiting on Extend stability"]
1327 impl<T> Extend<T> for Vec<T> {
1329 fn extend<I: Iterator<T>>(&mut self, mut iterator: I) {
// Same size_hint-based reservation as `from_iter`.
1330 let (lower, _) = iterator.size_hint();
1331 self.reserve(lower);
1332 for element in iterator {
// Equality compares element-wise through the deref'd slices, allowing
// mixed element types A/B as long as A: PartialEq<B>.
1338 impl<A, B> PartialEq<Vec<B>> for Vec<A> where A: PartialEq<B> {
1340 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1342 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
1345 macro_rules! impl_eq {
1346 ($lhs:ty, $rhs:ty) => {
1347 impl<'b, A, B> PartialEq<$rhs> for $lhs where A: PartialEq<B> {
1349 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1351 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1354 impl<'b, A, B> PartialEq<$lhs> for $rhs where B: PartialEq<A> {
1356 fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
1358 fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&**self, &**other) }
// Instantiate the symmetric `PartialEq` impls (generated by `impl_eq!` above)
// so a `Vec<A>` compares directly against shared and mutable slice borrows,
// in both operand orders.
1363 impl_eq! { Vec<A>, &'b [B] }
1364 impl_eq! { Vec<A>, &'b mut [B] }
1366 impl<'a, A, B> PartialEq<Vec<B>> for CowVec<'a, A> where A: PartialEq<B> + Clone {
1368 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1370 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
1373 impl<'a, A, B> PartialEq<CowVec<'a, A>> for Vec<B> where A: Clone, B: PartialEq<A> {
1375 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
1377 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
1380 macro_rules! impl_eq_for_cowvec {
1382 impl<'a, 'b, A, B> PartialEq<$rhs> for CowVec<'a, A> where A: PartialEq<B> + Clone {
1384 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1386 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1389 impl<'a, 'b, A, B> PartialEq<CowVec<'a, A>> for $rhs where A: Clone, B: PartialEq<A> {
1391 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
1393 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
// Same expansion as the `impl_eq!` lines above, but pairing `CowVec<'a, A>`
// with shared and mutable slice borrows (see `impl_eq_for_cowvec!`).
1398 impl_eq_for_cowvec! { &'b [B] }
1399 impl_eq_for_cowvec! { &'b mut [B] }
1401 #[unstable = "waiting on PartialOrd stability"]
1402 impl<T: PartialOrd> PartialOrd for Vec<T> {
1404 fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
1405 self.as_slice().partial_cmp(other.as_slice())
// Marker impl: `Vec<T>` has total equality whenever `T` does. The actual
// `eq`/`ne` logic lives in the element-wise `PartialEq` impl above; this
// only adds the reflexivity promise.
1409 #[unstable = "waiting on Eq stability"]
1410 impl<T: Eq> Eq for Vec<T> {}
1412 #[allow(deprecated)]
1413 #[deprecated = "Use overloaded `core::cmp::PartialEq`"]
1414 impl<T: PartialEq, Sized? V: AsSlice<T>> Equiv<V> for Vec<T> {
1416 fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() }
1419 #[unstable = "waiting on Ord stability"]
1420 impl<T: Ord> Ord for Vec<T> {
1422 fn cmp(&self, other: &Vec<T>) -> Ordering {
1423 self.as_slice().cmp(other.as_slice())
1427 impl<T> AsSlice<T> for Vec<T> {
1428 /// Returns a slice into `self`.
1433 /// fn foo(slice: &[int]) {}
1435 /// let vec = vec![1i, 2];
1436 /// foo(vec.as_slice());
1440 fn as_slice<'a>(&'a self) -> &'a [T] {
1442 mem::transmute(RawSlice {
1443 data: *self.ptr as *const T,
1450 impl<'a, T: Clone> Add<&'a [T], Vec<T>> for Vec<T> {
1452 fn add(mut self, rhs: &[T]) -> Vec<T> {
1458 #[unsafe_destructor]
1459 impl<T> Drop for Vec<T> {
1460 fn drop(&mut self) {
1461 // This is (and should always remain) a no-op if the fields are
1462 // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
1465 for x in self.iter() {
1468 dealloc(*self.ptr, self.cap)
1475 impl<T> Default for Vec<T> {
1477 fn default() -> Vec<T> {
1482 #[experimental = "waiting on Show stability"]
1483 impl<T:fmt::Show> fmt::Show for Vec<T> {
1484 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1485 self.as_slice().fmt(f)
1489 impl<'a> fmt::FormatWriter for Vec<u8> {
1490 fn write(&mut self, buf: &[u8]) -> fmt::Result {
1496 ////////////////////////////////////////////////////////////////////////////////
1498 ////////////////////////////////////////////////////////////////////////////////
1500 #[experimental = "unclear how valuable this alias is"]
1501 /// A clone-on-write vector: a `Cow` that is either a borrowed `[T]` slice
/// or an owned `Vec<T>` (see the `IntoCow` impls below for both directions).
1502 pub type CowVec<'a, T> = Cow<'a, Vec<T>, [T]>;
1504 impl<'a, T> FromIterator<T> for CowVec<'a, T> where T: Clone {
1505 fn from_iter<I: Iterator<T>>(it: I) -> CowVec<'a, T> {
1506 Cow::Owned(FromIterator::from_iter(it))
1510 impl<'a, T: 'a> IntoCow<'a, Vec<T>, [T]> for Vec<T> where T: Clone {
1511 fn into_cow(self) -> CowVec<'a, T> {
1516 impl<'a, T> IntoCow<'a, Vec<T>, [T]> for &'a [T] where T: Clone {
1517 fn into_cow(self) -> CowVec<'a, T> {
1522 ////////////////////////////////////////////////////////////////////////////////
1524 ////////////////////////////////////////////////////////////////////////////////
1526 /// An iterator that moves out of a vector.
1528 pub struct IntoIter<T> {
1529 allocation: *mut T, // the block of memory allocated for the vector
1530 cap: uint, // the capacity of the vector
// Old name for `IntoIter`, kept as a deprecated type alias so existing
// callers keep compiling after the rename.
1535 #[deprecated = "use IntoIter instead"]
1536 pub type MoveItems<T> = IntoIter<T>;
1538 impl<T> IntoIter<T> {
1540 /// Drops all items that have not yet been moved and returns the empty vector.
1542 pub fn into_inner(mut self) -> Vec<T> {
1545 let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
1547 Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
1551 /// Deprecated, use .into_inner() instead
1552 #[deprecated = "use .into_inner() instead"]
// Thin forwarding shim retained for backwards compatibility; behaviour is
// identical to `into_inner` (drops remaining items, returns the empty Vec).
1553 pub fn unwrap(self) -> Vec<T> { self.into_inner() }
1556 impl<T> Iterator<T> for IntoIter<T> {
1558 fn next<'a>(&'a mut self) -> Option<T> {
1560 if self.ptr == self.end {
1563 if mem::size_of::<T>() == 0 {
1564 // purposefully don't use 'ptr.offset' because for
1565 // vectors with 0-size elements this would return the
1567 self.ptr = mem::transmute(self.ptr as uint + 1);
1569 // Use a non-null pointer value
1570 Some(ptr::read(mem::transmute(1u)))
1573 self.ptr = self.ptr.offset(1);
1575 Some(ptr::read(old))
1582 fn size_hint(&self) -> (uint, Option<uint>) {
1583 let diff = (self.end as uint) - (self.ptr as uint);
1584 let size = mem::size_of::<T>();
1585 let exact = diff / (if size == 0 {1} else {size});
1586 (exact, Some(exact))
1590 impl<T> DoubleEndedIterator<T> for IntoIter<T> {
1592 fn next_back<'a>(&'a mut self) -> Option<T> {
1594 if self.end == self.ptr {
1597 if mem::size_of::<T>() == 0 {
1598 // See above for why 'ptr.offset' isn't used
1599 self.end = mem::transmute(self.end as uint - 1);
1601 // Use a non-null pointer value
1602 Some(ptr::read(mem::transmute(1u)))
1604 self.end = self.end.offset(-1);
1606 Some(ptr::read(mem::transmute(self.end)))
// The `Iterator` impl's `size_hint` above computes an exact count from the
// ptr/end distance (lower == upper), so advertising an exact length is sound.
1613 impl<T> ExactSizeIterator<T> for IntoIter<T> {}
1615 #[unsafe_destructor]
1616 impl<T> Drop for IntoIter<T> {
1617 fn drop(&mut self) {
1618 // destroy the remaining elements
1622 dealloc(self.allocation, self.cap);
1628 /// An iterator that drains a vector.
1629 #[unsafe_no_drop_flag]
1630 #[unstable = "recently added as part of collections reform 2"]
1631 pub struct Drain<'a, T> {
1634 marker: ContravariantLifetime<'a>,
1637 impl<'a, T> Iterator<T> for Drain<'a, T> {
1639 fn next(&mut self) -> Option<T> {
1641 if self.ptr == self.end {
1644 if mem::size_of::<T>() == 0 {
1645 // purposefully don't use 'ptr.offset' because for
1646 // vectors with 0-size elements this would return the
1648 self.ptr = mem::transmute(self.ptr as uint + 1);
1650 // Use a non-null pointer value
1651 Some(ptr::read(mem::transmute(1u)))
1654 self.ptr = self.ptr.offset(1);
1656 Some(ptr::read(old))
1663 fn size_hint(&self) -> (uint, Option<uint>) {
1664 let diff = (self.end as uint) - (self.ptr as uint);
1665 let size = mem::size_of::<T>();
1666 let exact = diff / (if size == 0 {1} else {size});
1667 (exact, Some(exact))
1671 impl<'a, T> DoubleEndedIterator<T> for Drain<'a, T> {
1673 fn next_back(&mut self) -> Option<T> {
1675 if self.end == self.ptr {
1678 if mem::size_of::<T>() == 0 {
1679 // See above for why 'ptr.offset' isn't used
1680 self.end = mem::transmute(self.end as uint - 1);
1682 // Use a non-null pointer value
1683 Some(ptr::read(mem::transmute(1u)))
1685 self.end = self.end.offset(-1);
1687 Some(ptr::read(self.end))
// As with `IntoIter`: `Drain`'s `size_hint` is exact (derived from the
// ptr/end distance above), so the exact-size marker impl is sound.
1694 impl<'a, T> ExactSizeIterator<T> for Drain<'a, T> {}
1696 #[unsafe_destructor]
1697 impl<'a, T> Drop for Drain<'a, T> {
1698 fn drop(&mut self) {
1699 // self.ptr == self.end == null if drop has already been called,
1700 // so we can use #[unsafe_no_drop_flag].
1702 // destroy the remaining elements
1707 ////////////////////////////////////////////////////////////////////////////////
1708 // Conversion from &[T] to &Vec<T>
1709 ////////////////////////////////////////////////////////////////////////////////
1711 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
1713 pub struct DerefVec<'a, T> {
1715 l: ContravariantLifetime<'a>
1719 impl<'a, T> Deref<Vec<T>> for DerefVec<'a, T> {
1720 fn deref<'b>(&'b self) -> &'b Vec<T> {
1725 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
1726 #[unsafe_destructor]
1728 impl<'a, T> Drop for DerefVec<'a, T> {
1729 fn drop(&mut self) {
1735 /// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
1737 pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
1740 x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
1741 l: ContravariantLifetime::<'a>
1746 ////////////////////////////////////////////////////////////////////////////////
1747 // Raw module (deprecated)
1748 ////////////////////////////////////////////////////////////////////////////////
1750 /// Unsafe vector operations.
1755 /// Constructs a vector from an unsafe pointer to a buffer.
1757 /// The elements of the buffer are copied into the vector without cloning,
1758 /// as if `ptr::read()` were called on them.
1760 #[deprecated = "renamed to Vec::from_raw_buf"]
1761 pub unsafe fn from_buf<T>(ptr: *const T, elts: uint) -> Vec<T> {
1762 Vec::from_raw_buf(ptr, elts)
1766 ////////////////////////////////////////////////////////////////////////////////
1767 // Partial vec, used for map_in_place
1768 ////////////////////////////////////////////////////////////////////////////////
1770 /// An owned, partially type-converted vector of elements with non-zero size.
1772 /// `T` and `U` must have the same, non-zero size. They must also have the same
1775 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1776 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1777 /// destructed. Additionally the underlying storage of `vec` will be freed.
1778 struct PartialVecNonZeroSized<T,U> {
1787 /// An owned, partially type-converted vector of zero-sized elements.
1789 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
1791 struct PartialVecZeroSized<T,U> {
1794 marker_t: InvariantType<T>,
1795 marker_u: InvariantType<U>,
1798 #[unsafe_destructor]
1799 impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
1800 fn drop(&mut self) {
1802 // `vec` hasn't been modified until now. As it has a length
1803 // currently, this would run destructors of `T`s which might not be
1804 // there. So at first, set `vec`s length to `0`. This must be done
1805 // at first to remain memory-safe as the destructors of `U` or `T`
1806 // might cause unwinding where `vec`s destructor would be executed.
1807 self.vec.set_len(0);
1809 // We have instances of `U`s and `T`s in `vec`. Destruct them.
1810 while self.start_u != self.end_u {
1811 let _ = ptr::read(self.start_u as *const U); // Run a `U` destructor.
1812 self.start_u = self.start_u.offset(1);
1814 while self.start_t != self.end_t {
1815 let _ = ptr::read(self.start_t as *const T); // Run a `T` destructor.
1816 self.start_t = self.start_t.offset(1);
1818 // After this destructor ran, the destructor of `vec` will run,
1819 // deallocating the underlying memory.
1824 #[unsafe_destructor]
1825 impl<T,U> Drop for PartialVecZeroSized<T,U> {
1826 fn drop(&mut self) {
1828 // Destruct the instances of `T` and `U` this struct owns.
1829 while self.num_t != 0 {
1830 let _: T = mem::uninitialized(); // Run a `T` destructor.
1833 while self.num_u != 0 {
1834 let _: U = mem::uninitialized(); // Run a `U` destructor.
1844 use core::mem::size_of;
1846 use super::{as_vec, unzip, raw};
1848 struct DropCounter<'a> {
1852 #[unsafe_destructor]
1853 impl<'a> Drop for DropCounter<'a> {
1854 fn drop(&mut self) {
1861 let xs = [1u8, 2u8, 3u8];
1862 assert_eq!(as_vec(&xs).as_slice(), xs);
1866 fn test_as_vec_dtor() {
1867 let (mut count_x, mut count_y) = (0, 0);
1869 let xs = &[DropCounter { count: &mut count_x }, DropCounter { count: &mut count_y }];
1870 assert_eq!(as_vec(xs).len(), 2);
1872 assert_eq!(count_x, 1);
1873 assert_eq!(count_y, 1);
1877 fn test_small_vec_struct() {
1878 assert!(size_of::<Vec<u8>>() == size_of::<uint>() * 3);
1882 fn test_double_drop() {
1888 let (mut count_x, mut count_y) = (0, 0);
1890 let mut tv = TwoVec {
1894 tv.x.push(DropCounter {count: &mut count_x});
1895 tv.y.push(DropCounter {count: &mut count_y});
1897 // If Vec had a drop flag, here is where it would be zeroed.
1898 // Instead, it should rely on its internal state to prevent
1899 // doing anything significant when dropped multiple times.
1902 // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
1905 assert_eq!(count_x, 1);
1906 assert_eq!(count_y, 1);
1911 let mut v = Vec::new();
1912 assert_eq!(v.capacity(), 0);
1915 assert!(v.capacity() >= 2);
1917 for i in range(0i, 16) {
1921 assert!(v.capacity() >= 16);
1923 assert!(v.capacity() >= 32);
1928 assert!(v.capacity() >= 33)
1933 let mut v = Vec::new();
1934 let mut w = Vec::new();
1936 v.extend(range(0i, 3));
1937 for i in range(0i, 3) { w.push(i) }
1941 v.extend(range(3i, 10));
1942 for i in range(3i, 10) { w.push(i) }
1948 fn test_slice_from_mut() {
1949 let mut values = vec![1u8,2,3,4,5];
1951 let slice = values.slice_from_mut(2);
1952 assert!(slice == [3, 4, 5]);
1953 for p in slice.iter_mut() {
1958 assert!(values == [1, 2, 5, 6, 7]);
1962 fn test_slice_to_mut() {
1963 let mut values = vec![1u8,2,3,4,5];
1965 let slice = values.slice_to_mut(2);
1966 assert!(slice == [1, 2]);
1967 for p in slice.iter_mut() {
1972 assert!(values == [2, 3, 3, 4, 5]);
1976 fn test_split_at_mut() {
1977 let mut values = vec![1u8,2,3,4,5];
1979 let (left, right) = values.split_at_mut(2);
1981 let left: &[_] = left;
1982 assert!(left[0..left.len()] == [1, 2][]);
1984 for p in left.iter_mut() {
1989 let right: &[_] = right;
1990 assert!(right[0..right.len()] == [3, 4, 5][]);
1992 for p in right.iter_mut() {
1997 assert!(values == vec![2u8, 3, 5, 6, 7]);
2002 let v: Vec<int> = vec!();
2003 let w = vec!(1i, 2, 3);
2005 assert_eq!(v, v.clone());
2009 // they should be disjoint in memory.
2010 assert!(w.as_ptr() != z.as_ptr())
2014 fn test_clone_from() {
2016 let three = vec!(box 1i, box 2, box 3);
2017 let two = vec!(box 4i, box 5);
2019 v.clone_from(&three);
2020 assert_eq!(v, three);
2023 v.clone_from(&three);
2024 assert_eq!(v, three);
2031 v.clone_from(&three);
2032 assert_eq!(v, three)
2037 let mut v = vec![0u, 1];
2038 v.grow_fn(3, |i| i);
2039 assert!(v == vec![0u, 1, 0, 1, 2]);
2044 let mut vec = vec![1u, 2, 3, 4];
2045 vec.retain(|&x| x % 2 == 0);
2046 assert!(vec == vec![2u, 4]);
2050 fn zero_sized_values() {
2051 let mut v = Vec::new();
2052 assert_eq!(v.len(), 0);
2054 assert_eq!(v.len(), 1);
2056 assert_eq!(v.len(), 2);
2057 assert_eq!(v.pop(), Some(()));
2058 assert_eq!(v.pop(), Some(()));
2059 assert_eq!(v.pop(), None);
2061 assert_eq!(v.iter().count(), 0);
2063 assert_eq!(v.iter().count(), 1);
2065 assert_eq!(v.iter().count(), 2);
2067 for &() in v.iter() {}
2069 assert_eq!(v.iter_mut().count(), 2);
2071 assert_eq!(v.iter_mut().count(), 3);
2073 assert_eq!(v.iter_mut().count(), 4);
2075 for &() in v.iter_mut() {}
2076 unsafe { v.set_len(0); }
2077 assert_eq!(v.iter_mut().count(), 0);
2081 fn test_partition() {
2082 assert_eq!(vec![].partition(|x: &int| *x < 3), (vec![], vec![]));
2083 assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
2084 assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 2), (vec![1], vec![2, 3]));
2085 assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
2089 fn test_partitioned() {
2090 assert_eq!(vec![].partitioned(|x: &int| *x < 3), (vec![], vec![]));
2091 assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
2092 assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 2), (vec![1], vec![2, 3]));
2093 assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
2097 fn test_zip_unzip() {
2098 let z1 = vec![(1i, 4i), (2, 5), (3, 6)];
2100 let (left, right) = unzip(z1.iter().map(|&x| x));
2102 assert_eq!((1, 4), (left[0], right[0]));
2103 assert_eq!((2, 5), (left[1], right[1]));
2104 assert_eq!((3, 6), (left[2], right[2]));
2108 fn test_unsafe_ptrs() {
2110 // Test on-stack copy-from-buf.
2112 let ptr = a.as_ptr();
2113 let b = raw::from_buf(ptr, 3u);
2114 assert_eq!(b, vec![1, 2, 3]);
2116 // Test on-heap copy-from-buf.
2117 let c = vec![1i, 2, 3, 4, 5];
2118 let ptr = c.as_ptr();
2119 let d = raw::from_buf(ptr, 5u);
2120 assert_eq!(d, vec![1, 2, 3, 4, 5]);
2125 fn test_vec_truncate_drop() {
2126 static mut drops: uint = 0;
2128 impl Drop for Elem {
2129 fn drop(&mut self) {
2130 unsafe { drops += 1; }
2134 let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
2135 assert_eq!(unsafe { drops }, 0);
2137 assert_eq!(unsafe { drops }, 2);
2139 assert_eq!(unsafe { drops }, 5);
2144 fn test_vec_truncate_fail() {
2145 struct BadElem(int);
2146 impl Drop for BadElem {
2147 fn drop(&mut self) {
2148 let BadElem(ref mut x) = *self;
2149 if *x == 0xbadbeef {
2150 panic!("BadElem panic: 0xbadbeef")
2155 let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
2161 let vec = vec!(1i, 2, 3);
2162 assert!(vec[1] == 2);
2167 fn test_index_out_of_bounds() {
2168 let vec = vec!(1i, 2, 3);
2174 fn test_slice_out_of_bounds_1() {
2175 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2181 fn test_slice_out_of_bounds_2() {
2182 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2188 fn test_slice_out_of_bounds_3() {
2189 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2195 fn test_slice_out_of_bounds_4() {
2196 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2202 fn test_slice_out_of_bounds_5() {
2203 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2209 fn test_swap_remove_empty() {
2210 let mut vec: Vec<uint> = vec!();
2215 fn test_move_iter_unwrap() {
2216 let mut vec: Vec<uint> = Vec::with_capacity(7);
2219 let ptr = vec.as_ptr();
2220 vec = vec.into_iter().unwrap();
2221 assert_eq!(vec.as_ptr(), ptr);
2222 assert_eq!(vec.capacity(), 7);
2223 assert_eq!(vec.len(), 0);
2228 fn test_map_in_place_incompatible_types_fail() {
2229 let v = vec![0u, 1, 2];
2230 v.map_in_place(|_| ());
2234 fn test_map_in_place() {
2235 let v = vec![0u, 1, 2];
2236 assert_eq!(v.map_in_place(|i: uint| i as int - 1), [-1i, 0, 1]);
2240 fn test_map_in_place_zero_sized() {
2241 let v = vec![(), ()];
2242 #[deriving(PartialEq, Show)]
2244 assert_eq!(v.map_in_place(|_| ZeroSized), [ZeroSized, ZeroSized]);
2248 fn test_map_in_place_zero_drop_count() {
2249 use std::sync::atomic;
2250 use std::sync::atomic::AtomicUint;
2252 #[deriving(Clone, PartialEq, Show)]
2254 impl Drop for Nothing { fn drop(&mut self) { } }
2256 #[deriving(Clone, PartialEq, Show)]
2258 impl Drop for ZeroSized {
2259 fn drop(&mut self) {
2260 DROP_COUNTER.fetch_add(1, atomic::Relaxed);
2263 const NUM_ELEMENTS: uint = 2;
2264 static DROP_COUNTER: AtomicUint = atomic::INIT_ATOMIC_UINT;
2266 let v = Vec::from_elem(NUM_ELEMENTS, Nothing);
2268 DROP_COUNTER.store(0, atomic::Relaxed);
2270 let v = v.map_in_place(|_| ZeroSized);
2271 assert_eq!(DROP_COUNTER.load(atomic::Relaxed), 0);
2273 assert_eq!(DROP_COUNTER.load(atomic::Relaxed), NUM_ELEMENTS);
2277 fn test_move_items() {
2278 let vec = vec![1, 2, 3];
2279 let mut vec2 : Vec<i32> = vec![];
2280 for i in vec.into_iter() {
2283 assert!(vec2 == vec![1, 2, 3]);
2287 fn test_move_items_reverse() {
2288 let vec = vec![1, 2, 3];
2289 let mut vec2 : Vec<i32> = vec![];
2290 for i in vec.into_iter().rev() {
2293 assert!(vec2 == vec![3, 2, 1]);
2297 fn test_move_items_zero_sized() {
2298 let vec = vec![(), (), ()];
2299 let mut vec2 : Vec<()> = vec![];
2300 for i in vec.into_iter() {
2303 assert!(vec2 == vec![(), (), ()]);
2307 fn test_drain_items() {
2308 let mut vec = vec![1, 2, 3];
2309 let mut vec2: Vec<i32> = vec![];
2310 for i in vec.drain() {
2313 assert_eq!(vec, []);
2314 assert_eq!(vec2, [ 1, 2, 3 ]);
2318 fn test_drain_items_reverse() {
2319 let mut vec = vec![1, 2, 3];
2320 let mut vec2: Vec<i32> = vec![];
2321 for i in vec.drain().rev() {
2324 assert_eq!(vec, []);
2325 assert_eq!(vec2, [ 3, 2, 1 ]);
2329 fn test_drain_items_zero_sized() {
2330 let mut vec = vec![(), (), ()];
2331 let mut vec2: Vec<()> = vec![];
2332 for i in vec.drain() {
2335 assert_eq!(vec, []);
2336 assert_eq!(vec2, [(), (), ()]);
2340 fn test_into_boxed_slice() {
2341 let xs = vec![1u, 2, 3];
2342 let ys = xs.into_boxed_slice();
2343 assert_eq!(ys.as_slice(), [1u, 2, 3]);
2347 fn bench_new(b: &mut Bencher) {
2349 let v: Vec<uint> = Vec::new();
2350 assert_eq!(v.len(), 0);
2351 assert_eq!(v.capacity(), 0);
2355 fn do_bench_with_capacity(b: &mut Bencher, src_len: uint) {
2356 b.bytes = src_len as u64;
2359 let v: Vec<uint> = Vec::with_capacity(src_len);
2360 assert_eq!(v.len(), 0);
2361 assert_eq!(v.capacity(), src_len);
2366 fn bench_with_capacity_0000(b: &mut Bencher) {
2367 do_bench_with_capacity(b, 0)
2371 fn bench_with_capacity_0010(b: &mut Bencher) {
2372 do_bench_with_capacity(b, 10)
2376 fn bench_with_capacity_0100(b: &mut Bencher) {
2377 do_bench_with_capacity(b, 100)
2381 fn bench_with_capacity_1000(b: &mut Bencher) {
2382 do_bench_with_capacity(b, 1000)
2385 fn do_bench_from_fn(b: &mut Bencher, src_len: uint) {
2386 b.bytes = src_len as u64;
2389 let dst = Vec::from_fn(src_len, |i| i);
2390 assert_eq!(dst.len(), src_len);
2391 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2396 fn bench_from_fn_0000(b: &mut Bencher) {
2397 do_bench_from_fn(b, 0)
2401 fn bench_from_fn_0010(b: &mut Bencher) {
2402 do_bench_from_fn(b, 10)
2406 fn bench_from_fn_0100(b: &mut Bencher) {
2407 do_bench_from_fn(b, 100)
2411 fn bench_from_fn_1000(b: &mut Bencher) {
2412 do_bench_from_fn(b, 1000)
2415 fn do_bench_from_elem(b: &mut Bencher, src_len: uint) {
2416 b.bytes = src_len as u64;
2419 let dst: Vec<uint> = Vec::from_elem(src_len, 5);
2420 assert_eq!(dst.len(), src_len);
2421 assert!(dst.iter().all(|x| *x == 5));
2426 fn bench_from_elem_0000(b: &mut Bencher) {
2427 do_bench_from_elem(b, 0)
2431 fn bench_from_elem_0010(b: &mut Bencher) {
2432 do_bench_from_elem(b, 10)
2436 fn bench_from_elem_0100(b: &mut Bencher) {
2437 do_bench_from_elem(b, 100)
2441 fn bench_from_elem_1000(b: &mut Bencher) {
2442 do_bench_from_elem(b, 1000)
2445 fn do_bench_from_slice(b: &mut Bencher, src_len: uint) {
2446 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2448 b.bytes = src_len as u64;
2451 let dst = src.clone().as_slice().to_vec();
2452 assert_eq!(dst.len(), src_len);
2453 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2458 fn bench_from_slice_0000(b: &mut Bencher) {
2459 do_bench_from_slice(b, 0)
2463 fn bench_from_slice_0010(b: &mut Bencher) {
2464 do_bench_from_slice(b, 10)
2468 fn bench_from_slice_0100(b: &mut Bencher) {
2469 do_bench_from_slice(b, 100)
2473 fn bench_from_slice_1000(b: &mut Bencher) {
2474 do_bench_from_slice(b, 1000)
2477 fn do_bench_from_iter(b: &mut Bencher, src_len: uint) {
2478 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2480 b.bytes = src_len as u64;
2483 let dst: Vec<uint> = FromIterator::from_iter(src.clone().into_iter());
2484 assert_eq!(dst.len(), src_len);
2485 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2490 fn bench_from_iter_0000(b: &mut Bencher) {
2491 do_bench_from_iter(b, 0)
2495 fn bench_from_iter_0010(b: &mut Bencher) {
2496 do_bench_from_iter(b, 10)
2500 fn bench_from_iter_0100(b: &mut Bencher) {
2501 do_bench_from_iter(b, 100)
2505 fn bench_from_iter_1000(b: &mut Bencher) {
2506 do_bench_from_iter(b, 1000)
2509 fn do_bench_extend(b: &mut Bencher, dst_len: uint, src_len: uint) {
2510 let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len));
2511 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2513 b.bytes = src_len as u64;
2516 let mut dst = dst.clone();
2517 dst.extend(src.clone().into_iter());
2518 assert_eq!(dst.len(), dst_len + src_len);
2519 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2524 fn bench_extend_0000_0000(b: &mut Bencher) {
2525 do_bench_extend(b, 0, 0)
2529 fn bench_extend_0000_0010(b: &mut Bencher) {
2530 do_bench_extend(b, 0, 10)
2534 fn bench_extend_0000_0100(b: &mut Bencher) {
2535 do_bench_extend(b, 0, 100)
2539 fn bench_extend_0000_1000(b: &mut Bencher) {
2540 do_bench_extend(b, 0, 1000)
2544 fn bench_extend_0010_0010(b: &mut Bencher) {
2545 do_bench_extend(b, 10, 10)
2549 fn bench_extend_0100_0100(b: &mut Bencher) {
2550 do_bench_extend(b, 100, 100)
2554 fn bench_extend_1000_1000(b: &mut Bencher) {
2555 do_bench_extend(b, 1000, 1000)
2558 fn do_bench_push_all(b: &mut Bencher, dst_len: uint, src_len: uint) {
2559 let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len));
2560 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2562 b.bytes = src_len as u64;
2565 let mut dst = dst.clone();
2566 dst.push_all(src.as_slice());
2567 assert_eq!(dst.len(), dst_len + src_len);
2568 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2573 fn bench_push_all_0000_0000(b: &mut Bencher) {
2574 do_bench_push_all(b, 0, 0)
2578 fn bench_push_all_0000_0010(b: &mut Bencher) {
2579 do_bench_push_all(b, 0, 10)
2583 fn bench_push_all_0000_0100(b: &mut Bencher) {
2584 do_bench_push_all(b, 0, 100)
2588 fn bench_push_all_0000_1000(b: &mut Bencher) {
2589 do_bench_push_all(b, 0, 1000)
2593 fn bench_push_all_0010_0010(b: &mut Bencher) {
2594 do_bench_push_all(b, 10, 10)
2598 fn bench_push_all_0100_0100(b: &mut Bencher) {
2599 do_bench_push_all(b, 100, 100)
2603 fn bench_push_all_1000_1000(b: &mut Bencher) {
2604 do_bench_push_all(b, 1000, 1000)
2607 fn do_bench_push_all_move(b: &mut Bencher, dst_len: uint, src_len: uint) {
2608 let dst: Vec<uint> = FromIterator::from_iter(range(0u, dst_len));
2609 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2611 b.bytes = src_len as u64;
2614 let mut dst = dst.clone();
2615 dst.extend(src.clone().into_iter());
2616 assert_eq!(dst.len(), dst_len + src_len);
2617 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2622 fn bench_push_all_move_0000_0000(b: &mut Bencher) {
2623 do_bench_push_all_move(b, 0, 0)
2627 fn bench_push_all_move_0000_0010(b: &mut Bencher) {
2628 do_bench_push_all_move(b, 0, 10)
2632 fn bench_push_all_move_0000_0100(b: &mut Bencher) {
2633 do_bench_push_all_move(b, 0, 100)
2637 fn bench_push_all_move_0000_1000(b: &mut Bencher) {
2638 do_bench_push_all_move(b, 0, 1000)
2642 fn bench_push_all_move_0010_0010(b: &mut Bencher) {
2643 do_bench_push_all_move(b, 10, 10)
2647 fn bench_push_all_move_0100_0100(b: &mut Bencher) {
2648 do_bench_push_all_move(b, 100, 100)
2652 fn bench_push_all_move_1000_1000(b: &mut Bencher) {
2653 do_bench_push_all_move(b, 1000, 1000)
2656 fn do_bench_clone(b: &mut Bencher, src_len: uint) {
2657 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2659 b.bytes = src_len as u64;
2662 let dst = src.clone();
2663 assert_eq!(dst.len(), src_len);
2664 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2669 fn bench_clone_0000(b: &mut Bencher) {
2670 do_bench_clone(b, 0)
2674 fn bench_clone_0010(b: &mut Bencher) {
2675 do_bench_clone(b, 10)
2679 fn bench_clone_0100(b: &mut Bencher) {
2680 do_bench_clone(b, 100)
2684 fn bench_clone_1000(b: &mut Bencher) {
2685 do_bench_clone(b, 1000)
2688 fn do_bench_clone_from(b: &mut Bencher, times: uint, dst_len: uint, src_len: uint) {
2689 let dst: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2690 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2692 b.bytes = (times * src_len) as u64;
2695 let mut dst = dst.clone();
2697 for _ in range(0, times) {
2698 dst.clone_from(&src);
2700 assert_eq!(dst.len(), src_len);
2701 assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x));
2707 fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
2708 do_bench_clone_from(b, 1, 0, 0)
2712 fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
2713 do_bench_clone_from(b, 1, 0, 10)
2717 fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
2718 do_bench_clone_from(b, 1, 0, 100)
2722 fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
2723 do_bench_clone_from(b, 1, 0, 1000)
2727 fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
2728 do_bench_clone_from(b, 1, 10, 10)
2732 fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
2733 do_bench_clone_from(b, 1, 100, 100)
2737 fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
2738 do_bench_clone_from(b, 1, 1000, 1000)
2742 fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
2743 do_bench_clone_from(b, 1, 10, 100)
2747 fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
2748 do_bench_clone_from(b, 1, 100, 1000)
2752 fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
2753 do_bench_clone_from(b, 1, 10, 0)
2757 fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
2758 do_bench_clone_from(b, 1, 100, 10)
2762 fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
2763 do_bench_clone_from(b, 1, 1000, 100)
2767 fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
2768 do_bench_clone_from(b, 10, 0, 0)
2772 fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
2773 do_bench_clone_from(b, 10, 0, 10)
2777 fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
2778 do_bench_clone_from(b, 10, 0, 100)
2782 fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
2783 do_bench_clone_from(b, 10, 0, 1000)
2787 fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
2788 do_bench_clone_from(b, 10, 10, 10)
2792 fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
2793 do_bench_clone_from(b, 10, 100, 100)
2797 fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
2798 do_bench_clone_from(b, 10, 1000, 1000)
2802 fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
2803 do_bench_clone_from(b, 10, 10, 100)
2807 fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
2808 do_bench_clone_from(b, 10, 100, 1000)
2812 fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
2813 do_bench_clone_from(b, 10, 10, 0)
2817 fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
2818 do_bench_clone_from(b, 10, 100, 10)
2822 fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
2823 do_bench_clone_from(b, 10, 1000, 100)