1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type, written `Vec<T>` but pronounced 'vector.'
13 //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end).
17 //! Explicitly creating a `Vec<T>` with `new()`:
20 //! let xs: Vec<i32> = Vec::new();
23 //! Using the `vec!` macro:
26 //! let ys: Vec<i32> = vec![];
28 //! let zs = vec![1i32, 2, 3, 4, 5];
34 //! let mut xs = vec![1i32, 2];
42 //! let mut xs = vec![1i32, 2];
44 //! let two = xs.pop();
49 use alloc::boxed::Box;
50 use alloc::heap::{EMPTY, allocate, reallocate, deallocate};
51 use core::borrow::{Cow, IntoCow};
53 use core::default::Default;
55 use core::hash::{mod, Hash};
56 use core::kinds::marker::{ContravariantLifetime, InvariantType};
58 use core::num::{Int, UnsignedInt};
61 use core::raw::Slice as RawSlice;
64 use slice::CloneSliceExt;
66 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
71 /// let mut vec = Vec::new();
75 /// assert_eq!(vec.len(), 2);
76 /// assert_eq!(vec[0], 1);
78 /// assert_eq!(vec.pop(), Some(2));
79 /// assert_eq!(vec.len(), 1);
82 /// assert_eq!(vec[0], 7);
84 /// vec.push_all(&[1, 2, 3]);
86 /// for x in vec.iter() {
87 /// println!("{}", x);
89 /// assert_eq!(vec, vec![7i, 1, 2, 3]);
92 /// The `vec!` macro is provided to make initialization more convenient:
95 /// let mut vec = vec![1i, 2i, 3i];
97 /// assert_eq!(vec, vec![1, 2, 3, 4]);
100 /// Use a `Vec<T>` as an efficient stack:
103 /// let mut stack = Vec::new();
110 /// let top = match stack.pop() {
111 /// None => break, // empty
114 /// // Prints 3, 2, 1
115 /// println!("{}", top);
119 /// # Capacity and reallocation
121 /// The capacity of a vector is the amount of space allocated for any future elements that will be
122 /// added onto the vector. This is not to be confused with the *length* of a vector, which
123 /// specifies the number of actual elements within the vector. If a vector's length exceeds its
124 /// capacity, its capacity will automatically be increased, but its elements will have to be
127 /// For example, a vector with capacity 10 and length 0 would be an empty vector with space for 10
128 /// more elements. Pushing 10 or fewer elements onto the vector will not change its capacity or
129 /// cause reallocation to occur. However, if the vector's length is increased to 11, it will have
130 /// to reallocate, which can be slow. For this reason, it is recommended to use
131 /// `Vec::with_capacity` whenever possible to specify how big the vector is expected to get.
132 #[unsafe_no_drop_flag]
140 /// A clone-on-write vector
141 pub type CowVec<'a, T> = Cow<'a, Vec<T>, [T]>;
143 impl<'a, T> FromIterator<T> for CowVec<'a, T> where T: Clone {
144 fn from_iter<I: Iterator<T>>(it: I) -> CowVec<'a, T> {
145 Cow::Owned(FromIterator::from_iter(it))
149 impl<'a, T: 'a> IntoCow<'a, Vec<T>, [T]> for Vec<T> where T: Clone {
150 fn into_cow(self) -> CowVec<'a, T> {
155 impl<'a, T> IntoCow<'a, Vec<T>, [T]> for &'a [T] where T: Clone {
156 fn into_cow(self) -> CowVec<'a, T> {
162 /// Constructs a new, empty `Vec<T>`.
164 /// The vector will not allocate until elements are pushed onto it.
169 /// let mut vec: Vec<int> = Vec::new();
173 pub fn new() -> Vec<T> {
174 // We want ptr to never be NULL so instead we set it to some arbitrary
175 // non-null value which is fine since we never call deallocate on the ptr
176 // if cap is 0. The reason for this is because the pointer of a slice
177 // being NULL would break the null pointer optimization for enums.
178 Vec { ptr: EMPTY as *mut T, len: 0, cap: 0 }
// NOTE(review): the function's closing brace is elided in this sampled view;
// the body is just the single struct expression above.
181 /// Constructs a new, empty `Vec<T>` with the specified capacity.
183 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
184 /// `capacity` is 0, the vector will not allocate.
186 /// It is important to note that this function does not specify the *length* of the returned
187 /// vector, but only the *capacity*. (For an explanation of the difference between length and
188 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.) To create a
189 /// vector of a given length, use `Vec::from_elem` or `Vec::from_fn`.
194 /// let mut vec: Vec<int> = Vec::with_capacity(10);
196 /// // The vector contains no items, even though it has capacity for more
197 /// assert_eq!(vec.len(), 0);
199 /// // These are all done without reallocating...
200 /// for i in range(0i, 10) {
204 /// // ...but this may make the vector reallocate
209 pub fn with_capacity(capacity: uint) -> Vec<T> {
// Zero-sized types occupy no storage, so no buffer is ever allocated and the
// advertised capacity is effectively unbounded.
210 if mem::size_of::<T>() == 0 {
211 Vec { ptr: EMPTY as *mut T, len: 0, cap: uint::MAX }
212 } else if capacity == 0 {
// NOTE(review): the body of the `capacity == 0` branch and the final `else`
// line are elided in this sampled view — presumably it returns `Vec::new()`;
// confirm against the full file.
// The element-count * element-size multiplication must not overflow `uint`.
215 let size = capacity.checked_mul(mem::size_of::<T>())
216 .expect("capacity overflow");
217 let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
218 if ptr.is_null() { ::alloc::oom() }
219 Vec { ptr: ptr as *mut T, len: 0, cap: capacity }
223 /// Creates and initializes a `Vec<T>`.
225 /// Creates a `Vec<T>` of size `length` and initializes the elements to the value returned by
226 /// the closure `op`.
231 /// let vec = Vec::from_fn(3, |idx| idx * 2);
232 /// assert_eq!(vec, vec![0, 2, 4]);
235 #[unstable = "the naming is uncertain as well as this migrating to unboxed \
236 closures in the future"]
237 pub fn from_fn<F>(length: uint, mut op: F) -> Vec<T> where F: FnMut(uint) -> T {
239 let mut xs = Vec::with_capacity(length);
240 while xs.len < length {
242 ptr::write(xs.unsafe_mut(len), op(len));
249 /// Creates a `Vec<T>` directly from the raw components of another vector.
251 /// This is highly unsafe, due to the number of invariants that aren't checked.
260 /// let mut v = vec![1i, 2, 3];
262 /// // Pull out the various important pieces of information about `v`
263 /// let p = v.as_mut_ptr();
264 /// let len = v.len();
265 /// let cap = v.capacity();
268 /// // Cast `v` into the void: no destructor run, so we are in
269 /// // complete control of the allocation to which `p` points.
272 /// // Overwrite memory with 4, 5, 6
273 /// for i in range(0, len as int) {
274 /// ptr::write(p.offset(i), 4 + i);
277 /// // Put everything back together into a Vec
278 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
279 /// assert_eq!(rebuilt, vec![4i, 5i, 6i]);
283 #[unstable = "needs finalization"]
// Unsafe: trusts the caller completely. Per the doc comment above, the
// invariants (valid allocation of `capacity` elements, `length` of them
// initialized, exclusive ownership) are not checked here.
284 pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
285 capacity: uint) -> Vec<T> {
286 Vec { ptr: ptr, len: length, cap: capacity }
289 /// Creates a vector by copying the elements from a raw pointer.
291 /// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation
292 /// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector
293 /// without cloning, as if `ptr::read()` were called on them.
295 #[unstable = "just renamed from raw::from_buf"]
296 pub unsafe fn from_raw_buf(ptr: *const T, elts: uint) -> Vec<T> {
297 let mut dst = Vec::with_capacity(elts);
299 ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
303 /// Consumes the `Vec<T>`, partitioning it based on a predicate.
305 /// Partitions the `Vec<T>` into two `Vec<T>`s `(A,B)`, where all elements of `A` satisfy `f`
306 /// and all elements of `B` do not. The order of elements is preserved.
311 /// let vec = vec![1i, 2i, 3i, 4i];
312 /// let (even, odd) = vec.partition(|&n| n % 2 == 0);
313 /// assert_eq!(even, vec![2, 4]);
314 /// assert_eq!(odd, vec![1, 3]);
318 pub fn partition<F>(self, mut f: F) -> (Vec<T>, Vec<T>) where F: FnMut(&T) -> bool {
319 let mut lefts = Vec::new();
320 let mut rights = Vec::new();
322 for elt in self.into_iter() {
334 impl<T: Clone> Vec<T> {
335 /// Constructs a `Vec<T>` with copies of a value.
337 /// Creates a `Vec<T>` with `length` copies of `value`.
342 /// let vec = Vec::from_elem(3, "hi");
343 /// println!("{}", vec); // prints [hi, hi, hi]
346 #[unstable = "this functionality may become more generic over all collections"]
347 pub fn from_elem(length: uint, value: T) -> Vec<T> {
349 let mut xs = Vec::with_capacity(length);
350 while xs.len < length {
352 ptr::write(xs.unsafe_mut(len),
360 /// Appends all elements in a slice to the `Vec<T>`.
362 /// Iterates over the slice `other`, clones each element, and then appends
363 /// it to this `Vec<T>`. The `other` vector is traversed in-order.
368 /// let mut vec = vec![1i];
369 /// vec.push_all(&[2i, 3, 4]);
370 /// assert_eq!(vec, vec![1, 2, 3, 4]);
374 pub fn push_all(&mut self, other: &[T]) {
375 self.reserve(other.len());
377 for i in range(0, other.len()) {
378 let len = self.len();
380 // Unsafe code so this can be optimised to a memcpy (or something similarly
381 // fast) when T is Copy. LLVM is easily confused, so any extra operations
382 // during the loop can prevent this optimisation.
385 self.unsafe_mut(len),
386 other.unsafe_get(i).clone());
387 self.set_len(len + 1);
392 /// Grows the `Vec<T>` in-place.
394 /// Adds `n` copies of `value` to the `Vec<T>`.
399 /// let mut vec = vec!["hello"];
400 /// vec.grow(2, "world");
401 /// assert_eq!(vec, vec!["hello", "world", "world"]);
404 pub fn grow(&mut self, n: uint, value: T) {
406 let mut i: uint = 0u;
409 self.push(value.clone());
414 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
416 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
417 /// is larger than the current value of `len()` or not.
422 /// let mut vec = vec!["hello"];
423 /// vec.resize(3, "world");
424 /// assert_eq!(vec, vec!["hello", "world", "world"]);
426 /// let mut vec = vec![1i, 2, 3, 4];
427 /// vec.resize(2, 0);
428 /// assert_eq!(vec, vec![1, 2]);
430 #[unstable = "matches collection reform specification; waiting for dust to settle"]
431 pub fn resize(&mut self, new_len: uint, value: T) {
432 let len = self.len();
435 self.extend(repeat(value).take(new_len - len));
437 self.truncate(new_len);
441 /// Partitions a vector based on a predicate.
443 /// Clones the elements of the vector, partitioning them into two `Vec<T>`s
444 /// `(a, b)`, where all elements of `a` satisfy `f` and all elements of `b`
445 /// do not. The order of elements is preserved.
450 /// let vec = vec![1i, 2, 3, 4];
451 /// let (even, odd) = vec.partitioned(|&n| n % 2 == 0);
452 /// assert_eq!(even, vec![2i, 4]);
453 /// assert_eq!(odd, vec![1i, 3]);
456 pub fn partitioned<F>(&self, mut f: F) -> (Vec<T>, Vec<T>) where F: FnMut(&T) -> bool {
457 let mut lefts = Vec::new();
458 let mut rights = Vec::new();
460 for elt in self.iter() {
462 lefts.push(elt.clone());
464 rights.push(elt.clone());
473 impl<T:Clone> Clone for Vec<T> {
// Plain `clone`: copy out through the slice view into a fresh allocation.
474 fn clone(&self) -> Vec<T> { self.as_slice().to_vec() }
// `clone_from` overwrites `self` in place so that `self`'s buffer and its
// elements' own allocations can be reused.
// NOTE(review): several closing-brace lines are elided in this sampled view.
476 fn clone_from(&mut self, other: &Vec<T>) {
477 // drop anything in self that will not be overwritten
478 if self.len() > other.len() {
479 self.truncate(other.len())
482 // reuse the contained values' allocations/resources.
483 for (place, thing) in self.iter_mut().zip(other.iter()) {
484 place.clone_from(thing)
487 // self.len <= other.len due to the truncate above, so the
488 // slice here is always in-bounds.
489 let slice = other[self.len()..];
490 self.push_all(slice);
494 #[experimental = "waiting on Index stability"]
495 impl<T> Index<uint,T> for Vec<T> {
497 fn index<'a>(&'a self, index: &uint) -> &'a T {
498 &self.as_slice()[*index]
502 impl<T> IndexMut<uint,T> for Vec<T> {
504 fn index_mut<'a>(&'a mut self, index: &uint) -> &'a mut T {
505 &mut self.as_mut_slice()[*index]
509 impl<T> ops::Slice<uint, [T]> for Vec<T> {
511 fn as_slice_<'a>(&'a self) -> &'a [T] {
516 fn slice_from_or_fail<'a>(&'a self, start: &uint) -> &'a [T] {
517 self.as_slice().slice_from_or_fail(start)
521 fn slice_to_or_fail<'a>(&'a self, end: &uint) -> &'a [T] {
522 self.as_slice().slice_to_or_fail(end)
525 fn slice_or_fail<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] {
526 self.as_slice().slice_or_fail(start, end)
530 impl<T> ops::SliceMut<uint, [T]> for Vec<T> {
532 fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] {
537 fn slice_from_or_fail_mut<'a>(&'a mut self, start: &uint) -> &'a mut [T] {
538 self.as_mut_slice().slice_from_or_fail_mut(start)
542 fn slice_to_or_fail_mut<'a>(&'a mut self, end: &uint) -> &'a mut [T] {
543 self.as_mut_slice().slice_to_or_fail_mut(end)
546 fn slice_or_fail_mut<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] {
547 self.as_mut_slice().slice_or_fail_mut(start, end)
551 #[experimental = "waiting on Deref stability"]
552 impl<T> ops::Deref<[T]> for Vec<T> {
553 fn deref<'a>(&'a self) -> &'a [T] { self.as_slice() }
556 #[experimental = "waiting on DerefMut stability"]
557 impl<T> ops::DerefMut<[T]> for Vec<T> {
558 fn deref_mut<'a>(&'a mut self) -> &'a mut [T] { self.as_mut_slice() }
561 #[experimental = "waiting on FromIterator stability"]
562 impl<T> FromIterator<T> for Vec<T> {
564 fn from_iter<I:Iterator<T>>(mut iterator: I) -> Vec<T> {
565 let (lower, _) = iterator.size_hint();
566 let mut vector = Vec::with_capacity(lower);
567 for element in iterator {
574 #[experimental = "waiting on Extend stability"]
575 impl<T> Extend<T> for Vec<T> {
577 fn extend<I: Iterator<T>>(&mut self, mut iterator: I) {
578 let (lower, _) = iterator.size_hint();
580 for element in iterator {
586 impl<A, B> PartialEq<Vec<B>> for Vec<A> where A: PartialEq<B> {
588 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
590 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
593 macro_rules! impl_eq {
594 ($lhs:ty, $rhs:ty) => {
595 impl<'b, A, B> PartialEq<$rhs> for $lhs where A: PartialEq<B> {
597 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
599 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
602 impl<'b, A, B> PartialEq<$lhs> for $rhs where B: PartialEq<A> {
604 fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
606 fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&**self, &**other) }
611 impl_eq! { Vec<A>, &'b [B] }
612 impl_eq! { Vec<A>, &'b mut [B] }
614 impl<'a, A, B> PartialEq<Vec<B>> for CowVec<'a, A> where A: PartialEq<B> + Clone {
616 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
618 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
621 impl<'a, A, B> PartialEq<CowVec<'a, A>> for Vec<B> where A: Clone, B: PartialEq<A> {
623 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
625 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
628 macro_rules! impl_eq_for_cowvec {
630 impl<'a, 'b, A, B> PartialEq<$rhs> for CowVec<'a, A> where A: PartialEq<B> + Clone {
632 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
634 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
637 impl<'a, 'b, A, B> PartialEq<CowVec<'a, A>> for $rhs where A: Clone, B: PartialEq<A> {
639 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
641 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
646 impl_eq_for_cowvec! { &'b [B] }
647 impl_eq_for_cowvec! { &'b mut [B] }
649 #[unstable = "waiting on PartialOrd stability"]
650 impl<T: PartialOrd> PartialOrd for Vec<T> {
652 fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
653 self.as_slice().partial_cmp(other.as_slice())
657 #[unstable = "waiting on Eq stability"]
658 impl<T: Eq> Eq for Vec<T> {}
661 #[deprecated = "Use overloaded `core::cmp::PartialEq`"]
662 impl<T: PartialEq, Sized? V: AsSlice<T>> Equiv<V> for Vec<T> {
664 fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() }
667 #[unstable = "waiting on Ord stability"]
668 impl<T: Ord> Ord for Vec<T> {
670 fn cmp(&self, other: &Vec<T>) -> Ordering {
671 self.as_slice().cmp(other.as_slice())
675 impl<S: hash::Writer, T: Hash<S>> Hash<S> for Vec<T> {
677 fn hash(&self, state: &mut S) {
678 self.as_slice().hash(state);
682 // FIXME: #13996: need a way to mark the return value as `noalias`
// Allocates a fresh block of `size` bytes, or grows an existing block from
// `old_size` to `size` bytes, using `T`'s alignment.
// NOTE(review): the branch condition choosing between the two calls below
// (presumably `old_size == 0`) and the `else` line are elided in this sampled
// view — confirm against the full file.
684 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: uint, size: uint) -> *mut T {
686 allocate(size, mem::min_align_of::<T>()) as *mut T
688 reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
693 unsafe fn dealloc<T>(ptr: *mut T, len: uint) {
694 if mem::size_of::<T>() != 0 {
695 deallocate(ptr as *mut u8,
696 len * mem::size_of::<T>(),
697 mem::min_align_of::<T>())
702 /// Returns the number of elements the vector can hold without reallocating.
707 /// let vec: Vec<int> = Vec::with_capacity(10);
708 /// assert_eq!(vec.capacity(), 10);
712 pub fn capacity(&self) -> uint {
716 /// Deprecated: Renamed to `reserve`.
717 #[deprecated = "Renamed to `reserve`"]
718 pub fn reserve_additional(&mut self, extra: uint) {
722 /// Reserves capacity for at least `additional` more elements to be inserted in the given
723 /// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations.
727 /// Panics if the new capacity overflows `uint`.
732 /// let mut vec: Vec<int> = vec![1];
734 /// assert!(vec.capacity() >= 11);
736 #[unstable = "matches collection reform specification, waiting for dust to settle"]
737 pub fn reserve(&mut self, additional: uint) {
// Only grow when the spare room (`cap - len`) is insufficient.
738 if self.cap - self.len < additional {
739 let err_msg = "Vec::reserve: `uint` overflow";
// Round the required capacity up to the next power of two so that repeated
// reserves amortize; both the addition and the rounding can overflow `uint`,
// hence the two checked steps.
740 let new_cap = self.len.checked_add(additional).expect(err_msg)
741 .checked_next_power_of_two().expect(err_msg);
742 self.grow_capacity(new_cap);
746 /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
747 /// given `Vec<T>`. Does nothing if the capacity is already sufficient.
749 /// Note that the allocator may give the collection more space than it requests. Therefore
750 /// capacity can not be relied upon to be precisely minimal. Prefer `reserve` if future
751 /// insertions are expected.
755 /// Panics if the new capacity overflows `uint`.
760 /// let mut vec: Vec<int> = vec![1];
761 /// vec.reserve_exact(10);
762 /// assert!(vec.capacity() >= 11);
764 #[unstable = "matches collection reform specification, waiting for dust to settle"]
765 pub fn reserve_exact(&mut self, additional: uint) {
766 if self.cap - self.len < additional {
// Unlike `reserve`, there is no power-of-two rounding here: request exactly
// `len + additional` slots.
767 match self.len.checked_add(additional) {
768 None => panic!("Vec::reserve: `uint` overflow"),
769 Some(new_cap) => self.grow_capacity(new_cap)
774 /// Shrinks the capacity of the vector as much as possible.
776 /// It will drop down as close as possible to the length but the allocator may still inform the
777 /// vector that there is space for a few more elements.
782 /// let mut vec: Vec<int> = Vec::with_capacity(10);
784 /// vec.push_all(&[1, 2, 3]);
785 /// assert_eq!(vec.capacity(), 10);
787 /// vec.shrink_to_fit();
788 /// assert!(vec.capacity() >= 3);
791 pub fn shrink_to_fit(&mut self) {
792 if mem::size_of::<T>() == 0 { return }
797 dealloc(self.ptr, self.cap)
803 // Overflow check is unnecessary as the vector is already at
805 self.ptr = reallocate(self.ptr as *mut u8,
806 self.cap * mem::size_of::<T>(),
807 self.len * mem::size_of::<T>(),
808 mem::min_align_of::<T>()) as *mut T;
809 if self.ptr.is_null() { ::alloc::oom() }
815 /// Convert the vector into Box<[T]>.
817 /// Note that this will drop any excess capacity. Calling this and converting back to a vector
818 /// with `into_vec()` is equivalent to calling `shrink_to_fit()`.
820 pub fn into_boxed_slice(mut self) -> Box<[T]> {
821 self.shrink_to_fit();
823 let xs: Box<[T]> = mem::transmute(self.as_mut_slice());
829 /// Shortens a vector, dropping excess elements.
831 /// If `len` is greater than the vector's current length, this has no
837 /// let mut vec = vec![1i, 2, 3, 4];
839 /// assert_eq!(vec, vec![1, 2]);
841 #[unstable = "matches collection reform specification; waiting on panic semantics"]
842 pub fn truncate(&mut self, len: uint) {
844 // drop any extra elements
845 while len < self.len {
846 // decrement len before the read(), so a panic on Drop doesn't
847 // re-drop the just-failed value.
// NOTE(review): the line performing that decrement (`self.len -= 1;`) and the
// enclosing `unsafe` block are elided in this sampled view — confirm against
// the full file.
// `ptr::read` moves the element out; the temporary is dropped immediately,
// which runs `T`'s destructor.
849 ptr::read(self.unsafe_get(self.len));
854 /// Returns a mutable slice of the elements of `self`.
859 /// fn foo(slice: &mut [int]) {}
861 /// let mut vec = vec![1i, 2];
862 /// foo(vec.as_mut_slice());
866 pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
868 mem::transmute(RawSlice {
869 data: self.ptr as *const T,
875 /// Creates a consuming iterator, that is, one that moves each value out of the vector (from
876 /// start to end). The vector cannot be used after calling this.
881 /// let v = vec!["a".to_string(), "b".to_string()];
883 /// for s in v.into_iter() {
884 /// // s has type String, not &String
885 /// println!("{}", s);
889 #[unstable = "matches collection reform specification, waiting for dust to settle"]
890 pub fn into_iter(self) -> MoveItems<T> {
894 let begin = self.ptr as *const T;
895 let end = if mem::size_of::<T>() == 0 {
896 (ptr as uint + self.len()) as *const T
898 ptr.offset(self.len() as int) as *const T
901 MoveItems { allocation: ptr, cap: cap, ptr: begin, end: end }
905 /// Sets the length of a vector.
907 /// This will explicitly set the size of the vector, without actually modifying its buffers, so
908 /// it is up to the caller to ensure that the vector is actually the specified size.
913 /// let mut v = vec![1u, 2, 3, 4];
920 pub unsafe fn set_len(&mut self, len: uint) {
924 /// Removes an element from anywhere in the vector and returns it, replacing it with the last
927 /// This does not preserve ordering, but is O(1).
929 /// Returns `None` if `index` is out of bounds.
934 /// let mut v = vec!["foo", "bar", "baz", "qux"];
936 /// assert_eq!(v.swap_remove(1), Some("bar"));
937 /// assert_eq!(v, vec!["foo", "qux", "baz"]);
939 /// assert_eq!(v.swap_remove(0), Some("foo"));
940 /// assert_eq!(v, vec!["baz", "qux"]);
942 /// assert_eq!(v.swap_remove(2), None);
945 #[unstable = "the naming of this function may be altered"]
946 pub fn swap_remove(&mut self, index: uint) -> Option<T> {
947 let length = self.len();
948 if length > 0 && index < length - 1 {
949 self.swap(index, length - 1);
950 } else if index >= length {
956 /// Inserts an element at position `index` within the vector, shifting all elements after
957 /// position `index` one position to the right.
961 /// Panics if `index` is not between `0` and the vector's length (both bounds inclusive).
966 /// let mut vec = vec![1i, 2, 3];
967 /// vec.insert(1, 4);
968 /// assert_eq!(vec, vec![1, 4, 2, 3]);
969 /// vec.insert(4, 5);
970 /// assert_eq!(vec, vec![1, 4, 2, 3, 5]);
972 #[unstable = "panic semantics need settling"]
973 pub fn insert(&mut self, index: uint, element: T) {
974 let len = self.len();
975 assert!(index <= len);
976 // space for the new element
// NOTE(review): the statement that actually grows the buffer (presumably
// `self.reserve(1)`) is elided in this sampled view — confirm.
979 unsafe { // infallible
980 // The spot to put the new value
982 let p = self.as_mut_ptr().offset(index as int);
983 // Shift everything over to make space. (Duplicating the
984 // `index`th element into two consecutive places.)
985 ptr::copy_memory(p.offset(1), &*p, len - index);
986 // Write it in, overwriting the first copy of the `index`th
988 ptr::write(&mut *p, element);
// The length is extended only after the new element is fully written.
990 self.set_len(len + 1);
994 /// Removes and returns the element at position `index` within the vector, shifting all
995 /// elements after position `index` one position to the left. Returns `None` if `index` is out of
1001 /// let mut v = vec![1i, 2, 3];
1002 /// assert_eq!(v.remove(1), Some(2));
1003 /// assert_eq!(v, vec![1, 3]);
1005 /// assert_eq!(v.remove(4), None);
1006 /// // v is unchanged:
1007 /// assert_eq!(v, vec![1, 3]);
1009 #[unstable = "panic semantics need settling"]
1010 pub fn remove(&mut self, index: uint) -> Option<T> {
1011 let len = self.len();
1013 unsafe { // infallible
1016 // the place we are taking from.
1017 let ptr = self.as_mut_ptr().offset(index as int);
1018 // copy it out, unsafely having a copy of the value on
1019 // the stack and in the vector at the same time.
1020 ret = Some(ptr::read(ptr as *const T));
1022 // Shift everything down to fill in that spot.
1023 ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
1025 self.set_len(len - 1);
1033 /// Retains only the elements specified by the predicate.
1035 /// In other words, remove all elements `e` such that `f(&e)` returns false. This method
1036 /// operates in place and preserves the order of the retained elements.
1041 /// let mut vec = vec![1i, 2, 3, 4];
1042 /// vec.retain(|&x| x%2 == 0);
1043 /// assert_eq!(vec, vec![2, 4]);
1045 #[unstable = "the closure argument may become an unboxed closure"]
1046 pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
1047 let len = self.len();
1050 let v = self.as_mut_slice();
1052 for i in range(0u, len) {
1061 self.truncate(len - del);
1065 /// Expands a vector in place, initializing the new elements to the result of a function.
1067 /// The vector is grown by `n` elements. The i-th new element are initialized to the value
1068 /// returned by `f(i)` where `i` is in the range [0, n).
1073 /// let mut vec = vec![0u, 1];
1074 /// vec.grow_fn(3, |i| i);
1075 /// assert_eq!(vec, vec![0, 1, 0, 1, 2]);
1077 #[unstable = "this function may be renamed or change to unboxed closures"]
1078 pub fn grow_fn<F>(&mut self, n: uint, mut f: F) where F: FnMut(uint) -> T {
1080 for i in range(0u, n) {
1085 /// Appends an element to the back of a collection.
1089 /// Panics if the number of elements in the vector overflows a `uint`.
1094 /// let mut vec = vec!(1i, 2);
1096 /// assert_eq!(vec, vec!(1, 2, 3));
1100 pub fn push(&mut self, value: T) {
1101 if mem::size_of::<T>() == 0 {
1102 // zero-size types consume no memory, so we can't rely on the address space running out
1103 self.len = self.len.checked_add(1).expect("length overflow");
1104 unsafe { mem::forget(value); }
// NOTE(review): an early `return` after the zero-sized-type branch (and the
// branch's closing brace) appear to be elided in this sampled view — confirm.
1107 if self.len == self.cap {
1108 let old_size = self.cap * mem::size_of::<T>();
// Doubling growth (from a floor of 2 elements) keeps push amortized O(1).
1109 let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
1110 if old_size > size { panic!("capacity overflow") }
1112 self.ptr = alloc_or_realloc(self.ptr, old_size, size);
1113 if self.ptr.is_null() { ::alloc::oom() }
1115 self.cap = max(self.cap, 2) * 2;
1119 let end = (self.ptr as *const T).offset(self.len as int) as *mut T;
1120 ptr::write(&mut *end, value);
// NOTE(review): the `self.len += 1` bookkeeping and the enclosing `unsafe`
// block are elided in this sampled view.
1125 /// Removes the last element from a vector and returns it, or `None` if it is empty.
1130 /// let mut vec = vec![1i, 2, 3];
1131 /// assert_eq!(vec.pop(), Some(3));
1132 /// assert_eq!(vec, vec![1, 2]);
1136 pub fn pop(&mut self) -> Option<T> {
// NOTE(review): the empty-vector check (returning `None`), the length
// decrement that must precede this read, and the enclosing `unsafe` block are
// elided in this sampled view; without the decrement, this read alone would
// leave the popped element still owned by the vector — confirm against the
// full file.
1142 Some(ptr::read(self.unsafe_get(self.len())))
1147 /// Creates a draining iterator that clears the `Vec` and iterates over
1148 /// the removed items from start to end.
1153 /// let mut v = vec!["a".to_string(), "b".to_string()];
1154 /// for s in v.drain() {
1155 /// // s has type String, not &String
1156 /// println!("{}", s);
1158 /// assert!(v.is_empty());
1161 #[unstable = "matches collection reform specification, waiting for dust to settle"]
1162 pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
1164 let begin = self.ptr as *const T;
1165 let end = if mem::size_of::<T>() == 0 {
1166 (self.ptr as uint + self.len()) as *const T
1168 self.ptr.offset(self.len() as int) as *const T
1174 marker: ContravariantLifetime,
1179 /// Clears the vector, removing all values.
1184 /// let mut v = vec![1i, 2, 3];
1188 /// assert!(v.is_empty());
1192 pub fn clear(&mut self) {
1196 /// Returns the number of elements in the vector.
1201 /// let a = vec![1i, 2, 3];
1202 /// assert_eq!(a.len(), 3);
1206 pub fn len(&self) -> uint { self.len }
1208 /// Returns `true` if the vector contains no elements.
1213 /// let mut v = Vec::new();
1214 /// assert!(v.is_empty());
1217 /// assert!(!v.is_empty());
1219 #[unstable = "matches collection reform specification, waiting for dust to settle"]
1220 pub fn is_empty(&self) -> bool { self.len() == 0 }
1222 /// Reserves capacity for exactly `capacity` elements in the given vector.
1224 /// If the capacity for `self` is already equal to or greater than the
1225 /// requested capacity, then no action is taken.
1226 fn grow_capacity(&mut self, capacity: uint) {
// Zero-sized types never need storage.
1227 if mem::size_of::<T>() == 0 { return }
1229 if capacity > self.cap {
// The byte size of the new buffer must not overflow `uint`.
1230 let size = capacity.checked_mul(mem::size_of::<T>())
1231 .expect("capacity overflow");
// NOTE(review): the `unsafe` block opening around the reallocation below is
// elided in this sampled view.
1233 self.ptr = alloc_or_realloc(self.ptr, self.cap * mem::size_of::<T>(), size);
1234 if self.ptr.is_null() { ::alloc::oom() }
1236 self.cap = capacity;
1241 impl<T: PartialEq> Vec<T> {
1242 /// Removes consecutive repeated elements in the vector.
1244 /// If the vector is sorted, this removes all duplicates.
1249 /// let mut vec = vec![1i, 2, 2, 3, 2];
1253 /// assert_eq!(vec, vec![1i, 2, 3, 2]);
1255 #[unstable = "this function may be renamed"]
1256 pub fn dedup(&mut self) {
1258 // Although we have a mutable reference to `self`, we cannot make
1259 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1260 // must ensure that the vector is in a valid state at all time.
1262 // The way that we handle this is by using swaps; we iterate
1263 // over all the elements, swapping as we go so that at the end
1264 // the elements we wish to keep are in the front, and those we
1265 // wish to reject are at the back. We can then truncate the
1266 // vector. This operation is still O(n).
1268 // Example: We start in this state, where `r` represents "next
1269 // read" and `w` represents "next write".
1272 // +---+---+---+---+---+---+
1273 // | 0 | 1 | 1 | 2 | 3 | 3 |
1274 // +---+---+---+---+---+---+
1277 // Comparing self[r] against self[w-1], this is not a duplicate, so
1278 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1279 // r and w, leaving us with:
1282 // +---+---+---+---+---+---+
1283 // | 0 | 1 | 1 | 2 | 3 | 3 |
1284 // +---+---+---+---+---+---+
1287 // Comparing self[r] against self[w-1], this value is a duplicate,
1288 // so we increment `r` but leave everything else unchanged:
1291 // +---+---+---+---+---+---+
1292 // | 0 | 1 | 1 | 2 | 3 | 3 |
1293 // +---+---+---+---+---+---+
1296 // Comparing self[r] against self[w-1], this is not a duplicate,
1297 // so swap self[r] and self[w] and advance r and w:
1300 // +---+---+---+---+---+---+
1301 // | 0 | 1 | 2 | 1 | 3 | 3 |
1302 // +---+---+---+---+---+---+
1305 // Not a duplicate, repeat:
1308 // +---+---+---+---+---+---+
1309 // | 0 | 1 | 2 | 3 | 1 | 3 |
1310 // +---+---+---+---+---+---+
1313 // Duplicate, advance r. End of vec. Truncate to w.
1315 let ln = self.len();
1316 if ln < 1 { return; }
1318 // Avoid bounds checks by using unsafe pointers.
1319 let p = self.as_mut_ptr();
1324 let p_r = p.offset(r as int);
1325 let p_wm1 = p.offset((w - 1) as int);
1328 let p_w = p_wm1.offset(1);
1329 mem::swap(&mut *p_r, &mut *p_w);
1341 impl<T> AsSlice<T> for Vec<T> {
1342 /// Returns a slice into `self`.
1347 /// fn foo(slice: &[int]) {}
1349 /// let vec = vec![1i, 2];
1350 /// foo(vec.as_slice());
1354 fn as_slice<'a>(&'a self) -> &'a [T] {
1356 mem::transmute(RawSlice {
1357 data: self.ptr as *const T,
1364 // NOTE(stage0): Remove impl after a snapshot
1366 impl<T: Clone, Sized? V: AsSlice<T>> Add<V, Vec<T>> for Vec<T> {
1368 fn add(&self, rhs: &V) -> Vec<T> {
1369 let mut res = Vec::with_capacity(self.len() + rhs.as_slice().len());
1370 res.push_all(self.as_slice());
1371 res.push_all(rhs.as_slice());
1377 #[cfg(not(stage0))] // NOTE(stage0): Remove impl after a snapshot
1378 impl<'a, T: Clone> Add<&'a [T], Vec<T>> for Vec<T> {
1380 fn add(mut self, rhs: &[T]) -> Vec<T> {
1386 #[unsafe_destructor]
// Destructor for `Vec<T>`: runs each remaining element's destructor,
// then releases the backing allocation.
1387 impl<T> Drop for Vec<T> {
1388 fn drop(&mut self) {
1389 // This is (and should always remain) a no-op if the fields are
1390 // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
// Drop every element in place before freeing the buffer.
1393 for x in self.iter() {
// Free `cap` slots starting at `ptr`.
1396 dealloc(self.ptr, self.cap)
// `Vec::default()` — body not visible in this excerpt; presumably
// returns an empty vector equivalent to `Vec::new()`.
1403 impl<T> Default for Vec<T> {
1405 fn default() -> Vec<T> {
1410 #[experimental = "waiting on Show stability"]
// Formats the vector by delegating to the slice's `Show` impl.
1411 impl<T:fmt::Show> fmt::Show for Vec<T> {
1412 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1413 self.as_slice().fmt(f)
1417 /// An iterator that moves out of a vector.
// Owns the vector's buffer; `allocation`/`cap` are kept so the memory
// can be freed (or turned back into a `Vec`) when iteration ends.
1418 pub struct MoveItems<T> {
1419 allocation: *mut T, // the block of memory allocated for the vector
1420 cap: uint, // the capacity of the vector
1425 impl<T> MoveItems<T> {
1426 /// Drops all items that have not yet been moved and returns the empty vector.
1429 pub fn into_inner(mut self) -> Vec<T> {
// Destructure to take ownership of the raw parts without running the
// `MoveItems` destructor; the cursor fields are intentionally unused.
1432 let MoveItems { allocation, cap, ptr: _ptr, end: _end } = self;
// Rebuild a zero-length `Vec` that reuses the original allocation.
1434 Vec { ptr: allocation, cap: cap, len: 0 }
1438 /// Deprecated, use .into_inner() instead
1439 #[deprecated = "use .into_inner() instead"]
1440 pub fn unwrap(self) -> Vec<T> { self.into_inner() }
// Forward iteration: reads elements out of the buffer by value,
// advancing `ptr` toward `end`.
1443 impl<T> Iterator<T> for MoveItems<T> {
1445 fn next<'a>(&'a mut self) -> Option<T> {
// Exhausted when the cursor reaches the end sentinel.
1447 if self.ptr == self.end {
// Zero-sized types need special handling: the pointer carries no
// address information, only a count.
1450 if mem::size_of::<T>() == 0 {
1451 // purposefully don't use 'ptr.offset' because for
1452 // vectors with 0-size elements this would return the
// Bump the pointer's integer value by one to count the element.
1454 self.ptr = mem::transmute(self.ptr as uint + 1);
1456 // Use a non-null pointer value
1457 Some(ptr::read(mem::transmute(1u)))
// Non-zero-sized path: read the current element, then advance.
1460 self.ptr = self.ptr.offset(1);
1462 Some(ptr::read(old))
1469 fn size_hint(&self) -> (uint, Option<uint>) {
// Remaining byte distance divided by element size gives the exact
// count; for ZSTs the raw pointer difference is already the count.
1470 let diff = (self.end as uint) - (self.ptr as uint);
1471 let size = mem::size_of::<T>();
1472 let exact = diff / (if size == 0 {1} else {size});
1473 (exact, Some(exact))
// Reverse iteration: moves `end` backward toward `ptr`, reading the
// element just past the new end.
1477 impl<T> DoubleEndedIterator<T> for MoveItems<T> {
1479 fn next_back<'a>(&'a mut self) -> Option<T> {
1481 if self.end == self.ptr {
1484 if mem::size_of::<T>() == 0 {
1485 // See above for why 'ptr.offset' isn't used
// For ZSTs the pointer is just a counter; decrement it.
1486 self.end = mem::transmute(self.end as uint - 1);
1488 // Use a non-null pointer value
1489 Some(ptr::read(mem::transmute(1u)))
// Step back one slot, then read the element now at `end`.
1491 self.end = self.end.offset(-1);
1493 Some(ptr::read(mem::transmute(self.end)))
1500 impl<T> ExactSizeIterator<T> for MoveItems<T> {}
1502 #[unsafe_destructor]
// Destructor: drops any elements not yet yielded, then frees the
// buffer the iterator took ownership of.
1503 impl<T> Drop for MoveItems<T> {
1504 fn drop(&mut self) {
1505 // destroy the remaining elements
1509 dealloc(self.allocation, self.cap);
1515 /// An iterator that drains a vector.
// Borrows the vector for 'a (via the contravariant-lifetime marker)
// and moves its elements out; the vector is left empty afterwards.
1516 #[unsafe_no_drop_flag]
1517 pub struct Drain<'a, T> {
1520 marker: ContravariantLifetime<'a>,
// Forward iteration for `Drain`; mirrors the `MoveItems` logic:
// advance `ptr` toward `end`, reading elements out by value.
1523 impl<'a, T> Iterator<T> for Drain<'a, T> {
1525 fn next(&mut self) -> Option<T> {
1527 if self.ptr == self.end {
// Zero-sized types: the pointer is only a counter.
1530 if mem::size_of::<T>() == 0 {
1531 // purposefully don't use 'ptr.offset' because for
1532 // vectors with 0-size elements this would return the
1534 self.ptr = mem::transmute(self.ptr as uint + 1);
1536 // Use a non-null pointer value
1537 Some(ptr::read(mem::transmute(1u)))
// Non-ZST path: read current element, advance cursor.
1540 self.ptr = self.ptr.offset(1);
1542 Some(ptr::read(old))
1549 fn size_hint(&self) -> (uint, Option<uint>) {
// Exact remaining count from the byte distance between cursors.
1550 let diff = (self.end as uint) - (self.ptr as uint);
1551 let size = mem::size_of::<T>();
1552 let exact = diff / (if size == 0 {1} else {size});
1553 (exact, Some(exact))
// Reverse iteration for `Drain`: walk `end` backward toward `ptr`.
1557 impl<'a, T> DoubleEndedIterator<T> for Drain<'a, T> {
1559 fn next_back(&mut self) -> Option<T> {
1561 if self.end == self.ptr {
1564 if mem::size_of::<T>() == 0 {
1565 // See above for why 'ptr.offset' isn't used
1566 self.end = mem::transmute(self.end as uint - 1);
1568 // Use a non-null pointer value
1569 Some(ptr::read(mem::transmute(1u)))
// Step back one slot and read the element now at `end`.
1571 self.end = self.end.offset(-1);
1573 Some(ptr::read(self.end))
1580 impl<'a, T> ExactSizeIterator<T> for Drain<'a, T> {}
1582 #[unsafe_destructor]
// Destructor: drops any elements the caller did not iterate over.
// The vector itself still owns (and will free) the allocation.
1583 impl<'a, T> Drop for Drain<'a, T> {
1584 fn drop(&mut self) {
1585 // self.ptr == self.end == null if drop has already been called,
1586 // so we can use #[unsafe_no_drop_flag].
1588 // destroy the remaining elements
1593 /// Converts an iterator of pairs into a pair of vectors.
1595 /// Returns a tuple containing two vectors where the i-th element of the first vector contains the
1596 /// first element of the i-th tuple of the input iterator, and the i-th element of the second
1597 /// vector contains the second element of the i-th tuple of the input iterator.
1598 #[unstable = "this functionality may become more generic over time"]
1599 pub fn unzip<T, U, V: Iterator<(T, U)>>(mut iter: V) -> (Vec<T>, Vec<U>) {
// Pre-size both output vectors from the iterator's lower bound to
// avoid reallocation in the common exact-size case.
1600 let (lo, _) = iter.size_hint();
1601 let mut ts = Vec::with_capacity(lo);
1602 let mut us = Vec::with_capacity(lo);
// Split each pair into its two destination vectors.
1603 for (t, u) in iter {
1610 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
// Holds a `Vec` built over borrowed storage (see `as_vec`); the
// lifetime marker ties it to the borrowed slice.
1612 pub struct DerefVec<'a, T> {
1614 l: ContravariantLifetime<'a>
// Dereference to the inner `Vec` so `&DerefVec` usable as `&Vec<T>`.
1617 impl<'a, T> Deref<Vec<T>> for DerefVec<'a, T> {
1618 fn deref<'b>(&'b self) -> &'b Vec<T> {
1623 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
1624 #[unsafe_destructor]
1625 impl<'a, T> Drop for DerefVec<'a, T> {
1626 fn drop(&mut self) {
1632 /// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
1634 pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
// Build a `Vec` directly over the slice's storage (no copy). The
// paired `Drop` impl on `DerefVec` prevents this Vec from freeing
// memory it does not own.
1637 x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
1638 l: ContravariantLifetime::<'a>
1643 /// Unsafe vector operations.
1648 /// Constructs a vector from an unsafe pointer to a buffer.
1650 /// The elements of the buffer are copied into the vector without cloning,
1651 /// as if `ptr::read()` were called on them.
// Deprecated shim: forwards to the inherent `Vec::from_raw_buf`.
1653 #[deprecated = "renamed to Vec::from_raw_buf"]
1654 pub unsafe fn from_buf<T>(ptr: *const T, elts: uint) -> Vec<T> {
1655 Vec::from_raw_buf(ptr, elts)
1659 /// An owned, partially type-converted vector of elements with non-zero size.
1661 /// `T` and `U` must have the same, non-zero size. They must also have the same
1664 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1665 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1666 /// destructed. Additionally the underlying storage of `vec` will be freed.
// Bookkeeping for the in-place `map_in_place` conversion: the buffer
// holds already-converted `U`s at the front and unconverted `T`s at
// the back, delimited by the four cursor pointers.
1667 struct PartialVecNonZeroSized<T,U> {
1676 /// An owned, partially type-converted vector of zero-sized elements.
1678 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
// For ZSTs only counts are tracked; the invariant-type markers keep
// `T` and `U` owned for variance/drop purposes without storing them.
1680 struct PartialVecZeroSized<T,U> {
1683 marker_t: InvariantType<T>,
1684 marker_u: InvariantType<U>,
1687 #[unsafe_destructor]
// Panic-safety destructor: if `map_in_place` unwinds mid-conversion,
// drop the converted `U`s, the unconverted `T`s, and then the buffer.
1688 impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
1689 fn drop(&mut self) {
1691 // `vec` hasn't been modified until now. As it has a length
1692 // currently, this would run destructors of `T`s which might not be
1693 // there. So at first, set `vec`s length to `0`. This must be done
1694 // at first to remain memory-safe as the destructors of `U` or `T`
1695 // might cause unwinding where `vec`s destructor would be executed.
1696 self.vec.set_len(0);
1698 // We have instances of `U`s and `T`s in `vec`. Destruct them.
1699 while self.start_u != self.end_u {
1700 let _ = ptr::read(self.start_u as *const U); // Run a `U` destructor.
1701 self.start_u = self.start_u.offset(1);
1703 while self.start_t != self.end_t {
1704 let _ = ptr::read(self.start_t as *const T); // Run a `T` destructor.
1705 self.start_t = self.start_t.offset(1);
1707 // After this destructor ran, the destructor of `vec` will run,
1708 // deallocating the underlying memory.
1713 #[unsafe_destructor]
// ZST counterpart of the destructor above: conjure and drop the
// counted number of `T`s and `U`s (zero-sized, so no reads needed).
1714 impl<T,U> Drop for PartialVecZeroSized<T,U> {
1715 fn drop(&mut self) {
1717 // Destruct the instances of `T` and `U` this struct owns.
1718 while self.num_t != 0 {
1719 let _: T = mem::uninitialized(); // Run a `T` destructor.
1722 while self.num_u != 0 {
1723 let _: U = mem::uninitialized(); // Run a `U` destructor.
1731 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
1732 /// size and in case they are not zero-sized the same minimal alignment.
1736 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
1737 /// have differing minimal alignments.
1742 /// let v = vec![0u, 1, 2];
1743 /// let w = v.map_in_place(|i| i + 3);
1744 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
1746 /// #[deriving(PartialEq, Show)]
1747 /// struct Newtype(u8);
1748 /// let bytes = vec![0x11, 0x22];
1749 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
1750 /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice());
// NOTE(review): intricate panic-safety code; intermediate source lines
// are missing from this excerpt and the visible code is unchanged.
1752 pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
1753 // FIXME: Assert statically that the types `T` and `U` have the same
// Equal sizes are required so the buffer can be reused in place.
1755 assert!(mem::size_of::<T>() == mem::size_of::<U>());
1759 if mem::size_of::<T>() != 0 {
1760 // FIXME: Assert statically that the types `T` and `U` have the
1761 // same minimal alignment in case they are not zero-sized.
1763 // These asserts are necessary because the `min_align_of` of the
1764 // types are passed to the allocator by `Vec`.
1765 assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
1767 // This `as int` cast is safe, because the size of the elements of the
1768 // vector is not 0, and:
1770 // 1) If the size of the elements in the vector is 1, the `int` may
1771 // overflow, but it has the correct bit pattern so that the
1772 // `.offset()` function will work.
1775 // Address space 0x0-0xF.
1776 // `u8` array at: 0x1.
1777 // Size of `u8` array: 0x8.
1778 // Calculated `offset`: -0x8.
1779 // After `array.offset(offset)`: 0x9.
1780 // (0x1 + 0x8 = 0x1 - 0x8)
1782 // 2) If the size of the elements in the vector is >1, the `uint` ->
1783 // `int` conversion can't overflow.
1784 let offset = vec.len() as int;
1785 let start = vec.as_mut_ptr();
// All cursors start at the front: no `U`s converted yet, all `T`s
// still pending between `start` and `start + offset`.
1787 let mut pv = PartialVecNonZeroSized {
1791 // This points inside the vector, as the vector has length
1793 end_t: unsafe { start.offset(offset) },
1794 start_u: start as *mut U,
1795 end_u: start as *mut U,
// Main conversion loop: keep going until the `U` frontier has
// consumed every pending `T`.
1806 while pv.end_u as *mut T != pv.end_t {
1810 // +-+-+-+-+-+-+-+-+-+
1811 // |U|...|U|T|T|...|T|
1812 // +-+-+-+-+-+-+-+-+-+
// Move the next `T` out of the buffer by value.
1816 let t = ptr::read(pv.start_t as *const T);
1819 // +-+-+-+-+-+-+-+-+-+
1820 // |U|...|U|X|T|...|T|
1821 // +-+-+-+-+-+-+-+-+-+
1824 // We must not panic here, one cell is marked as `T`
1825 // although it is not `T`.
1827 pv.start_t = pv.start_t.offset(1);
1830 // +-+-+-+-+-+-+-+-+-+
1831 // |U|...|U|X|T|...|T|
1832 // +-+-+-+-+-+-+-+-+-+
1835 // We may panic again.
1837 // The function given by the user might panic.
// Write the converted value into the vacated slot.
1840 ptr::write(pv.end_u, u);
1843 // +-+-+-+-+-+-+-+-+-+
1844 // |U|...|U|U|T|...|T|
1845 // +-+-+-+-+-+-+-+-+-+
1848 // We should not panic here, because that would leak the `U`
1849 // pointed to by `end_u`.
1851 pv.end_u = pv.end_u.offset(1);
1854 // +-+-+-+-+-+-+-+-+-+
1855 // |U|...|U|U|T|...|T|
1856 // +-+-+-+-+-+-+-+-+-+
1859 // We may panic again.
1871 // Extract `vec` and prevent the destructor of
1872 // `PartialVecNonZeroSized` from running. Note that none of the
1873 // function calls can panic, thus no resources can be leaked (as the
1874 // `vec` member of `PartialVec` is the only one which holds
1875 // allocations -- and it is returned from this function. None of
// Reassemble the result `Vec<U>` over the original allocation.
1878 let vec_len = pv.vec.len();
1879 let vec_cap = pv.vec.capacity();
1880 let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
1882 Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
1885 // Put the `Vec` into the `PartialVecZeroSized` structure and
1886 // prevent the destructor of the `Vec` from running. Since the
1887 // `Vec` contained zero-sized objects, it did not allocate, so we
1888 // are not leaking memory here.
1889 let mut pv = PartialVecZeroSized::<T,U> {
1892 marker_t: InvariantType,
1893 marker_u: InvariantType,
1895 unsafe { mem::forget(vec); }
// ZST loop: convert by counting, never touching memory.
1897 while pv.num_t != 0 {
1899 // Create a `T` out of thin air and decrement `num_t`. This
1900 // must not panic between these steps, as otherwise a
1901 // destructor of `T` which doesn't exist runs.
1902 let t = mem::uninitialized();
1905 // The function given by the user might panic.
1908 // Forget the `U` and increment `num_u`. This increment
1909 // cannot overflow the `uint` as we only do this for a
1910 // number of times that fits into a `uint` (and start with
1911 // `0`). Again, we should not panic between these steps.
1916 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
1917 // destructor of the latter will not run. None of this can panic.
1918 let mut result = Vec::new();
1920 result.set_len(pv.num_u);
// Lets a `Vec<u8>` act as a sink for `write!`-style formatting —
// presumably by appending `buf` (body not visible in this excerpt).
// NOTE(review): the `'a` impl parameter appears unused here; confirm
// against the complete source.
1928 impl<'a> fmt::FormatWriter for Vec<u8> {
1929 fn write(&mut self, buf: &[u8]) -> fmt::Result {
// ---- Test module prelude ----
1938 use core::mem::size_of;
1940 use super::{as_vec, unzip, raw};
// Helper that counts destructor runs through a borrowed counter.
1942 struct DropCounter<'a> {
1946 #[unsafe_destructor]
// Increments the borrowed counter each time a `DropCounter` drops.
1947 impl<'a> Drop for DropCounter<'a> {
1948 fn drop(&mut self) {
// `as_vec` wraps a slice as a borrowed Vec view without copying.
1955 let xs = [1u8, 2u8, 3u8];
1956 assert_eq!(as_vec(&xs).as_slice(), xs);
// Each element behind an `as_vec` view must drop exactly once.
1960 fn test_as_vec_dtor() {
1961 let (mut count_x, mut count_y) = (0, 0);
1963 let xs = &[DropCounter { count: &mut count_x }, DropCounter { count: &mut count_y }];
1964 assert_eq!(as_vec(xs).len(), 2);
1966 assert_eq!(count_x, 1);
1967 assert_eq!(count_y, 1);
// `Vec` should be exactly three words (ptr, len, cap): no drop flag.
1971 fn test_small_vec_struct() {
1972 assert!(size_of::<Vec<u8>>() == size_of::<uint>() * 3);
// Dropping a struct containing two Vecs must drop each element once,
// even though `Vec` carries no drop flag.
1976 fn test_double_drop() {
1982 let (mut count_x, mut count_y) = (0, 0);
1984 let mut tv = TwoVec {
1988 tv.x.push(DropCounter {count: &mut count_x});
1989 tv.y.push(DropCounter {count: &mut count_y});
1991 // If Vec had a drop flag, here is where it would be zeroed.
1992 // Instead, it should rely on its internal state to prevent
1993 // doing anything significant when dropped multiple times.
1996 // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
1999 assert_eq!(count_x, 1);
2000 assert_eq!(count_y, 1);
// Capacity growth: pushing past capacity should at least double it.
2005 let mut v = Vec::new();
2006 assert_eq!(v.capacity(), 0);
2009 assert!(v.capacity() >= 2);
2011 for i in range(0i, 16) {
2015 assert!(v.capacity() >= 16);
2017 assert!(v.capacity() >= 32);
2022 assert!(v.capacity() >= 33)
// `extend` from an iterator must match repeated `push`.
2027 let mut v = Vec::new();
2028 let mut w = Vec::new();
2030 v.extend(range(0i, 3));
2031 for i in range(0i, 3) { w.push(i) }
2035 v.extend(range(3i, 10));
2036 for i in range(3i, 10) { w.push(i) }
// Mutating through a suffix slice writes back into the vector.
2042 fn test_slice_from_mut() {
2043 let mut values = vec![1u8,2,3,4,5];
2045 let slice = values.slice_from_mut(2);
2046 assert!(slice == [3, 4, 5]);
2047 for p in slice.iter_mut() {
2052 assert!(values == [1, 2, 5, 6, 7]);
// Mutating through a prefix slice writes back into the vector.
2056 fn test_slice_to_mut() {
2057 let mut values = vec![1u8,2,3,4,5];
2059 let slice = values.slice_to_mut(2);
2060 assert!(slice == [1, 2]);
2061 for p in slice.iter_mut() {
2066 assert!(values == [2, 3, 3, 4, 5]);
// `split_at_mut` yields two disjoint mutable halves of the buffer.
2070 fn test_split_at_mut() {
2071 let mut values = vec![1u8,2,3,4,5];
2073 let (left, right) = values.split_at_mut(2);
2075 let left: &[_] = left;
2076 assert!(left[0..left.len()] == [1, 2][]);
2078 for p in left.iter_mut() {
2083 let right: &[_] = right;
2084 assert!(right[0..right.len()] == [3, 4, 5][]);
2086 for p in right.iter_mut() {
2091 assert!(values == vec![2u8, 3, 5, 6, 7]);
// Clone produces an equal vector backed by a separate allocation.
2096 let v: Vec<int> = vec!();
2097 let w = vec!(1i, 2, 3);
2099 assert_eq!(v, v.clone());
2103 // they should be disjoint in memory.
2104 assert!(w.as_ptr() != z.as_ptr())
// `clone_from` must produce an equal vector whether the destination
// is shorter, equal, or longer than the source.
2108 fn test_clone_from() {
2110 let three = vec!(box 1i, box 2, box 3);
2111 let two = vec!(box 4i, box 5);
2113 v.clone_from(&three);
2114 assert_eq!(v, three);
2117 v.clone_from(&three);
2118 assert_eq!(v, three);
2125 v.clone_from(&three);
2126 assert_eq!(v, three)
// `grow_fn` appends n elements produced by the index closure.
2131 let mut v = vec![0u, 1];
2132 v.grow_fn(3, |i| i);
2133 assert!(v == vec![0u, 1, 0, 1, 2]);
// `retain` keeps only elements matching the predicate, in order.
2138 let mut vec = vec![1u, 2, 3, 4];
2139 vec.retain(|&x| x % 2 == 0);
2140 assert!(vec == vec![2u, 4]);
// Zero-sized element types: length bookkeeping and iteration must
// work even though no memory is allocated per element.
2144 fn zero_sized_values() {
2145 let mut v = Vec::new();
2146 assert_eq!(v.len(), 0);
2148 assert_eq!(v.len(), 1);
2150 assert_eq!(v.len(), 2);
2151 assert_eq!(v.pop(), Some(()));
2152 assert_eq!(v.pop(), Some(()));
2153 assert_eq!(v.pop(), None);
2155 assert_eq!(v.iter().count(), 0);
2157 assert_eq!(v.iter().count(), 1);
2159 assert_eq!(v.iter().count(), 2);
2161 for &() in v.iter() {}
2163 assert_eq!(v.iter_mut().count(), 2);
2165 assert_eq!(v.iter_mut().count(), 3);
2167 assert_eq!(v.iter_mut().count(), 4);
2169 for &() in v.iter_mut() {}
2170 unsafe { v.set_len(0); }
2171 assert_eq!(v.iter_mut().count(), 0);
// `partition` splits by predicate into (matching, non-matching),
// preserving relative order; exercise empty/all/mixed/none cases.
2175 fn test_partition() {
2176 assert_eq!(vec![].partition(|x: &int| *x < 3), (vec![], vec![]));
2177 assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
2178 assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 2), (vec![1], vec![2, 3]));
2179 assert_eq!(vec![1i, 2, 3].partition(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
// Same cases for the by-reference variant `partitioned`.
2183 fn test_partitioned() {
2184 assert_eq!(vec![].partitioned(|x: &int| *x < 3), (vec![], vec![]));
2185 assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
2186 assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 2), (vec![1], vec![2, 3]));
2187 assert_eq!(vec![1i, 2, 3].partitioned(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
// `unzip` splits an iterator of pairs into two parallel vectors.
2191 fn test_zip_unzip() {
2192 let z1 = vec![(1i, 4i), (2, 5), (3, 6)];
2194 let (left, right) = unzip(z1.iter().map(|&x| x));
2196 assert_eq!((1, 4), (left[0], right[0]));
2197 assert_eq!((2, 5), (left[1], right[1]));
2198 assert_eq!((3, 6), (left[2], right[2]));
// `raw::from_buf` copies from a raw pointer into a fresh Vec.
2202 fn test_unsafe_ptrs() {
2204 // Test on-stack copy-from-buf.
2206 let ptr = a.as_ptr();
2207 let b = raw::from_buf(ptr, 3u);
2208 assert_eq!(b, vec![1, 2, 3]);
2210 // Test on-heap copy-from-buf.
2211 let c = vec![1i, 2, 3, 4, 5];
2212 let ptr = c.as_ptr();
2213 let d = raw::from_buf(ptr, 5u);
2214 assert_eq!(d, vec![1, 2, 3, 4, 5]);
// `truncate` must drop exactly the removed tail elements.
2219 fn test_vec_truncate_drop() {
2220 static mut drops: uint = 0;
2222 impl Drop for Elem {
2223 fn drop(&mut self) {
2224 unsafe { drops += 1; }
2228 let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
2229 assert_eq!(unsafe { drops }, 0);
2231 assert_eq!(unsafe { drops }, 2);
2233 assert_eq!(unsafe { drops }, 5);
// A destructor panicking mid-truncate must not cause unsoundness.
2238 fn test_vec_truncate_fail() {
2239 struct BadElem(int);
2240 impl Drop for BadElem {
2241 fn drop(&mut self) {
2242 let BadElem(ref mut x) = *self;
2243 if *x == 0xbadbeef {
2244 panic!("BadElem panic: 0xbadbeef")
2249 let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
// In-bounds indexing.
2255 let vec = vec!(1i, 2, 3);
2256 assert!(vec[1] == 2);
// Out-of-bounds indexing and slicing must panic (#[should_fail]
// attributes are presumably on the original, not visible here).
2261 fn test_index_out_of_bounds() {
2262 let vec = vec!(1i, 2, 3);
2268 fn test_slice_out_of_bounds_1() {
2269 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2275 fn test_slice_out_of_bounds_2() {
2276 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2282 fn test_slice_out_of_bounds_3() {
2283 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2289 fn test_slice_out_of_bounds_4() {
2290 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2296 fn test_slice_out_of_bounds_5() {
2297 let x: Vec<int> = vec![1, 2, 3, 4, 5];
// `swap_remove` on an empty vector returns None instead of panicking.
2302 fn test_swap_remove_empty() {
2303 let mut vec: Vec<uint> = vec!();
2304 assert_eq!(vec.swap_remove(0), None);
// Round-tripping through `into_iter().unwrap()` keeps the same
// allocation and capacity, with length reset to 0.
2308 fn test_move_iter_unwrap() {
2309 let mut vec: Vec<uint> = Vec::with_capacity(7);
2312 let ptr = vec.as_ptr();
2313 vec = vec.into_iter().unwrap();
2314 assert_eq!(vec.as_ptr(), ptr);
2315 assert_eq!(vec.capacity(), 7);
2316 assert_eq!(vec.len(), 0);
// Size mismatch (uint -> ()) must trigger the size assertion.
2321 fn test_map_in_place_incompatible_types_fail() {
2322 let v = vec![0u, 1, 2];
2323 v.map_in_place(|_| ());
// Same-size conversion reuses the buffer and maps each element.
2327 fn test_map_in_place() {
2328 let v = vec![0u, 1, 2];
2329 assert_eq!(v.map_in_place(|i: uint| i as int - 1), [-1i, 0, 1]);
// Zero-sized -> zero-sized conversion takes the counting path.
2333 fn test_map_in_place_zero_sized() {
2334 let v = vec![(), ()];
2335 #[deriving(PartialEq, Show)]
2337 assert_eq!(v.map_in_place(|_| ZeroSized), [ZeroSized, ZeroSized]);
// Conversion must neither double-drop nor leak: zero drops during
// the mapping itself, exactly NUM_ELEMENTS drops at the end.
2341 fn test_map_in_place_zero_drop_count() {
2342 use std::sync::atomic;
2343 use std::sync::atomic::AtomicUint;
2345 #[deriving(Clone, PartialEq, Show)]
2347 impl Drop for Nothing { fn drop(&mut self) { } }
2349 #[deriving(Clone, PartialEq, Show)]
2351 impl Drop for ZeroSized {
2352 fn drop(&mut self) {
2353 DROP_COUNTER.fetch_add(1, atomic::Relaxed);
2356 const NUM_ELEMENTS: uint = 2;
2357 static DROP_COUNTER: AtomicUint = atomic::INIT_ATOMIC_UINT;
2359 let v = Vec::from_elem(NUM_ELEMENTS, Nothing);
2361 DROP_COUNTER.store(0, atomic::Relaxed);
2363 let v = v.map_in_place(|_| ZeroSized);
2364 assert_eq!(DROP_COUNTER.load(atomic::Relaxed), 0);
2366 assert_eq!(DROP_COUNTER.load(atomic::Relaxed), NUM_ELEMENTS);
// Consuming iteration yields elements in order.
2370 fn test_move_items() {
2371 let vec = vec![1, 2, 3];
2372 let mut vec2 : Vec<i32> = vec![];
2373 for i in vec.into_iter() {
2376 assert!(vec2 == vec![1, 2, 3]);
// ... and in reverse when `.rev()`ed.
2380 fn test_move_items_reverse() {
2381 let vec = vec![1, 2, 3];
2382 let mut vec2 : Vec<i32> = vec![];
2383 for i in vec.into_iter().rev() {
2386 assert!(vec2 == vec![3, 2, 1]);
// Consuming iteration also works for zero-sized elements.
2390 fn test_move_items_zero_sized() {
2391 let vec = vec![(), (), ()];
2392 let mut vec2 : Vec<()> = vec![];
2393 for i in vec.into_iter() {
2396 assert!(vec2 == vec![(), (), ()]);
// `drain` yields every element and leaves the source vector empty.
2400 fn test_drain_items() {
2401 let mut vec = vec![1, 2, 3];
2402 let mut vec2: Vec<i32> = vec![];
2403 for i in vec.drain() {
2406 assert_eq!(vec, []);
2407 assert_eq!(vec2, [ 1, 2, 3 ]);
// Reverse drain.
2411 fn test_drain_items_reverse() {
2412 let mut vec = vec![1, 2, 3];
2413 let mut vec2: Vec<i32> = vec![];
2414 for i in vec.drain().rev() {
2417 assert_eq!(vec, []);
2418 assert_eq!(vec2, [ 3, 2, 1 ]);
// Drain over zero-sized elements.
2422 fn test_drain_items_zero_sized() {
2423 let mut vec = vec![(), (), ()];
2424 let mut vec2: Vec<()> = vec![];
2425 for i in vec.drain() {
2428 assert_eq!(vec, []);
2429 assert_eq!(vec2, [(), (), ()]);
// Conversion to a boxed slice preserves the contents.
2433 fn test_into_boxed_slice() {
2434 let xs = vec![1u, 2, 3];
2435 let ys = xs.into_boxed_slice();
2436 assert_eq!(ys.as_slice(), [1u, 2, 3]);
// ---- Benchmarks: construction ----
// Baseline cost of `Vec::new()` (no allocation expected).
2440 fn bench_new(b: &mut Bencher) {
2442 let v: Vec<uint> = Vec::new();
2443 assert_eq!(v.len(), 0);
2444 assert_eq!(v.capacity(), 0);
// Cost of a single up-front allocation of `src_len` slots.
2448 fn do_bench_with_capacity(b: &mut Bencher, src_len: uint) {
2449 b.bytes = src_len as u64;
2452 let v: Vec<uint> = Vec::with_capacity(src_len);
2453 assert_eq!(v.len(), 0);
2454 assert_eq!(v.capacity(), src_len);
2459 fn bench_with_capacity_0000(b: &mut Bencher) {
2460 do_bench_with_capacity(b, 0)
2464 fn bench_with_capacity_0010(b: &mut Bencher) {
2465 do_bench_with_capacity(b, 10)
2469 fn bench_with_capacity_0100(b: &mut Bencher) {
2470 do_bench_with_capacity(b, 100)
2474 fn bench_with_capacity_1000(b: &mut Bencher) {
2475 do_bench_with_capacity(b, 1000)
// Build a vector from a per-index closure; the asserts keep the
// optimizer from discarding the work.
2478 fn do_bench_from_fn(b: &mut Bencher, src_len: uint) {
2479 b.bytes = src_len as u64;
2482 let dst = Vec::from_fn(src_len, |i| i);
2483 assert_eq!(dst.len(), src_len);
2484 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2489 fn bench_from_fn_0000(b: &mut Bencher) {
2490 do_bench_from_fn(b, 0)
2494 fn bench_from_fn_0010(b: &mut Bencher) {
2495 do_bench_from_fn(b, 10)
2499 fn bench_from_fn_0100(b: &mut Bencher) {
2500 do_bench_from_fn(b, 100)
2504 fn bench_from_fn_1000(b: &mut Bencher) {
2505 do_bench_from_fn(b, 1000)
// Build a vector by repeating a single element.
2508 fn do_bench_from_elem(b: &mut Bencher, src_len: uint) {
2509 b.bytes = src_len as u64;
2512 let dst: Vec<uint> = Vec::from_elem(src_len, 5);
2513 assert_eq!(dst.len(), src_len);
2514 assert!(dst.iter().all(|x| *x == 5));
2519 fn bench_from_elem_0000(b: &mut Bencher) {
2520 do_bench_from_elem(b, 0)
2524 fn bench_from_elem_0010(b: &mut Bencher) {
2525 do_bench_from_elem(b, 10)
2529 fn bench_from_elem_0100(b: &mut Bencher) {
2530 do_bench_from_elem(b, 100)
2534 fn bench_from_elem_1000(b: &mut Bencher) {
2535 do_bench_from_elem(b, 1000)
// Copy a slice into a fresh vector via `to_vec`.
2538 fn do_bench_from_slice(b: &mut Bencher, src_len: uint) {
2539 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2541 b.bytes = src_len as u64;
2544 let dst = src.clone().as_slice().to_vec();
2545 assert_eq!(dst.len(), src_len);
2546 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2551 fn bench_from_slice_0000(b: &mut Bencher) {
2552 do_bench_from_slice(b, 0)
2556 fn bench_from_slice_0010(b: &mut Bencher) {
2557 do_bench_from_slice(b, 10)
2561 fn bench_from_slice_0100(b: &mut Bencher) {
2562 do_bench_from_slice(b, 100)
2566 fn bench_from_slice_1000(b: &mut Bencher) {
2567 do_bench_from_slice(b, 1000)
// Collect a consuming iterator into a vector.
2570 fn do_bench_from_iter(b: &mut Bencher, src_len: uint) {
2571 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2573 b.bytes = src_len as u64;
2576 let dst: Vec<uint> = FromIterator::from_iter(src.clone().into_iter());
2577 assert_eq!(dst.len(), src_len);
2578 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2583 fn bench_from_iter_0000(b: &mut Bencher) {
2584 do_bench_from_iter(b, 0)
2588 fn bench_from_iter_0010(b: &mut Bencher) {
2589 do_bench_from_iter(b, 10)
2593 fn bench_from_iter_0100(b: &mut Bencher) {
2594 do_bench_from_iter(b, 100)
2598 fn bench_from_iter_1000(b: &mut Bencher) {
2599 do_bench_from_iter(b, 1000)
// Extend an existing vector of `dst_len` with `src_len` more items.
2602 fn do_bench_extend(b: &mut Bencher, dst_len: uint, src_len: uint) {
2603 let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len));
2604 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2606 b.bytes = src_len as u64;
2609 let mut dst = dst.clone();
2610 dst.extend(src.clone().into_iter());
2611 assert_eq!(dst.len(), dst_len + src_len);
2612 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2617 fn bench_extend_0000_0000(b: &mut Bencher) {
2618 do_bench_extend(b, 0, 0)
2622 fn bench_extend_0000_0010(b: &mut Bencher) {
2623 do_bench_extend(b, 0, 10)
2627 fn bench_extend_0000_0100(b: &mut Bencher) {
2628 do_bench_extend(b, 0, 100)
2632 fn bench_extend_0000_1000(b: &mut Bencher) {
2633 do_bench_extend(b, 0, 1000)
2637 fn bench_extend_0010_0010(b: &mut Bencher) {
2638 do_bench_extend(b, 10, 10)
2642 fn bench_extend_0100_0100(b: &mut Bencher) {
2643 do_bench_extend(b, 100, 100)
2647 fn bench_extend_1000_1000(b: &mut Bencher) {
2648 do_bench_extend(b, 1000, 1000)
// Append by slice copy (`push_all`, clones each element).
2651 fn do_bench_push_all(b: &mut Bencher, dst_len: uint, src_len: uint) {
2652 let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len));
2653 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2655 b.bytes = src_len as u64;
2658 let mut dst = dst.clone();
2659 dst.push_all(src.as_slice());
2660 assert_eq!(dst.len(), dst_len + src_len);
2661 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2666 fn bench_push_all_0000_0000(b: &mut Bencher) {
2667 do_bench_push_all(b, 0, 0)
2671 fn bench_push_all_0000_0010(b: &mut Bencher) {
2672 do_bench_push_all(b, 0, 10)
2676 fn bench_push_all_0000_0100(b: &mut Bencher) {
2677 do_bench_push_all(b, 0, 100)
2681 fn bench_push_all_0000_1000(b: &mut Bencher) {
2682 do_bench_push_all(b, 0, 1000)
2686 fn bench_push_all_0010_0010(b: &mut Bencher) {
2687 do_bench_push_all(b, 10, 10)
2691 fn bench_push_all_0100_0100(b: &mut Bencher) {
2692 do_bench_push_all(b, 100, 100)
2696 fn bench_push_all_1000_1000(b: &mut Bencher) {
2697 do_bench_push_all(b, 1000, 1000)
// Append by moving elements from another vector; uses `extend` (the
// modern replacement for the old `push_all_move`).
2700 fn do_bench_push_all_move(b: &mut Bencher, dst_len: uint, src_len: uint) {
2701 let dst: Vec<uint> = FromIterator::from_iter(range(0u, dst_len));
2702 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2704 b.bytes = src_len as u64;
2707 let mut dst = dst.clone();
2708 dst.extend(src.clone().into_iter());
2709 assert_eq!(dst.len(), dst_len + src_len);
2710 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2715 fn bench_push_all_move_0000_0000(b: &mut Bencher) {
2716 do_bench_push_all_move(b, 0, 0)
2720 fn bench_push_all_move_0000_0010(b: &mut Bencher) {
2721 do_bench_push_all_move(b, 0, 10)
2725 fn bench_push_all_move_0000_0100(b: &mut Bencher) {
2726 do_bench_push_all_move(b, 0, 100)
2730 fn bench_push_all_move_0000_1000(b: &mut Bencher) {
2731 do_bench_push_all_move(b, 0, 1000)
2735 fn bench_push_all_move_0010_0010(b: &mut Bencher) {
2736 do_bench_push_all_move(b, 10, 10)
2740 fn bench_push_all_move_0100_0100(b: &mut Bencher) {
2741 do_bench_push_all_move(b, 100, 100)
2745 fn bench_push_all_move_1000_1000(b: &mut Bencher) {
2746 do_bench_push_all_move(b, 1000, 1000)
// Cost of a full `clone` (fresh allocation plus element copies).
2749 fn do_bench_clone(b: &mut Bencher, src_len: uint) {
2750 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2752 b.bytes = src_len as u64;
2755 let dst = src.clone();
2756 assert_eq!(dst.len(), src_len);
2757 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
2762 fn bench_clone_0000(b: &mut Bencher) {
2763 do_bench_clone(b, 0)
2767 fn bench_clone_0010(b: &mut Bencher) {
2768 do_bench_clone(b, 10)
2772 fn bench_clone_0100(b: &mut Bencher) {
2773 do_bench_clone(b, 100)
2777 fn bench_clone_1000(b: &mut Bencher) {
2778 do_bench_clone(b, 1000)
// `clone_from`, which may reuse the destination's allocation; the
// dst/src size combinations cover grow, shrink and equal cases, and
// `times` measures repeated reuse of the same destination.
2781 fn do_bench_clone_from(b: &mut Bencher, times: uint, dst_len: uint, src_len: uint) {
2782 let dst: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2783 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2785 b.bytes = (times * src_len) as u64;
2788 let mut dst = dst.clone();
2790 for _ in range(0, times) {
2791 dst.clone_from(&src);
2793 assert_eq!(dst.len(), src_len);
2794 assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x));
2800 fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
2801 do_bench_clone_from(b, 1, 0, 0)
2805 fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
2806 do_bench_clone_from(b, 1, 0, 10)
2810 fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
2811 do_bench_clone_from(b, 1, 0, 100)
2815 fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
2816 do_bench_clone_from(b, 1, 0, 1000)
2820 fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
2821 do_bench_clone_from(b, 1, 10, 10)
2825 fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
2826 do_bench_clone_from(b, 1, 100, 100)
2830 fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
2831 do_bench_clone_from(b, 1, 1000, 1000)
2835 fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
2836 do_bench_clone_from(b, 1, 10, 100)
2840 fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
2841 do_bench_clone_from(b, 1, 100, 1000)
2845 fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
2846 do_bench_clone_from(b, 1, 10, 0)
2850 fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
2851 do_bench_clone_from(b, 1, 100, 10)
2855 fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
2856 do_bench_clone_from(b, 1, 1000, 100)
2860 fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
2861 do_bench_clone_from(b, 10, 0, 0)
2865 fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
2866 do_bench_clone_from(b, 10, 0, 10)
2870 fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
2871 do_bench_clone_from(b, 10, 0, 100)
2875 fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
2876 do_bench_clone_from(b, 10, 0, 1000)
2880 fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
2881 do_bench_clone_from(b, 10, 10, 10)
2885 fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
2886 do_bench_clone_from(b, 10, 100, 100)
2890 fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
2891 do_bench_clone_from(b, 10, 1000, 1000)
2895 fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
2896 do_bench_clone_from(b, 10, 10, 100)
2900 fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
2901 do_bench_clone_from(b, 10, 100, 1000)
2905 fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
2906 do_bench_clone_from(b, 10, 10, 0)
2910 fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
2911 do_bench_clone_from(b, 10, 100, 10)
2915 fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
2916 do_bench_clone_from(b, 10, 1000, 100)