1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type with heap-allocated contents, written `Vec<T>` but pronounced 'vector.'
13 //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end).
17 //! Explicitly creating a `Vec<T>` with `new()`:
20 //! let xs: Vec<i32> = Vec::new();
23 //! Using the `vec!` macro:
26 //! let ys: Vec<i32> = vec![];
28 //! let zs = vec![1i32, 2, 3, 4, 5];
34 //! let mut xs = vec![1i32, 2];
42 //! let mut xs = vec![1i32, 2];
44 //! let two = xs.pop();
51 use alloc::boxed::Box;
52 use alloc::heap::{EMPTY, allocate, reallocate, deallocate};
53 use core::borrow::{Cow, IntoCow};
55 use core::cmp::{Ordering};
56 use core::default::Default;
58 use core::hash::{self, Hash};
59 use core::iter::{repeat, FromIterator};
60 use core::marker::{ContravariantLifetime, InvariantType};
62 use core::nonzero::NonZero;
63 use core::num::{Int, UnsignedInt};
64 use core::ops::{Index, IndexMut, Deref, Add};
67 use core::raw::Slice as RawSlice;
70 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
75 /// let mut vec = Vec::new();
79 /// assert_eq!(vec.len(), 2);
80 /// assert_eq!(vec[0], 1);
82 /// assert_eq!(vec.pop(), Some(2));
83 /// assert_eq!(vec.len(), 1);
86 /// assert_eq!(vec[0], 7);
88 /// vec.push_all(&[1, 2, 3]);
90 /// for x in vec.iter() {
91 /// println!("{}", x);
93 /// assert_eq!(vec, vec![7i, 1, 2, 3]);
96 /// The `vec!` macro is provided to make initialization more convenient:
99 /// let mut vec = vec![1i, 2i, 3i];
101 /// assert_eq!(vec, vec![1, 2, 3, 4]);
104 /// Use a `Vec<T>` as an efficient stack:
107 /// let mut stack = Vec::new();
114 /// let top = match stack.pop() {
115 /// None => break, // empty
118 /// // Prints 3, 2, 1
119 /// println!("{}", top);
123 /// # Capacity and reallocation
125 /// The capacity of a vector is the amount of space allocated for any future elements that will be
126 /// added onto the vector. This is not to be confused with the *length* of a vector, which
127 /// specifies the number of actual elements within the vector. If a vector's length exceeds its
128 /// capacity, its capacity will automatically be increased, but its elements will have to be
131 /// For example, a vector with capacity 10 and length 0 would be an empty vector with space for 10
132 /// more elements. Pushing 10 or fewer elements onto the vector will not change its capacity or
133 /// cause reallocation to occur. However, if the vector's length is increased to 11, it will have
134 /// to reallocate, which can be slow. For this reason, it is recommended to use
135 /// `Vec::with_capacity` whenever possible to specify how big the vector is expected to get.
136 #[unsafe_no_drop_flag]
139 ptr: NonZero<*mut T>,
// SAFETY: a Vec<T> uniquely owns its heap buffer, so thread-safety reduces
// entirely to the element type: moving the vector to another thread (Send)
// or sharing `&Vec<T>` across threads (Sync) is sound exactly when `T`
// itself satisfies the same bound. This mirrors the impls for `Box<T>`.
unsafe impl<T: Send> Send for Vec<T> { }
unsafe impl<T: Sync> Sync for Vec<T> { }
147 ////////////////////////////////////////////////////////////////////////////////
149 ////////////////////////////////////////////////////////////////////////////////
152 /// Constructs a new, empty `Vec<T>`.
154 /// The vector will not allocate until elements are pushed onto it.
159 /// let mut vec: Vec<int> = Vec::new();
163 pub fn new() -> Vec<T> {
164 // We want ptr to never be NULL so instead we set it to some arbitrary
165 // non-null value which is fine since we never call deallocate on the ptr
166 // if cap is 0. The reason for this is because the pointer of a slice
167 // being NULL would break the null pointer optimization for enums.
168 Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
171 /// Constructs a new, empty `Vec<T>` with the specified capacity.
173 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
174 /// `capacity` is 0, the vector will not allocate.
176 /// It is important to note that this function does not specify the *length* of the returned
177 /// vector, but only the *capacity*. (For an explanation of the difference between length and
178 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.)
183 /// let mut vec: Vec<int> = Vec::with_capacity(10);
185 /// // The vector contains no items, even though it has capacity for more
186 /// assert_eq!(vec.len(), 0);
188 /// // These are all done without reallocating...
189 /// for i in range(0i, 10) {
193 /// // ...but this may make the vector reallocate
198 pub fn with_capacity(capacity: uint) -> Vec<T> {
199 if mem::size_of::<T>() == 0 {
200 Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
201 } else if capacity == 0 {
204 let size = capacity.checked_mul(mem::size_of::<T>())
205 .expect("capacity overflow");
206 let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
207 if ptr.is_null() { ::alloc::oom() }
208 Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
212 /// Creates a `Vec<T>` directly from the raw components of another vector.
214 /// This is highly unsafe, due to the number of invariants that aren't checked.
223 /// let mut v = vec![1i, 2, 3];
225 /// // Pull out the various important pieces of information about `v`
226 /// let p = v.as_mut_ptr();
227 /// let len = v.len();
228 /// let cap = v.capacity();
231 /// // Cast `v` into the void: no destructor run, so we are in
232 /// // complete control of the allocation to which `p` points.
235 /// // Overwrite memory with 4, 5, 6
236 /// for i in range(0, len as int) {
237 /// ptr::write(p.offset(i), 4 + i);
240 /// // Put everything back together into a Vec
241 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
242 /// assert_eq!(rebuilt, vec![4i, 5i, 6i]);
247 pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
248 capacity: uint) -> Vec<T> {
249 Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
252 /// Creates a vector by copying the elements from a raw pointer.
254 /// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation
255 /// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector
256 /// without cloning, as if `ptr::read()` were called on them.
258 #[unstable = "may be better expressed via composition"]
259 pub unsafe fn from_raw_buf(ptr: *const T, elts: uint) -> Vec<T> {
260 let mut dst = Vec::with_capacity(elts);
262 ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
266 /// Returns the number of elements the vector can hold without
272 /// let vec: Vec<int> = Vec::with_capacity(10);
273 /// assert_eq!(vec.capacity(), 10);
277 pub fn capacity(&self) -> uint {
281 /// Reserves capacity for at least `additional` more elements to be inserted in the given
282 /// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations.
286 /// Panics if the new capacity overflows `uint`.
291 /// let mut vec: Vec<int> = vec![1];
293 /// assert!(vec.capacity() >= 11);
296 pub fn reserve(&mut self, additional: uint) {
297 if self.cap - self.len < additional {
298 let err_msg = "Vec::reserve: `uint` overflow";
299 let new_cap = self.len.checked_add(additional).expect(err_msg)
300 .checked_next_power_of_two().expect(err_msg);
301 self.grow_capacity(new_cap);
305 /// Reserves the minimum capacity for exactly `additional` more elements to
306 /// be inserted in the given `Vec<T>`. Does nothing if the capacity is already
309 /// Note that the allocator may give the collection more space than it
310 /// requests. Therefore capacity can not be relied upon to be precisely
311 /// minimal. Prefer `reserve` if future insertions are expected.
315 /// Panics if the new capacity overflows `uint`.
320 /// let mut vec: Vec<int> = vec![1];
321 /// vec.reserve_exact(10);
322 /// assert!(vec.capacity() >= 11);
325 pub fn reserve_exact(&mut self, additional: uint) {
326 if self.cap - self.len < additional {
327 match self.len.checked_add(additional) {
328 None => panic!("Vec::reserve: `uint` overflow"),
329 Some(new_cap) => self.grow_capacity(new_cap)
334 /// Shrinks the capacity of the vector as much as possible.
336 /// It will drop down as close as possible to the length but the allocator
337 /// may still inform the vector that there is space for a few more elements.
342 /// let mut vec: Vec<int> = Vec::with_capacity(10);
343 /// vec.push_all(&[1, 2, 3]);
344 /// assert_eq!(vec.capacity(), 10);
345 /// vec.shrink_to_fit();
346 /// assert!(vec.capacity() >= 3);
349 pub fn shrink_to_fit(&mut self) {
350 if mem::size_of::<T>() == 0 { return }
355 dealloc(*self.ptr, self.cap)
361 // Overflow check is unnecessary as the vector is already at
363 let ptr = reallocate(*self.ptr as *mut u8,
364 self.cap * mem::size_of::<T>(),
365 self.len * mem::size_of::<T>(),
366 mem::min_align_of::<T>()) as *mut T;
367 if ptr.is_null() { ::alloc::oom() }
368 self.ptr = NonZero::new(ptr);
/// Converts the vector into `Box<[T]>`.
376 /// Note that this will drop any excess capacity. Calling this and
377 /// converting back to a vector with `into_vec()` is equivalent to calling
378 /// `shrink_to_fit()`.
380 pub fn into_boxed_slice(mut self) -> Box<[T]> {
381 self.shrink_to_fit();
383 let xs: Box<[T]> = mem::transmute(self.as_mut_slice());
/// Shortens the vector, dropping excess elements.
391 /// If `len` is greater than the vector's current length, this has no
397 /// let mut vec = vec![1i, 2, 3, 4];
399 /// assert_eq!(vec, vec![1, 2]);
402 pub fn truncate(&mut self, len: uint) {
404 // drop any extra elements
405 while len < self.len {
406 // decrement len before the read(), so a panic on Drop doesn't
407 // re-drop the just-failed value.
409 ptr::read(self.get_unchecked(self.len));
414 /// Returns a mutable slice of the elements of `self`.
419 /// fn foo(slice: &mut [int]) {}
421 /// let mut vec = vec![1i, 2];
422 /// foo(vec.as_mut_slice());
426 pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
428 mem::transmute(RawSlice {
435 /// Creates a consuming iterator, that is, one that moves each value out of
436 /// the vector (from start to end). The vector cannot be used after calling
442 /// let v = vec!["a".to_string(), "b".to_string()];
443 /// for s in v.into_iter() {
444 /// // s has type String, not &String
445 /// println!("{}", s);
450 pub fn into_iter(self) -> IntoIter<T> {
454 let begin = ptr as *const T;
455 let end = if mem::size_of::<T>() == 0 {
456 (ptr as uint + self.len()) as *const T
458 ptr.offset(self.len() as int) as *const T
461 IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
465 /// Sets the length of a vector.
467 /// This will explicitly set the size of the vector, without actually
468 /// modifying its buffers, so it is up to the caller to ensure that the
469 /// vector is actually the specified size.
474 /// let mut v = vec![1u, 2, 3, 4];
481 pub unsafe fn set_len(&mut self, len: uint) {
485 /// Removes an element from anywhere in the vector and return it, replacing
486 /// it with the last element.
488 /// This does not preserve ordering, but is O(1).
492 /// Panics if `index` is out of bounds.
497 /// let mut v = vec!["foo", "bar", "baz", "qux"];
499 /// assert_eq!(v.swap_remove(1), "bar");
500 /// assert_eq!(v, vec!["foo", "qux", "baz"]);
502 /// assert_eq!(v.swap_remove(0), "foo");
503 /// assert_eq!(v, vec!["baz", "qux"]);
507 pub fn swap_remove(&mut self, index: uint) -> T {
508 let length = self.len();
509 self.swap(index, length - 1);
513 /// Inserts an element at position `index` within the vector, shifting all
/// elements after position `index` one position to the right.
518 /// Panics if `index` is not between `0` and the vector's length (both
519 /// bounds inclusive).
524 /// let mut vec = vec![1i, 2, 3];
525 /// vec.insert(1, 4);
526 /// assert_eq!(vec, vec![1, 4, 2, 3]);
527 /// vec.insert(4, 5);
528 /// assert_eq!(vec, vec![1, 4, 2, 3, 5]);
531 pub fn insert(&mut self, index: uint, element: T) {
532 let len = self.len();
533 assert!(index <= len);
534 // space for the new element
537 unsafe { // infallible
538 // The spot to put the new value
540 let p = self.as_mut_ptr().offset(index as int);
541 // Shift everything over to make space. (Duplicating the
542 // `index`th element into two consecutive places.)
543 ptr::copy_memory(p.offset(1), &*p, len - index);
544 // Write it in, overwriting the first copy of the `index`th
546 ptr::write(&mut *p, element);
548 self.set_len(len + 1);
552 /// Removes and returns the element at position `index` within the vector,
553 /// shifting all elements after position `index` one position to the left.
/// Panics if `index` is out of bounds.
562 /// let mut v = vec![1i, 2, 3];
563 /// assert_eq!(v.remove(1), 2);
564 /// assert_eq!(v, vec![1, 3]);
567 pub fn remove(&mut self, index: uint) -> T {
568 let len = self.len();
569 assert!(index < len);
570 unsafe { // infallible
573 // the place we are taking from.
574 let ptr = self.as_mut_ptr().offset(index as int);
575 // copy it out, unsafely having a copy of the value on
576 // the stack and in the vector at the same time.
577 ret = ptr::read(ptr);
579 // Shift everything down to fill in that spot.
580 ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
582 self.set_len(len - 1);
587 /// Retains only the elements specified by the predicate.
589 /// In other words, remove all elements `e` such that `f(&e)` returns false.
590 /// This method operates in place and preserves the order of the retained
596 /// let mut vec = vec![1i, 2, 3, 4];
597 /// vec.retain(|&x| x%2 == 0);
598 /// assert_eq!(vec, vec![2, 4]);
601 pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
602 let len = self.len();
605 let v = self.as_mut_slice();
607 for i in range(0u, len) {
616 self.truncate(len - del);
620 /// Appends an element to the back of a collection.
624 /// Panics if the number of elements in the vector overflows a `uint`.
629 /// let mut vec = vec!(1i, 2);
631 /// assert_eq!(vec, vec!(1, 2, 3));
635 pub fn push(&mut self, value: T) {
636 if mem::size_of::<T>() == 0 {
637 // zero-size types consume no memory, so we can't rely on the
638 // address space running out
639 self.len = self.len.checked_add(1).expect("length overflow");
640 unsafe { mem::forget(value); }
643 if self.len == self.cap {
644 let old_size = self.cap * mem::size_of::<T>();
645 let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
646 if old_size > size { panic!("capacity overflow") }
648 let ptr = alloc_or_realloc(*self.ptr, old_size, size);
649 if ptr.is_null() { ::alloc::oom() }
650 self.ptr = NonZero::new(ptr);
652 self.cap = max(self.cap, 2) * 2;
656 let end = (*self.ptr).offset(self.len as int);
657 ptr::write(&mut *end, value);
662 /// Removes the last element from a vector and returns it, or `None` if it is empty.
667 /// let mut vec = vec![1i, 2, 3];
668 /// assert_eq!(vec.pop(), Some(3));
669 /// assert_eq!(vec, vec![1, 2]);
673 pub fn pop(&mut self) -> Option<T> {
679 Some(ptr::read(self.get_unchecked(self.len())))
684 /// Creates a draining iterator that clears the `Vec` and iterates over
685 /// the removed items from start to end.
690 /// let mut v = vec!["a".to_string(), "b".to_string()];
691 /// for s in v.drain() {
692 /// // s has type String, not &String
693 /// println!("{}", s);
695 /// assert!(v.is_empty());
698 #[unstable = "matches collection reform specification, waiting for dust to settle"]
699 pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
701 let begin = *self.ptr as *const T;
702 let end = if mem::size_of::<T>() == 0 {
703 (*self.ptr as uint + self.len()) as *const T
705 (*self.ptr).offset(self.len() as int) as *const T
711 marker: ContravariantLifetime,
716 /// Clears the vector, removing all values.
721 /// let mut v = vec![1i, 2, 3];
725 /// assert!(v.is_empty());
729 pub fn clear(&mut self) {
733 /// Returns the number of elements in the vector.
738 /// let a = vec![1i, 2, 3];
739 /// assert_eq!(a.len(), 3);
743 pub fn len(&self) -> uint { self.len }
745 /// Returns `true` if the vector contains no elements.
750 /// let mut v = Vec::new();
751 /// assert!(v.is_empty());
754 /// assert!(!v.is_empty());
757 pub fn is_empty(&self) -> bool { self.len() == 0 }
759 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
/// size and, in case they are not zero-sized, the same minimal alignment.
764 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
765 /// have differing minimal alignments.
770 /// let v = vec![0u, 1, 2];
771 /// let w = v.map_in_place(|i| i + 3);
772 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
774 /// #[derive(PartialEq, Show)]
775 /// struct Newtype(u8);
776 /// let bytes = vec![0x11, 0x22];
777 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
778 /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice());
780 #[unstable = "API may change to provide stronger guarantees"]
781 pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
782 // FIXME: Assert statically that the types `T` and `U` have the same
784 assert!(mem::size_of::<T>() == mem::size_of::<U>());
788 if mem::size_of::<T>() != 0 {
789 // FIXME: Assert statically that the types `T` and `U` have the
790 // same minimal alignment in case they are not zero-sized.
792 // These asserts are necessary because the `min_align_of` of the
793 // types are passed to the allocator by `Vec`.
794 assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
796 // This `as int` cast is safe, because the size of the elements of the
797 // vector is not 0, and:
799 // 1) If the size of the elements in the vector is 1, the `int` may
800 // overflow, but it has the correct bit pattern so that the
801 // `.offset()` function will work.
804 // Address space 0x0-0xF.
805 // `u8` array at: 0x1.
806 // Size of `u8` array: 0x8.
807 // Calculated `offset`: -0x8.
808 // After `array.offset(offset)`: 0x9.
809 // (0x1 + 0x8 = 0x1 - 0x8)
811 // 2) If the size of the elements in the vector is >1, the `uint` ->
812 // `int` conversion can't overflow.
813 let offset = vec.len() as int;
814 let start = vec.as_mut_ptr();
816 let mut pv = PartialVecNonZeroSized {
820 // This points inside the vector, as the vector has length
822 end_t: unsafe { start.offset(offset) },
823 start_u: start as *mut U,
824 end_u: start as *mut U,
835 while pv.end_u as *mut T != pv.end_t {
839 // +-+-+-+-+-+-+-+-+-+
840 // |U|...|U|T|T|...|T|
841 // +-+-+-+-+-+-+-+-+-+
845 let t = ptr::read(pv.start_t);
848 // +-+-+-+-+-+-+-+-+-+
849 // |U|...|U|X|T|...|T|
850 // +-+-+-+-+-+-+-+-+-+
853 // We must not panic here, one cell is marked as `T`
854 // although it is not `T`.
856 pv.start_t = pv.start_t.offset(1);
859 // +-+-+-+-+-+-+-+-+-+
860 // |U|...|U|X|T|...|T|
861 // +-+-+-+-+-+-+-+-+-+
864 // We may panic again.
866 // The function given by the user might panic.
869 ptr::write(pv.end_u, u);
872 // +-+-+-+-+-+-+-+-+-+
873 // |U|...|U|U|T|...|T|
874 // +-+-+-+-+-+-+-+-+-+
877 // We should not panic here, because that would leak the `U`
878 // pointed to by `end_u`.
880 pv.end_u = pv.end_u.offset(1);
883 // +-+-+-+-+-+-+-+-+-+
884 // |U|...|U|U|T|...|T|
885 // +-+-+-+-+-+-+-+-+-+
888 // We may panic again.
900 // Extract `vec` and prevent the destructor of
901 // `PartialVecNonZeroSized` from running. Note that none of the
902 // function calls can panic, thus no resources can be leaked (as the
903 // `vec` member of `PartialVec` is the only one which holds
904 // allocations -- and it is returned from this function. None of
907 let vec_len = pv.vec.len();
908 let vec_cap = pv.vec.capacity();
909 let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
911 Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
914 // Put the `Vec` into the `PartialVecZeroSized` structure and
915 // prevent the destructor of the `Vec` from running. Since the
916 // `Vec` contained zero-sized objects, it did not allocate, so we
917 // are not leaking memory here.
918 let mut pv = PartialVecZeroSized::<T,U> {
921 marker_t: InvariantType,
922 marker_u: InvariantType,
924 unsafe { mem::forget(vec); }
926 while pv.num_t != 0 {
928 // Create a `T` out of thin air and decrement `num_t`. This
929 // must not panic between these steps, as otherwise a
930 // destructor of `T` which doesn't exist runs.
931 let t = mem::uninitialized();
934 // The function given by the user might panic.
937 // Forget the `U` and increment `num_u`. This increment
938 // cannot overflow the `uint` as we only do this for a
939 // number of times that fits into a `uint` (and start with
940 // `0`). Again, we should not panic between these steps.
945 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
946 // destructor of the latter will not run. None of this can panic.
947 let mut result = Vec::new();
949 result.set_len(pv.num_u);
957 impl<T: Clone> Vec<T> {
958 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
960 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
961 /// is larger than the current value of `len()` or not.
966 /// let mut vec = vec!["hello"];
967 /// vec.resize(3, "world");
968 /// assert_eq!(vec, vec!["hello", "world", "world"]);
970 /// let mut vec = vec![1i, 2, 3, 4];
971 /// vec.resize(2, 0);
972 /// assert_eq!(vec, vec![1, 2]);
974 #[unstable = "matches collection reform specification; waiting for dust to settle"]
975 pub fn resize(&mut self, new_len: uint, value: T) {
976 let len = self.len();
979 self.extend(repeat(value).take(new_len - len));
981 self.truncate(new_len);
985 /// Appends all elements in a slice to the `Vec`.
987 /// Iterates over the slice `other`, clones each element, and then appends
988 /// it to this `Vec`. The `other` vector is traversed in-order.
993 /// let mut vec = vec![1i];
994 /// vec.push_all(&[2i, 3, 4]);
995 /// assert_eq!(vec, vec![1, 2, 3, 4]);
998 #[unstable = "likely to be replaced by a more optimized extend"]
999 pub fn push_all(&mut self, other: &[T]) {
1000 self.reserve(other.len());
1002 for i in range(0, other.len()) {
1003 let len = self.len();
1005 // Unsafe code so this can be optimised to a memcpy (or something similarly
1006 // fast) when T is Copy. LLVM is easily confused, so any extra operations
1007 // during the loop can prevent this optimisation.
1010 self.get_unchecked_mut(len),
1011 other.get_unchecked(i).clone());
1012 self.set_len(len + 1);
1018 impl<T: PartialEq> Vec<T> {
1019 /// Removes consecutive repeated elements in the vector.
1021 /// If the vector is sorted, this removes all duplicates.
1026 /// let mut vec = vec![1i, 2, 2, 3, 2];
1030 /// assert_eq!(vec, vec![1i, 2, 3, 2]);
1033 pub fn dedup(&mut self) {
1035 // Although we have a mutable reference to `self`, we cannot make
1036 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1037 // must ensure that the vector is in a valid state at all time.
1039 // The way that we handle this is by using swaps; we iterate
1040 // over all the elements, swapping as we go so that at the end
1041 // the elements we wish to keep are in the front, and those we
1042 // wish to reject are at the back. We can then truncate the
1043 // vector. This operation is still O(n).
1045 // Example: We start in this state, where `r` represents "next
1046 // read" and `w` represents "next_write`.
1049 // +---+---+---+---+---+---+
1050 // | 0 | 1 | 1 | 2 | 3 | 3 |
1051 // +---+---+---+---+---+---+
1054 // Comparing self[r] against self[w-1], this is not a duplicate, so
1055 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1056 // r and w, leaving us with:
1059 // +---+---+---+---+---+---+
1060 // | 0 | 1 | 1 | 2 | 3 | 3 |
1061 // +---+---+---+---+---+---+
1064 // Comparing self[r] against self[w-1], this value is a duplicate,
1065 // so we increment `r` but leave everything else unchanged:
1068 // +---+---+---+---+---+---+
1069 // | 0 | 1 | 1 | 2 | 3 | 3 |
1070 // +---+---+---+---+---+---+
1073 // Comparing self[r] against self[w-1], this is not a duplicate,
1074 // so swap self[r] and self[w] and advance r and w:
1077 // +---+---+---+---+---+---+
1078 // | 0 | 1 | 2 | 1 | 3 | 3 |
1079 // +---+---+---+---+---+---+
1082 // Not a duplicate, repeat:
1085 // +---+---+---+---+---+---+
1086 // | 0 | 1 | 2 | 3 | 1 | 3 |
1087 // +---+---+---+---+---+---+
1090 // Duplicate, advance r. End of vec. Truncate to w.
1092 let ln = self.len();
1093 if ln < 1 { return; }
1095 // Avoid bounds checks by using unsafe pointers.
1096 let p = self.as_mut_ptr();
1101 let p_r = p.offset(r as int);
1102 let p_wm1 = p.offset((w - 1) as int);
1105 let p_w = p_wm1.offset(1);
1106 mem::swap(&mut *p_r, &mut *p_w);
1118 ////////////////////////////////////////////////////////////////////////////////
1119 // Internal methods and functions
1120 ////////////////////////////////////////////////////////////////////////////////
1123 /// Reserves capacity for exactly `capacity` elements in the given vector.
1125 /// If the capacity for `self` is already equal to or greater than the
1126 /// requested capacity, then no action is taken.
1127 fn grow_capacity(&mut self, capacity: uint) {
1128 if mem::size_of::<T>() == 0 { return }
1130 if capacity > self.cap {
1131 let size = capacity.checked_mul(mem::size_of::<T>())
1132 .expect("capacity overflow");
1134 let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
1135 if ptr.is_null() { ::alloc::oom() }
1136 self.ptr = NonZero::new(ptr);
1138 self.cap = capacity;
1143 // FIXME: #13996: need a way to mark the return value as `noalias`
1145 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: uint, size: uint) -> *mut T {
1147 allocate(size, mem::min_align_of::<T>()) as *mut T
1149 reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
1154 unsafe fn dealloc<T>(ptr: *mut T, len: uint) {
1155 if mem::size_of::<T>() != 0 {
1156 deallocate(ptr as *mut u8,
1157 len * mem::size_of::<T>(),
1158 mem::min_align_of::<T>())
1162 ////////////////////////////////////////////////////////////////////////////////
1163 // Common trait implementations for Vec
1164 ////////////////////////////////////////////////////////////////////////////////
1167 impl<T:Clone> Clone for Vec<T> {
1168 fn clone(&self) -> Vec<T> { ::slice::SliceExt::to_vec(self.as_slice()) }
1170 fn clone_from(&mut self, other: &Vec<T>) {
1171 // drop anything in self that will not be overwritten
1172 if self.len() > other.len() {
1173 self.truncate(other.len())
1176 // reuse the contained values' allocations/resources.
1177 for (place, thing) in self.iter_mut().zip(other.iter()) {
1178 place.clone_from(thing)
1181 // self.len <= other.len due to the truncate above, so the
1182 // slice here is always in-bounds.
1183 let slice = &other[self.len()..];
1184 self.push_all(slice);
1189 impl<S: hash::Writer, T: Hash<S>> Hash<S> for Vec<T> {
1191 fn hash(&self, state: &mut S) {
1192 self.as_slice().hash(state);
1196 impl<S: hash::Writer + hash::Hasher, T: Hash<S>> Hash<S> for Vec<T> {
1198 fn hash(&self, state: &mut S) {
1199 self.as_slice().hash(state);
1203 #[unstable = "waiting on Index stability"]
1204 impl<T> Index<uint> for Vec<T> {
1208 fn index<'a>(&'a self, index: &uint) -> &'a T {
1209 &self.as_slice()[*index]
1213 impl<T> IndexMut<uint> for Vec<T> {
1217 fn index_mut<'a>(&'a mut self, index: &uint) -> &'a mut T {
1218 &mut self.as_mut_slice()[*index]
1223 impl<T> ops::Index<ops::Range<uint>> for Vec<T> {
1226 fn index(&self, index: &ops::Range<uint>) -> &[T] {
1227 self.as_slice().index(index)
1230 impl<T> ops::Index<ops::RangeTo<uint>> for Vec<T> {
1233 fn index(&self, index: &ops::RangeTo<uint>) -> &[T] {
1234 self.as_slice().index(index)
1237 impl<T> ops::Index<ops::RangeFrom<uint>> for Vec<T> {
1240 fn index(&self, index: &ops::RangeFrom<uint>) -> &[T] {
1241 self.as_slice().index(index)
1244 impl<T> ops::Index<ops::FullRange> for Vec<T> {
1247 fn index(&self, _index: &ops::FullRange) -> &[T] {
1252 impl<T> ops::IndexMut<ops::Range<uint>> for Vec<T> {
1255 fn index_mut(&mut self, index: &ops::Range<uint>) -> &mut [T] {
1256 self.as_mut_slice().index_mut(index)
1259 impl<T> ops::IndexMut<ops::RangeTo<uint>> for Vec<T> {
1262 fn index_mut(&mut self, index: &ops::RangeTo<uint>) -> &mut [T] {
1263 self.as_mut_slice().index_mut(index)
1266 impl<T> ops::IndexMut<ops::RangeFrom<uint>> for Vec<T> {
1269 fn index_mut(&mut self, index: &ops::RangeFrom<uint>) -> &mut [T] {
1270 self.as_mut_slice().index_mut(index)
1273 impl<T> ops::IndexMut<ops::FullRange> for Vec<T> {
1276 fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] {
1283 impl<T> ops::Deref for Vec<T> {
1286 fn deref<'a>(&'a self) -> &'a [T] { self.as_slice() }
1290 impl<T> ops::DerefMut for Vec<T> {
1291 fn deref_mut<'a>(&'a mut self) -> &'a mut [T] { self.as_mut_slice() }
1295 impl<T> FromIterator<T> for Vec<T> {
1297 fn from_iter<I:Iterator<Item=T>>(mut iterator: I) -> Vec<T> {
1298 let (lower, _) = iterator.size_hint();
1299 let mut vector = Vec::with_capacity(lower);
1300 for element in iterator {
1301 vector.push(element)
1307 #[unstable = "waiting on Extend stability"]
1308 impl<T> Extend<T> for Vec<T> {
1310 fn extend<I: Iterator<Item=T>>(&mut self, mut iterator: I) {
1311 let (lower, _) = iterator.size_hint();
1312 self.reserve(lower);
1313 for element in iterator {
1319 impl<A, B> PartialEq<Vec<B>> for Vec<A> where A: PartialEq<B> {
1321 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1323 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
1326 macro_rules! impl_eq {
1327 ($lhs:ty, $rhs:ty) => {
1328 impl<'b, A, B> PartialEq<$rhs> for $lhs where A: PartialEq<B> {
1330 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1332 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1335 impl<'b, A, B> PartialEq<$lhs> for $rhs where B: PartialEq<A> {
1337 fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
1339 fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&**self, &**other) }
1344 impl_eq! { Vec<A>, &'b [B] }
1345 impl_eq! { Vec<A>, &'b mut [B] }
1347 impl<'a, A, B> PartialEq<Vec<B>> for CowVec<'a, A> where A: PartialEq<B> + Clone {
1349 fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
1351 fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
1354 impl<'a, A, B> PartialEq<CowVec<'a, A>> for Vec<B> where A: Clone, B: PartialEq<A> {
1356 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
1358 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
1361 macro_rules! impl_eq_for_cowvec {
1363 impl<'a, 'b, A, B> PartialEq<$rhs> for CowVec<'a, A> where A: PartialEq<B> + Clone {
1365 fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
1367 fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
1370 impl<'a, 'b, A, B> PartialEq<CowVec<'a, A>> for $rhs where A: Clone, B: PartialEq<A> {
1372 fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
1374 fn ne(&self, other: &CowVec<'a, A>) -> bool { PartialEq::ne(&**self, &**other) }
1379 impl_eq_for_cowvec! { &'b [B] }
1380 impl_eq_for_cowvec! { &'b mut [B] }
1382 #[unstable = "waiting on PartialOrd stability"]
1383 impl<T: PartialOrd> PartialOrd for Vec<T> {
1385 fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
1386 self.as_slice().partial_cmp(other.as_slice())
1390 #[unstable = "waiting on Eq stability"]
// `Eq` is a pure marker trait: the element-wise `PartialEq` impl is a total
// equivalence relation whenever `T: Eq`, so no methods are required here.
impl<T: Eq> Eq for Vec<T> {}
1393 #[unstable = "waiting on Ord stability"]
1394 impl<T: Ord> Ord for Vec<T> {
1396 fn cmp(&self, other: &Vec<T>) -> Ordering {
1397 self.as_slice().cmp(other.as_slice())
// Borrow the vector's contents as a slice by building a `RawSlice` from the
// raw data pointer and length and transmuting it to `&[T]`.
// NOTE(review): fragmentary capture — the `data`/`len` field initializers and
// the enclosing `unsafe` block presumably present in the original are missing
// from this view; code left byte-identical.
1401 impl<T> AsSlice<T> for Vec<T> {
1402 /// Returns a slice into `self`.
1407 /// fn foo(slice: &[int]) {}
1409 /// let vec = vec![1i, 2];
1410 /// foo(vec.as_slice());
1414 fn as_slice<'a>(&'a self) -> &'a [T] {
1416 mem::transmute(RawSlice {
// `vec + &slice`: consumes `self` and returns the extended vector. The body
// lines (presumably `self.push_all(rhs); self` — TODO confirm in original)
// are missing from this capture; code left byte-identical.
1424 #[unstable = "recent addition, needs more experience"]
1425 impl<'a, T: Clone> Add<&'a [T]> for Vec<T> {
1426 type Output = Vec<T>;
1429 fn add(mut self, rhs: &[T]) -> Vec<T> {
// Destructor: run each element's destructor, then free the backing buffer.
// NOTE(review): fragmentary capture — the loop body (presumably a `ptr::read`
// of each element to run its destructor — confirm in original) and closing
// braces are missing; code left byte-identical.
1435 #[unsafe_destructor]
1437 impl<T> Drop for Vec<T> {
1438 fn drop(&mut self) {
1439 // This is (and should always remain) a no-op if the fields are
1440 // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
1443 for x in self.iter() {
1446 dealloc(*self.ptr, self.cap)
// `Default` yields an empty vector; the body line (presumably `Vec::new()` —
// TODO confirm) is missing from this capture.
1453 impl<T> Default for Vec<T> {
1455 fn default() -> Vec<T> {
// Debug-style formatting, delegated to the slice `Show` impl.
// NOTE(review): fragmentary capture — closing braces missing; code unchanged.
1460 #[unstable = "waiting on Show stability"]
1461 impl<T: fmt::Show> fmt::Show for Vec<T> {
1462 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1463 fmt::Show::fmt(self.as_slice(), f)
// `Vec<u8>` as a formatting sink: appends the UTF-8 bytes of `s`.
// NOTE(review): the `'a` impl lifetime parameter appears unused — verify
// against the original; also the trailing `Ok(())` line is missing from this
// fragmentary capture. Code left byte-identical.
1467 impl<'a> fmt::Writer for Vec<u8> {
1468 fn write_str(&mut self, s: &str) -> fmt::Result {
1469 self.push_all(s.as_bytes());
1474 ////////////////////////////////////////////////////////////////////////////////
1476 ////////////////////////////////////////////////////////////////////////////////
1478 #[unstable = "unclear how valuable this alias is"]
1479 /// A clone-on-write vector
1480 pub type CowVec<'a, T> = Cow<'a, Vec<T>, [T]>;

// Collecting an iterator always produces the owned variant.
1483 impl<'a, T> FromIterator<T> for CowVec<'a, T> where T: Clone {
1484 fn from_iter<I: Iterator<Item=T>>(it: I) -> CowVec<'a, T> {
1485 Cow::Owned(FromIterator::from_iter(it))

// An owned `Vec<T>` converts into `Cow::Owned`; a borrowed slice presumably
// converts into `Cow::Borrowed` — the bodies are missing from this
// fragmentary capture (TODO confirm in original). Code left byte-identical.
1489 impl<'a, T: 'a> IntoCow<'a, Vec<T>, [T]> for Vec<T> where T: Clone {
1490 fn into_cow(self) -> CowVec<'a, T> {

1495 impl<'a, T> IntoCow<'a, Vec<T>, [T]> for &'a [T] where T: Clone {
1496 fn into_cow(self) -> CowVec<'a, T> {
// NOTE(review): fragmentary capture — the `ptr`/`end` cursor fields of the
// struct and parts of `into_inner` are missing; code left byte-identical.
1505 /// An iterator that moves out of a vector.
1507 pub struct IntoIter<T> {
1508 allocation: *mut T, // the block of memory allocated for the vector
1509 cap: uint, // the capacity of the vector

// Sound because the iterator owns its elements exclusively.
1514 unsafe impl<T: Send> Send for IntoIter<T> { }
1515 unsafe impl<T: Sync> Sync for IntoIter<T> { }

1517 impl<T> IntoIter<T> {
1519 /// Drops all items that have not yet been moved and returns the empty vector.
1521 pub fn into_inner(mut self) -> Vec<T> {
// Dropping of remaining elements and the `mem::forget(self)` that prevents the
// iterator's own destructor from freeing the buffer are presumably on the
// missing lines here — TODO confirm in original.
1524 let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
// Rebuild an empty Vec that reuses the original allocation and capacity.
1526 Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
// Forward iteration: move elements out with `ptr::read`, advancing `ptr`.
// NOTE(review): fragmentary capture — the `unsafe` wrapper, the `let old =`
// line, and closing braces are missing; code left byte-identical.
1532 impl<T> Iterator for IntoIter<T> {
1536 fn next<'a>(&'a mut self) -> Option<T> {
1538 if self.ptr == self.end {
1541 if mem::size_of::<T>() == 0 {
1542 // purposefully don't use 'ptr.offset' because for
1543 // vectors with 0-size elements this would return the
// For zero-sized T the pointer is just a counter: bump the address by one
// byte and conjure the value from the (never dereferenced-for-data)
// non-null address `1`.
1545 self.ptr = mem::transmute(self.ptr as uint + 1);
1547 // Use a non-null pointer value
1548 Some(ptr::read(mem::transmute(1u)))
1551 self.ptr = self.ptr.offset(1);
1553 Some(ptr::read(old))

// Exact length: remaining byte span divided by element size (1 for ZSTs,
// where the pointers are counters, not addresses).
1560 fn size_hint(&self) -> (uint, Option<uint>) {
1561 let diff = (self.end as uint) - (self.ptr as uint);
1562 let size = mem::size_of::<T>();
1563 let exact = diff / (if size == 0 {1} else {size});
1564 (exact, Some(exact))
// Backward iteration: retreat `end` and move the element out. Mirrors `next`,
// including the zero-sized-type counter trick.
// NOTE(review): fragmentary capture — `unsafe` wrappers/closing braces missing.
1569 impl<T> DoubleEndedIterator for IntoIter<T> {
1571 fn next_back<'a>(&'a mut self) -> Option<T> {
1573 if self.end == self.ptr {
1576 if mem::size_of::<T>() == 0 {
1577 // See above for why 'ptr.offset' isn't used
1578 self.end = mem::transmute(self.end as uint - 1);
1580 // Use a non-null pointer value
1581 Some(ptr::read(mem::transmute(1u)))
1583 self.end = self.end.offset(-1);
1585 Some(ptr::read(mem::transmute(self.end)))

// `size_hint` is exact (see above), so the iterator has a known length.
1593 impl<T> ExactSizeIterator for IntoIter<T> {}
// Destructor: drop any elements not yet yielded (the draining loop is on
// lines missing from this capture — TODO confirm), then free the buffer.
1595 #[unsafe_destructor]
1597 impl<T> Drop for IntoIter<T> {
1598 fn drop(&mut self) {
1599 // destroy the remaining elements
1603 dealloc(self.allocation, self.cap);
// NOTE(review): fragmentary capture — the `ptr`/`end` cursor fields are on
// missing lines; code left byte-identical. Unlike `IntoIter`, `Drain` borrows
// the vector (hence the contravariant lifetime marker) rather than owning the
// allocation.
1609 /// An iterator that drains a vector.
1610 #[unsafe_no_drop_flag]
1611 #[unstable = "recently added as part of collections reform 2"]
1612 pub struct Drain<'a, T> {
1615 marker: ContravariantLifetime<'a>,
// Forward iteration for `Drain`; structurally identical to
// `Iterator for IntoIter` above, including the zero-sized-type counter trick.
// NOTE(review): fragmentary capture — `unsafe` wrapper, `let old =` line and
// closing braces are missing; code left byte-identical.
1619 impl<'a, T> Iterator for Drain<'a, T> {
1623 fn next(&mut self) -> Option<T> {
1625 if self.ptr == self.end {
1628 if mem::size_of::<T>() == 0 {
1629 // purposefully don't use 'ptr.offset' because for
1630 // vectors with 0-size elements this would return the
1632 self.ptr = mem::transmute(self.ptr as uint + 1);
1634 // Use a non-null pointer value
1635 Some(ptr::read(mem::transmute(1u)))
1638 self.ptr = self.ptr.offset(1);
1640 Some(ptr::read(old))

// Exact remaining count, computed as in `IntoIter::size_hint`.
1647 fn size_hint(&self) -> (uint, Option<uint>) {
1648 let diff = (self.end as uint) - (self.ptr as uint);
1649 let size = mem::size_of::<T>();
1650 let exact = diff / (if size == 0 {1} else {size});
1651 (exact, Some(exact))
// Backward iteration for `Drain`; mirrors `next`, retreating `end`.
// NOTE(review): fragmentary capture — `unsafe` wrappers/closing braces missing.
1656 impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1658 fn next_back(&mut self) -> Option<T> {
1660 if self.end == self.ptr {
1663 if mem::size_of::<T>() == 0 {
1664 // See above for why 'ptr.offset' isn't used
1665 self.end = mem::transmute(self.end as uint - 1);
1667 // Use a non-null pointer value
1668 Some(ptr::read(mem::transmute(1u)))
1670 self.end = self.end.offset(-1);
1672 Some(ptr::read(self.end))

// `size_hint` is exact, so `Drain` reports a known length.
1680 impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
// Destructor: drop elements not yet yielded. The draining loop itself is on
// lines missing from this capture (TODO confirm in original). No deallocation
// here — the underlying buffer still belongs to the drained `Vec`.
1682 #[unsafe_destructor]
1684 impl<'a, T> Drop for Drain<'a, T> {
1685 fn drop(&mut self) {
1686 // self.ptr == self.end == null if drop has already been called,
1687 // so we can use #[unsafe_no_drop_flag].
1689 // destroy the remaining elements
1694 ////////////////////////////////////////////////////////////////////////////////
1695 // Conversion from &[T] to &Vec<T>
1696 ////////////////////////////////////////////////////////////////////////////////
// NOTE(review): fragmentary capture — the `x: Vec<T>` field of the struct,
// the `deref` body, and the `drop` body are on missing lines; code left
// byte-identical.
1698 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
1700 pub struct DerefVec<'a, T> {
1702 l: ContravariantLifetime<'a>

1706 impl<'a, T> Deref for DerefVec<'a, T> {
1707 type Target = Vec<T>;
1709 fn deref<'b>(&'b self) -> &'b Vec<T> {

// The inner Vec aliases borrowed memory (see `as_vec` below), so its normal
// destructor must not run; the missing drop body presumably neutralizes it —
// TODO confirm in original.
1714 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
1715 #[unsafe_destructor]
1717 impl<'a, T> Drop for DerefVec<'a, T> {
1718 fn drop(&mut self) {

// Build a temporary Vec view over a borrowed slice: the Vec's pointer, length
// and capacity all come from the slice, so no ownership is taken.
1724 /// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
1726 pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
1729 x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
1730 l: ContravariantLifetime::<'a>
1735 ////////////////////////////////////////////////////////////////////////////////
1736 // Partial vec, used for map_in_place
1737 ////////////////////////////////////////////////////////////////////////////////
// NOTE(review): fragmentary capture — the `vec`/`start_u`/`end_u`/`start_t`/
// `end_t` fields of the first struct and the `num_t`/`num_u` counters of the
// second are on missing lines; code left byte-identical.
1739 /// An owned, partially type-converted vector of elements with non-zero size.
1741 /// `T` and `U` must have the same, non-zero size. They must also have the same
1744 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1745 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1746 /// destructed. Additionally the underlying storage of `vec` will be freed.
1747 struct PartialVecNonZeroSized<T,U> {

1756 /// An owned, partially type-converted vector of zero-sized elements.
1758 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
1760 struct PartialVecZeroSized<T,U> {
// Invariance markers: the struct stores no `T`/`U` values directly, only
// counts of pending destructor runs.
1763 marker_t: InvariantType<T>,
1764 marker_u: InvariantType<U>,
// Panic-safety destructor for the half-converted vector: zero the length so
// `vec`'s own destructor cannot re-drop elements, then run the outstanding
// `U` and `T` destructors by reading them out of the buffer.
// NOTE(review): fragmentary capture — `unsafe` wrapper and closing braces are
// on missing lines; code left byte-identical.
1767 #[unsafe_destructor]
1768 impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
1769 fn drop(&mut self) {
1771 // `vec` hasn't been modified until now. As it has a length
1772 // currently, this would run destructors of `T`s which might not be
1773 // there. So at first, set `vec`s length to `0`. This must be done
1774 // at first to remain memory-safe as the destructors of `U` or `T`
1775 // might cause unwinding where `vec`s destructor would be executed.
1776 self.vec.set_len(0);

1778 // We have instances of `U`s and `T`s in `vec`. Destruct them.
1779 while self.start_u != self.end_u {
1780 let _ = ptr::read(self.start_u); // Run a `U` destructor.
1781 self.start_u = self.start_u.offset(1);
1783 while self.start_t != self.end_t {
1784 let _ = ptr::read(self.start_t); // Run a `T` destructor.
1785 self.start_t = self.start_t.offset(1);
1787 // After this destructor ran, the destructor of `vec` will run,
1788 // deallocating the underlying memory.
// Zero-sized counterpart: there is no storage, so simply materialize and drop
// `num_t` `T`s and `num_u` `U`s. The counter decrements (`self.num_t -= 1;`
// etc.) are on lines missing from this capture — TODO confirm in original.
// (`mem::uninitialized` is acceptable here only because the types are
// zero-sized.)
1793 #[unsafe_destructor]
1794 impl<T,U> Drop for PartialVecZeroSized<T,U> {
1795 fn drop(&mut self) {
1797 // Destruct the instances of `T` and `U` this struct owns.
1798 while self.num_t != 0 {
1799 let _: T = mem::uninitialized(); // Run a `T` destructor.
1802 while self.num_u != 0 {
1803 let _: U = mem::uninitialized(); // Run a `U` destructor.
// --- test module interior (the `mod`/`#[cfg(test)]` header, `#[test]`
// attributes and closing braces were dropped from this capture; code below is
// left byte-identical) ---

1813 use core::mem::size_of;
1814 use core::iter::repeat;
1815 use core::ops::FullRange;

// Helper that counts destructor runs through a borrowed counter.
1819 struct DropCounter<'a> {
1823 #[unsafe_destructor]
1824 impl<'a> Drop for DropCounter<'a> {
1825 fn drop(&mut self) {

// `as_vec` exposes a slice as `&Vec`, contents preserved.
1832 let xs = [1u8, 2u8, 3u8];
1833 assert_eq!(as_vec(&xs).as_slice(), xs);

// `as_vec`'s DerefVec must NOT drop the borrowed elements; each is dropped
// exactly once when `xs` itself goes out of scope.
1837 fn test_as_vec_dtor() {
1838 let (mut count_x, mut count_y) = (0, 0);
1840 let xs = &[DropCounter { count: &mut count_x }, DropCounter { count: &mut count_y }];
1841 assert_eq!(as_vec(xs).len(), 2);
1843 assert_eq!(count_x, 1);
1844 assert_eq!(count_y, 1);

// Vec must stay three words (ptr, len, cap) — no drop flag.
1848 fn test_small_vec_struct() {
1849 assert!(size_of::<Vec<u8>>() == size_of::<uint>() * 3);
// NOTE(review): fragmentary capture — struct/field lines, pushes and closing
// braces are missing throughout; code left byte-identical.

// Dropping a container twice (via moved-out fields) must still run each
// element destructor exactly once.
1853 fn test_double_drop() {
1859 let (mut count_x, mut count_y) = (0, 0);
1861 let mut tv = TwoVec {
1865 tv.x.push(DropCounter {count: &mut count_x});
1866 tv.y.push(DropCounter {count: &mut count_y});
1868 // If Vec had a drop flag, here is where it would be zeroed.
1869 // Instead, it should rely on its internal state to prevent
1870 // doing anything significant when dropped multiple times.
1873 // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
1876 assert_eq!(count_x, 1);
1877 assert_eq!(count_y, 1);

// Capacity growth: starts at 0 and only ever grows monotonically as elements
// are pushed / reserved.
1882 let mut v = Vec::new();
1883 assert_eq!(v.capacity(), 0);
1886 assert!(v.capacity() >= 2);
1888 for i in range(0i, 16) {
1892 assert!(v.capacity() >= 16);
1894 assert!(v.capacity() >= 32);
1899 assert!(v.capacity() >= 33)

// `extend` from an iterator matches element-by-element `push`.
1904 let mut v = Vec::new();
1905 let mut w = Vec::new();
1907 v.extend(range(0i, 3));
1908 for i in range(0i, 3) { w.push(i) }
1912 v.extend(range(3i, 10));
1913 for i in range(3i, 10) { w.push(i) }
// Mutable-slicing tests: mutations through the sub-slice must be visible in
// the owning vector. (Loop bodies that mutate `*p` are on lines missing from
// this capture; code left byte-identical.)

1919 fn test_slice_from_mut() {
1920 let mut values = vec![1u8,2,3,4,5];
1922 let slice = values.slice_from_mut(2);
1923 assert!(slice == [3, 4, 5]);
1924 for p in slice.iter_mut() {
1929 assert!(values == [1, 2, 5, 6, 7]);

1933 fn test_slice_to_mut() {
1934 let mut values = vec![1u8,2,3,4,5];
1936 let slice = values.slice_to_mut(2);
1937 assert!(slice == [1, 2]);
1938 for p in slice.iter_mut() {
1943 assert!(values == [2, 3, 3, 4, 5]);

// `split_at_mut` yields two disjoint mutable halves of the same vector.
1947 fn test_split_at_mut() {
1948 let mut values = vec![1u8,2,3,4,5];
1950 let (left, right) = values.split_at_mut(2);
1952 let left: &[_] = left;
1953 assert!(&left[..left.len()] == &[1, 2][]);
1955 for p in left.iter_mut() {
1960 let right: &[_] = right;
1961 assert!(&right[..right.len()] == &[3, 4, 5][]);
1963 for p in right.iter_mut() {
1968 assert!(values == vec![2u8, 3, 5, 6, 7]);
// Clone semantics: equal contents but distinct backing allocations.
// NOTE(review): fragmentary capture — `z`'s definition and several setup lines
// are missing; code left byte-identical.
1973 let v: Vec<int> = vec!();
1974 let w = vec!(1i, 2, 3);
1976 assert_eq!(v, v.clone());
1980 // they should be disjoint in memory.
1981 assert!(w.as_ptr() != z.as_ptr())

// `clone_from` must work whether the destination is shorter, equal, or longer
// than the source (the re-initializations of `v` between calls are on missing
// lines).
1985 fn test_clone_from() {
1987 let three = vec!(box 1i, box 2, box 3);
1988 let two = vec!(box 4i, box 5);
1990 v.clone_from(&three);
1991 assert_eq!(v, three);
1994 v.clone_from(&three);
1995 assert_eq!(v, three);
2002 v.clone_from(&three);
2003 assert_eq!(v, three)

// `retain` keeps only elements matching the predicate, in order.
2008 let mut vec = vec![1u, 2, 3, 4];
2009 vec.retain(|&x| x % 2 == 0);
2010 assert!(vec == vec![2u, 4]);
// Zero-sized element handling: push/pop/iterate must all work even though no
// storage is allocated per element. (The `v.push(())` lines between the
// length asserts are on lines missing from this capture.)
2014 fn zero_sized_values() {
2015 let mut v = Vec::new();
2016 assert_eq!(v.len(), 0);
2018 assert_eq!(v.len(), 1);
2020 assert_eq!(v.len(), 2);
2021 assert_eq!(v.pop(), Some(()));
2022 assert_eq!(v.pop(), Some(()));
2023 assert_eq!(v.pop(), None);
2025 assert_eq!(v.iter().count(), 0);
2027 assert_eq!(v.iter().count(), 1);
2029 assert_eq!(v.iter().count(), 2);
2031 for &() in v.iter() {}
2033 assert_eq!(v.iter_mut().count(), 2);
2035 assert_eq!(v.iter_mut().count(), 3);
2037 assert_eq!(v.iter_mut().count(), 4);
2039 for &mut () in v.iter_mut() {}
2040 unsafe { v.set_len(0); }
2041 assert_eq!(v.iter_mut().count(), 0);

// `partition` splits by predicate, preserving order within each side.
2045 fn test_partition() {
2046 assert_eq!(vec![].into_iter().partition(|x: &int| *x < 3), (vec![], vec![]));
2047 assert_eq!(vec![1i, 2, 3].into_iter().partition(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
2048 assert_eq!(vec![1i, 2, 3].into_iter().partition(|x: &int| *x < 2), (vec![1], vec![2, 3]));
2049 assert_eq!(vec![1i, 2, 3].into_iter().partition(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));

// `unzip` splits a vector of pairs into two parallel vectors.
2053 fn test_zip_unzip() {
2054 let z1 = vec![(1i, 4i), (2, 5), (3, 6)];
2056 let (left, right): (Vec<_>, Vec<_>) = z1.iter().map(|&x| x).unzip();
2058 assert_eq!((1, 4), (left[0], right[0]));
2059 assert_eq!((2, 5), (left[1], right[1]));
2060 assert_eq!((3, 6), (left[2], right[2]));
// `from_raw_buf` copies from a raw pointer; verified against both a stack
// array and a heap vector. (The `unsafe` block / array definition lines are
// missing from this capture.)
2064 fn test_unsafe_ptrs() {
2066 // Test on-stack copy-from-buf.
2068 let ptr = a.as_ptr();
2069 let b = Vec::from_raw_buf(ptr, 3u);
2070 assert_eq!(b, vec![1, 2, 3]);
2072 // Test on-heap copy-from-buf.
2073 let c = vec![1i, 2, 3, 4, 5];
2074 let ptr = c.as_ptr();
2075 let d = Vec::from_raw_buf(ptr, 5u);
2076 assert_eq!(d, vec![1, 2, 3, 4, 5]);

// `truncate(3)` drops exactly the 2 removed elements; dropping the vector
// drops the remaining 3 (total 5). The `v.truncate(3)`/`drop(v)` lines are on
// missing lines — the drop counts below imply them.
2081 fn test_vec_truncate_drop() {
2082 static mut drops: uint = 0;
2084 impl Drop for Elem {
2085 fn drop(&mut self) {
2086 unsafe { drops += 1; }
2090 let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
2091 assert_eq!(unsafe { drops }, 0);
2093 assert_eq!(unsafe { drops }, 2);
2095 assert_eq!(unsafe { drops }, 5);
// Panic / out-of-bounds tests. The `#[should_fail]`-style attributes and the
// actual out-of-range indexing expressions are on lines missing from this
// capture; code left byte-identical.

// A destructor that panics mid-truncate must not cause double drops.
2100 fn test_vec_truncate_fail() {
2101 struct BadElem(int);
2102 impl Drop for BadElem {
2103 fn drop(&mut self) {
2104 let BadElem(ref mut x) = *self;
2105 if *x == 0xbadbeef {
2106 panic!("BadElem panic: 0xbadbeef")
2111 let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];

// In-bounds indexing works...
2117 let vec = vec!(1i, 2, 3);
2118 assert!(vec[1] == 2);

// ...and out-of-bounds indexing/slicing panics (indexing expressions missing
// from this capture).
2123 fn test_index_out_of_bounds() {
2124 let vec = vec!(1i, 2, 3);
2130 fn test_slice_out_of_bounds_1() {
2131 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2137 fn test_slice_out_of_bounds_2() {
2138 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2144 fn test_slice_out_of_bounds_3() {
2145 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2151 fn test_slice_out_of_bounds_4() {
2152 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2158 fn test_slice_out_of_bounds_5() {
2159 let x: Vec<int> = vec![1, 2, 3, 4, 5];
2165 fn test_swap_remove_empty() {
2166 let mut vec: Vec<uint> = vec!();

// Round-trip Vec -> IntoIter -> into_inner must preserve the allocation
// (same pointer, same capacity) while emptying the vector.
2171 fn test_move_iter_unwrap() {
2172 let mut vec: Vec<uint> = Vec::with_capacity(7);
2175 let ptr = vec.as_ptr();
2176 vec = vec.into_iter().into_inner();
2177 assert_eq!(vec.as_ptr(), ptr);
2178 assert_eq!(vec.capacity(), 7);
2179 assert_eq!(vec.len(), 0);
// `map_in_place` tests: in-place conversion requires same-size element types;
// mismatched sizes must panic (presumably a `#[should_fail]`-style attribute
// on the first fn — missing from this capture).

2184 fn test_map_in_place_incompatible_types_fail() {
2185 let v = vec![0u, 1, 2];
2186 v.map_in_place(|_| ());

2190 fn test_map_in_place() {
2191 let v = vec![0u, 1, 2];
2192 assert_eq!(v.map_in_place(|i: uint| i as int - 1), [-1i, 0, 1]);

2196 fn test_map_in_place_zero_sized() {
2197 let v = vec![(), ()];
2198 #[derive(PartialEq, Show)]
2200 assert_eq!(v.map_in_place(|_| ZeroSized), [ZeroSized, ZeroSized]);

// During a ZST-to-ZST conversion no destructor may run early: the counter
// stays 0 until the result vector is dropped, then equals the element count.
2204 fn test_map_in_place_zero_drop_count() {
2205 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
2207 #[derive(Clone, PartialEq, Show)]
2209 impl Drop for Nothing { fn drop(&mut self) { } }
2211 #[derive(Clone, PartialEq, Show)]
2213 impl Drop for ZeroSized {
2214 fn drop(&mut self) {
2215 DROP_COUNTER.fetch_add(1, Ordering::Relaxed);
2218 const NUM_ELEMENTS: uint = 2;
2219 static DROP_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
2221 let v = repeat(Nothing).take(NUM_ELEMENTS).collect::<Vec<_>>();
2223 DROP_COUNTER.store(0, Ordering::Relaxed);
2225 let v = v.map_in_place(|_| ZeroSized);
2226 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), 0);
2228 assert_eq!(DROP_COUNTER.load(Ordering::Relaxed), NUM_ELEMENTS);
// Consuming-iterator tests: `into_iter` (forward, reverse, zero-sized) and
// `drain` (which additionally leaves the source vector empty). The
// `vec2.push(i)` loop bodies are on lines missing from this capture.

2232 fn test_move_items() {
2233 let vec = vec![1, 2, 3];
2234 let mut vec2 : Vec<i32> = vec![];
2235 for i in vec.into_iter() {
2238 assert!(vec2 == vec![1, 2, 3]);

2242 fn test_move_items_reverse() {
2243 let vec = vec![1, 2, 3];
2244 let mut vec2 : Vec<i32> = vec![];
2245 for i in vec.into_iter().rev() {
2248 assert!(vec2 == vec![3, 2, 1]);

2252 fn test_move_items_zero_sized() {
2253 let vec = vec![(), (), ()];
2254 let mut vec2 : Vec<()> = vec![];
2255 for i in vec.into_iter() {
2258 assert!(vec2 == vec![(), (), ()]);

2262 fn test_drain_items() {
2263 let mut vec = vec![1, 2, 3];
2264 let mut vec2: Vec<i32> = vec![];
2265 for i in vec.drain() {
2268 assert_eq!(vec, []);
2269 assert_eq!(vec2, [ 1, 2, 3 ]);

2273 fn test_drain_items_reverse() {
2274 let mut vec = vec![1, 2, 3];
2275 let mut vec2: Vec<i32> = vec![];
2276 for i in vec.drain().rev() {
2279 assert_eq!(vec, []);
2280 assert_eq!(vec2, [ 3, 2, 1 ]);

2284 fn test_drain_items_zero_sized() {
2285 let mut vec = vec![(), (), ()];
2286 let mut vec2: Vec<()> = vec![];
2287 for i in vec.drain() {
2290 assert_eq!(vec, []);
2291 assert_eq!(vec2, [(), (), ()]);

// `into_boxed_slice` preserves contents while shedding excess capacity.
2295 fn test_into_boxed_slice() {
2296 let xs = vec![1u, 2, 3];
2297 let ys = xs.into_boxed_slice();
2298 assert_eq!(ys.as_slice(), [1u, 2, 3]);
// --- benchmarks (the `#[bench]` attributes and `b.iter(|| ...)` closure
// wrappers are on lines missing from this capture; code left byte-identical) ---

// Construction of an empty Vec (no allocation expected).
2302 fn bench_new(b: &mut Bencher) {
2304 let v: Vec<uint> = Vec::new();
2305 assert_eq!(v.len(), 0);
2306 assert_eq!(v.capacity(), 0);

// Pre-allocated construction at a given capacity.
2310 fn do_bench_with_capacity(b: &mut Bencher, src_len: uint) {
2311 b.bytes = src_len as u64;
2314 let v: Vec<uint> = Vec::with_capacity(src_len);
2315 assert_eq!(v.len(), 0);
2316 assert_eq!(v.capacity(), src_len);

2321 fn bench_with_capacity_0000(b: &mut Bencher) {
2322 do_bench_with_capacity(b, 0)
2326 fn bench_with_capacity_0010(b: &mut Bencher) {
2327 do_bench_with_capacity(b, 10)
2331 fn bench_with_capacity_0100(b: &mut Bencher) {
2332 do_bench_with_capacity(b, 100)
2336 fn bench_with_capacity_1000(b: &mut Bencher) {
2337 do_bench_with_capacity(b, 1000)
// Building a vector by collecting a range ("from_fn"-style).
// NOTE(review): `#[bench]` attributes and `b.iter` wrappers missing from this
// fragmentary capture; code left byte-identical.
2340 fn do_bench_from_fn(b: &mut Bencher, src_len: uint) {
2341 b.bytes = src_len as u64;
2344 let dst = range(0, src_len).collect::<Vec<_>>();
2345 assert_eq!(dst.len(), src_len);
2346 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));

2351 fn bench_from_fn_0000(b: &mut Bencher) {
2352 do_bench_from_fn(b, 0)
2356 fn bench_from_fn_0010(b: &mut Bencher) {
2357 do_bench_from_fn(b, 10)
2361 fn bench_from_fn_0100(b: &mut Bencher) {
2362 do_bench_from_fn(b, 100)
2366 fn bench_from_fn_1000(b: &mut Bencher) {
2367 do_bench_from_fn(b, 1000)

// Building a vector of repeated elements ("from_elem"-style).
2370 fn do_bench_from_elem(b: &mut Bencher, src_len: uint) {
2371 b.bytes = src_len as u64;
2374 let dst: Vec<uint> = repeat(5).take(src_len).collect();
2375 assert_eq!(dst.len(), src_len);
2376 assert!(dst.iter().all(|x| *x == 5));

2381 fn bench_from_elem_0000(b: &mut Bencher) {
2382 do_bench_from_elem(b, 0)
2386 fn bench_from_elem_0010(b: &mut Bencher) {
2387 do_bench_from_elem(b, 10)
2391 fn bench_from_elem_0100(b: &mut Bencher) {
2392 do_bench_from_elem(b, 100)
2396 fn bench_from_elem_1000(b: &mut Bencher) {
2397 do_bench_from_elem(b, 1000)
// Cloning a vector via `to_vec` on its slice view.
// NOTE(review): `#[bench]` attributes and `b.iter` wrappers missing from this
// fragmentary capture; code left byte-identical.
2400 fn do_bench_from_slice(b: &mut Bencher, src_len: uint) {
2401 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2403 b.bytes = src_len as u64;
2406 let dst = src.clone()[].to_vec();
2407 assert_eq!(dst.len(), src_len);
2408 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));

2413 fn bench_from_slice_0000(b: &mut Bencher) {
2414 do_bench_from_slice(b, 0)
2418 fn bench_from_slice_0010(b: &mut Bencher) {
2419 do_bench_from_slice(b, 10)
2423 fn bench_from_slice_0100(b: &mut Bencher) {
2424 do_bench_from_slice(b, 100)
2428 fn bench_from_slice_1000(b: &mut Bencher) {
2429 do_bench_from_slice(b, 1000)

// Rebuilding a vector by collecting its consuming iterator.
2432 fn do_bench_from_iter(b: &mut Bencher, src_len: uint) {
2433 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2435 b.bytes = src_len as u64;
2438 let dst: Vec<uint> = FromIterator::from_iter(src.clone().into_iter());
2439 assert_eq!(dst.len(), src_len);
2440 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));

2445 fn bench_from_iter_0000(b: &mut Bencher) {
2446 do_bench_from_iter(b, 0)
2450 fn bench_from_iter_0010(b: &mut Bencher) {
2451 do_bench_from_iter(b, 10)
2455 fn bench_from_iter_0100(b: &mut Bencher) {
2456 do_bench_from_iter(b, 100)
2460 fn bench_from_iter_1000(b: &mut Bencher) {
2461 do_bench_from_iter(b, 1000)
// Appending `src_len` elements onto a `dst_len`-element vector via `extend`.
// NOTE(review): `#[bench]` attributes and `b.iter` wrappers missing from this
// fragmentary capture; code left byte-identical.
2464 fn do_bench_extend(b: &mut Bencher, dst_len: uint, src_len: uint) {
2465 let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len))
;
2466 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2468 b.bytes = src_len as u64;
2471 let mut dst = dst.clone();
2472 dst.extend(src.clone().into_iter());
2473 assert_eq!(dst.len(), dst_len + src_len);
2474 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));

2479 fn bench_extend_0000_0000(b: &mut Bencher) {
2480 do_bench_extend(b, 0, 0)
2484 fn bench_extend_0000_0010(b: &mut Bencher) {
2485 do_bench_extend(b, 0, 10)
2489 fn bench_extend_0000_0100(b: &mut Bencher) {
2490 do_bench_extend(b, 0, 100)
2494 fn bench_extend_0000_1000(b: &mut Bencher) {
2495 do_bench_extend(b, 0, 1000)
2499 fn bench_extend_0010_0010(b: &mut Bencher) {
2500 do_bench_extend(b, 10, 10)
2504 fn bench_extend_0100_0100(b: &mut Bencher) {
2505 do_bench_extend(b, 100, 100)
2509 fn bench_extend_1000_1000(b: &mut Bencher) {
2510 do_bench_extend(b, 1000, 1000)
// Appending by slice copy via `push_all` (clones each element from the slice).
// NOTE(review): `#[bench]` attributes and `b.iter` wrappers missing from this
// fragmentary capture; code left byte-identical.
2513 fn do_bench_push_all(b: &mut Bencher, dst_len: uint, src_len: uint) {
2514 let dst: Vec<uint> = FromIterator::from_iter(range(0, dst_len));
2515 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2517 b.bytes = src_len as u64;
2520 let mut dst = dst.clone();
2521 dst.push_all(src.as_slice());
2522 assert_eq!(dst.len(), dst_len + src_len);
2523 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));

2528 fn bench_push_all_0000_0000(b: &mut Bencher) {
2529 do_bench_push_all(b, 0, 0)
2533 fn bench_push_all_0000_0010(b: &mut Bencher) {
2534 do_bench_push_all(b, 0, 10)
2538 fn bench_push_all_0000_0100(b: &mut Bencher) {
2539 do_bench_push_all(b, 0, 100)
2543 fn bench_push_all_0000_1000(b: &mut Bencher) {
2544 do_bench_push_all(b, 0, 1000)
2548 fn bench_push_all_0010_0010(b: &mut Bencher) {
2549 do_bench_push_all(b, 10, 10)
2553 fn bench_push_all_0100_0100(b: &mut Bencher) {
2554 do_bench_push_all(b, 100, 100)
2558 fn bench_push_all_1000_1000(b: &mut Bencher) {
2559 do_bench_push_all(b, 1000, 1000)
// "push_all_move" benchmark — NOTE(review): despite the name, the body calls
// `extend` (identical to `do_bench_extend` above); presumably kept after a
// `push_all_move` method was replaced by `extend` — confirm against history.
// `#[bench]` attributes and `b.iter` wrappers missing from this capture.
2562 fn do_bench_push_all_move(b: &mut Bencher, dst_len: uint, src_len: uint) {
2563 let dst: Vec<uint> = FromIterator::from_iter(range(0u, dst_len));
2564 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2566 b.bytes = src_len as u64;
2569 let mut dst = dst.clone();
2570 dst.extend(src.clone().into_iter());
2571 assert_eq!(dst.len(), dst_len + src_len);
2572 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));

2577 fn bench_push_all_move_0000_0000(b: &mut Bencher) {
2578 do_bench_push_all_move(b, 0, 0)
2582 fn bench_push_all_move_0000_0010(b: &mut Bencher) {
2583 do_bench_push_all_move(b, 0, 10)
2587 fn bench_push_all_move_0000_0100(b: &mut Bencher) {
2588 do_bench_push_all_move(b, 0, 100)
2592 fn bench_push_all_move_0000_1000(b: &mut Bencher) {
2593 do_bench_push_all_move(b, 0, 1000)
2597 fn bench_push_all_move_0010_0010(b: &mut Bencher) {
2598 do_bench_push_all_move(b, 10, 10)
2602 fn bench_push_all_move_0100_0100(b: &mut Bencher) {
2603 do_bench_push_all_move(b, 100, 100)
2607 fn bench_push_all_move_1000_1000(b: &mut Bencher) {
2608 do_bench_push_all_move(b, 1000, 1000)
// Full-vector `clone` at several sizes.
// NOTE(review): `#[bench]` attributes and `b.iter` wrappers missing from this
// fragmentary capture; code left byte-identical.
2611 fn do_bench_clone(b: &mut Bencher, src_len: uint) {
2612 let src: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2614 b.bytes = src_len as u64;
2617 let dst = src.clone();
2618 assert_eq!(dst.len(), src_len);
2619 assert!(dst.iter().enumerate().all(|(i, x)| i == *x));

2624 fn bench_clone_0000(b: &mut Bencher) {
2625 do_bench_clone(b, 0)
2629 fn bench_clone_0010(b: &mut Bencher) {
2630 do_bench_clone(b, 10)
2634 fn bench_clone_0100(b: &mut Bencher) {
2635 do_bench_clone(b, 100)
2639 fn bench_clone_1000(b: &mut Bencher) {
2640 do_bench_clone(b, 1000)
// `clone_from` benchmark matrix: `times` repetitions over every combination
// of shorter/equal/longer destination vs. source, exercising allocation-reuse.
// NOTE(review): `#[bench]` attributes and `b.iter` wrappers missing from this
// fragmentary capture; code left byte-identical.
2643 fn do_bench_clone_from(b: &mut Bencher, times: uint, dst_len: uint, src_len: uint) {
2644 let dst: Vec<uint> = FromIterator::from_iter(range(0, src_len));
2645 let src: Vec<uint> = FromIterator::from_iter(range(dst_len, dst_len + src_len));
2647 b.bytes = (times * src_len) as u64;
2650 let mut dst = dst.clone();
2652 for _ in range(0, times) {
2653 dst.clone_from(&src);
2655 assert_eq!(dst.len(), src_len);
2656 assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x));

// Single-repetition variants.
2662 fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
2663 do_bench_clone_from(b, 1, 0, 0)
2667 fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
2668 do_bench_clone_from(b, 1, 0, 10)
2672 fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
2673 do_bench_clone_from(b, 1, 0, 100)
2677 fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
2678 do_bench_clone_from(b, 1, 0, 1000)
2682 fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
2683 do_bench_clone_from(b, 1, 10, 10)
2687 fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
2688 do_bench_clone_from(b, 1, 100, 100)
2692 fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
2693 do_bench_clone_from(b, 1, 1000, 1000)
2697 fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
2698 do_bench_clone_from(b, 1, 10, 100)
2702 fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
2703 do_bench_clone_from(b, 1, 100, 1000)
2707 fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
2708 do_bench_clone_from(b, 1, 10, 0)
2712 fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
2713 do_bench_clone_from(b, 1, 100, 10)
2717 fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
2718 do_bench_clone_from(b, 1, 1000, 100)

// Ten-repetition variants (amortized allocation reuse).
2722 fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
2723 do_bench_clone_from(b, 10, 0, 0)
2727 fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
2728 do_bench_clone_from(b, 10, 0, 10)
2732 fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
2733 do_bench_clone_from(b, 10, 0, 100)
2737 fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
2738 do_bench_clone_from(b, 10, 0, 1000)
2742 fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
2743 do_bench_clone_from(b, 10, 10, 10)
2747 fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
2748 do_bench_clone_from(b, 10, 100, 100)
2752 fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
2753 do_bench_clone_from(b, 10, 1000, 1000)
2757 fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
2758 do_bench_clone_from(b, 10, 10, 100)
2762 fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
2763 do_bench_clone_from(b, 10, 100, 1000)
2767 fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
2768 do_bench_clone_from(b, 10, 10, 0)
2772 fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
2773 do_bench_clone_from(b, 10, 100, 10)
2777 fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
2778 do_bench_clone_from(b, 10, 1000, 100)