1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A growable list type with heap-allocated contents, written `Vec<T>` but
12 //! pronounced 'vector.'
14 //! Vectors have `O(1)` indexing, push (to the end) and pop (from the end).
18 //! Explicitly creating a `Vec<T>` with `new()`:
21 //! let xs: Vec<i32> = Vec::new();
24 //! Using the `vec!` macro:
27 //! let ys: Vec<i32> = vec![];
29 //! let zs = vec![1i32, 2, 3, 4, 5];
35 //! let mut xs = vec![1i32, 2];
43 //! let mut xs = vec![1i32, 2];
45 //! let two = xs.pop();
48 #![stable(feature = "rust1", since = "1.0.0")]
52 use alloc::boxed::Box;
53 use alloc::heap::{EMPTY, allocate, reallocate, deallocate};
55 use core::cmp::Ordering;
56 use core::default::Default;
58 use core::hash::{self, Hash};
59 use core::intrinsics::assume;
60 use core::iter::{repeat, FromIterator, IntoIterator};
61 use core::marker::PhantomData;
63 use core::ops::{Index, IndexMut, Deref, Add};
66 use core::ptr::Unique;
70 use borrow::{Cow, IntoCow};
72 /// A growable list type, written `Vec<T>` but pronounced 'vector.'
77 /// # #![feature(collections)]
78 /// let mut vec = Vec::new();
82 /// assert_eq!(vec.len(), 2);
83 /// assert_eq!(vec[0], 1);
85 /// assert_eq!(vec.pop(), Some(2));
86 /// assert_eq!(vec.len(), 1);
89 /// assert_eq!(vec[0], 7);
91 /// vec.push_all(&[1, 2, 3]);
93 /// for x in vec.iter() {
94 /// println!("{}", x);
96 /// assert_eq!(vec, [7, 1, 2, 3]);
99 /// The `vec!` macro is provided to make initialization more convenient:
102 /// let mut vec = vec![1, 2, 3];
104 /// assert_eq!(vec, [1, 2, 3, 4]);
107 /// Use a `Vec<T>` as an efficient stack:
110 /// let mut stack = Vec::new();
117 /// let top = match stack.pop() {
118 /// None => break, // empty
121 /// // Prints 3, 2, 1
122 /// println!("{}", top);
126 /// # Capacity and reallocation
128 /// The capacity of a vector is the amount of space allocated for any future
129 /// elements that will be added onto the vector. This is not to be confused with
130 /// the *length* of a vector, which specifies the number of actual elements
131 /// within the vector. If a vector's length exceeds its capacity, its capacity
132 /// will automatically be increased, but its elements will have to be
135 /// For example, a vector with capacity 10 and length 0 would be an empty vector
136 /// with space for 10 more elements. Pushing 10 or fewer elements onto the
137 /// vector will not change its capacity or cause reallocation to occur. However,
138 /// if the vector's length is increased to 11, it will have to reallocate, which
139 /// can be slow. For this reason, it is recommended to use `Vec::with_capacity`
140 /// whenever possible to specify how big the vector is expected to get.
141 #[unsafe_no_drop_flag]
142 #[stable(feature = "rust1", since = "1.0.0")]
149 unsafe impl<T: Send> Send for Vec<T> { }
150 unsafe impl<T: Sync> Sync for Vec<T> { }
152 ////////////////////////////////////////////////////////////////////////////////
154 ////////////////////////////////////////////////////////////////////////////////
// NOTE(review): elided listing fragment (embedded line numbers; interior lines
// missing, incl. the closing brace). Code kept byte-identical.
157 /// Constructs a new, empty `Vec<T>`.
159 /// The vector will not allocate until elements are pushed onto it.
164 /// let mut vec: Vec<i32> = Vec::new();
167 #[stable(feature = "rust1", since = "1.0.0")]
168 pub fn new() -> Vec<T> {
169 // We want ptr to never be NULL so instead we set it to some arbitrary
170 // non-null value which is fine since we never call deallocate on the ptr
171 // if cap is 0. The reason for this is because the pointer of a slice
172 // being NULL would break the null pointer optimization for enums.
173 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
// NOTE(review): elided fragment — the `capacity == 0` arm's body (lines 207-208
// of the original) is missing between the visible lines. Code kept byte-identical.
// Zero-sized T gets capacity usize::MAX since no allocation is ever needed.
176 /// Constructs a new, empty `Vec<T>` with the specified capacity.
178 /// The vector will be able to hold exactly `capacity` elements without reallocating. If
179 /// `capacity` is 0, the vector will not allocate.
181 /// It is important to note that this function does not specify the *length* of the returned
182 /// vector, but only the *capacity*. (For an explanation of the difference between length and
183 /// capacity, see the main `Vec<T>` docs above, 'Capacity and reallocation'.)
188 /// let mut vec: Vec<_> = Vec::with_capacity(10);
190 /// // The vector contains no items, even though it has capacity for more
191 /// assert_eq!(vec.len(), 0);
193 /// // These are all done without reallocating...
198 /// // ...but this may make the vector reallocate
202 #[stable(feature = "rust1", since = "1.0.0")]
203 pub fn with_capacity(capacity: usize) -> Vec<T> {
204 if mem::size_of::<T>() == 0 {
205 unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
206 } else if capacity == 0 {
209 let size = capacity.checked_mul(mem::size_of::<T>())
210 .expect("capacity overflow");
211 let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
212 if ptr.is_null() { ::alloc::oom() }
213 unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
// NOTE(review): elided fragment — the struct-literal body is incomplete (only the
// `ptr` field is visible; `len`/`cap` fields and closing braces are among the
// missing lines). Code kept byte-identical.
217 /// Creates a `Vec<T>` directly from the raw components of another vector.
219 /// This is highly unsafe, due to the number of invariants that aren't checked.
228 /// let mut v = vec![1, 2, 3];
230 /// // Pull out the various important pieces of information about `v`
231 /// let p = v.as_mut_ptr();
232 /// let len = v.len();
233 /// let cap = v.capacity();
236 /// // Cast `v` into the void: no destructor run, so we are in
237 /// // complete control of the allocation to which `p` points.
240 /// // Overwrite memory with 4, 5, 6
241 /// for i in 0..len as isize {
242 /// ptr::write(p.offset(i), 4 + i);
245 /// // Put everything back together into a Vec
246 /// let rebuilt = Vec::from_raw_parts(p, len, cap);
247 /// assert_eq!(rebuilt, [4, 5, 6]);
251 #[stable(feature = "rust1", since = "1.0.0")]
252 pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
253 capacity: usize) -> Vec<T> {
255 ptr: Unique::new(ptr),
// NOTE(review): elided fragment — the `set_len` call and return are missing.
// NOTE(review): `ptr::copy_nonoverlapping(dst, src, len)` argument order matches
// the pre-1.0 signature this snapshot uses — presumably intentional for this era;
// verify against the toolchain's `ptr` docs before porting.
261 /// Creates a vector by copying the elements from a raw pointer.
263 /// This function will copy `elts` contiguous elements starting at `ptr` into a new allocation
264 /// owned by the returned `Vec<T>`. The elements of the buffer are copied into the vector
265 /// without cloning, as if `ptr::read()` were called on them.
267 #[unstable(feature = "collections",
268 reason = "may be better expressed via composition")]
269 pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> {
270 let mut dst = Vec::with_capacity(elts);
272 ptr::copy_nonoverlapping(dst.as_mut_ptr(), ptr, elts);
// NOTE(review): elided fragment — body (presumably returning `self.cap`) missing.
276 /// Returns the number of elements the vector can hold without
282 /// let vec: Vec<i32> = Vec::with_capacity(10);
283 /// assert_eq!(vec.capacity(), 10);
286 #[stable(feature = "rust1", since = "1.0.0")]
287 pub fn capacity(&self) -> usize {
// NOTE(review): elided fragment; closing braces missing. Growth rounds the
// required capacity up to the next power of two, with checked arithmetic so
// overflow panics rather than wrapping.
291 /// Reserves capacity for at least `additional` more elements to be inserted in the given
292 /// `Vec<T>`. The collection may reserve more space to avoid frequent reallocations.
296 /// Panics if the new capacity overflows `usize`.
301 /// let mut vec = vec![1];
303 /// assert!(vec.capacity() >= 11);
305 #[stable(feature = "rust1", since = "1.0.0")]
306 pub fn reserve(&mut self, additional: usize) {
307 if self.cap - self.len < additional {
308 let err_msg = "Vec::reserve: `usize` overflow";
309 let new_cap = self.len.checked_add(additional).expect(err_msg)
310 .checked_next_power_of_two().expect(err_msg);
311 self.grow_capacity(new_cap);
// NOTE(review): elided fragment; closing braces missing. Unlike `reserve`, no
// power-of-two rounding — grows to exactly len + additional.
315 /// Reserves the minimum capacity for exactly `additional` more elements to
316 /// be inserted in the given `Vec<T>`. Does nothing if the capacity is already
319 /// Note that the allocator may give the collection more space than it
320 /// requests. Therefore capacity can not be relied upon to be precisely
321 /// minimal. Prefer `reserve` if future insertions are expected.
325 /// Panics if the new capacity overflows `usize`.
330 /// let mut vec = vec![1];
331 /// vec.reserve_exact(10);
332 /// assert!(vec.capacity() >= 11);
334 #[stable(feature = "rust1", since = "1.0.0")]
335 pub fn reserve_exact(&mut self, additional: usize) {
336 if self.cap - self.len < additional {
337 match self.len.checked_add(additional) {
338 None => panic!("Vec::reserve: `usize` overflow"),
339 Some(new_cap) => self.grow_capacity(new_cap)
// NOTE(review): elided fragment — the `len == 0` branch condition and the lines
// resetting `cap` after dealloc/realloc are missing; the visible code deallocates
// (presumably when empty) or reallocates down to exactly `len` elements.
344 /// Shrinks the capacity of the vector as much as possible.
346 /// It will drop down as close as possible to the length but the allocator
347 /// may still inform the vector that there is space for a few more elements.
352 /// # #![feature(collections)]
353 /// let mut vec = Vec::with_capacity(10);
354 /// vec.push_all(&[1, 2, 3]);
355 /// assert_eq!(vec.capacity(), 10);
356 /// vec.shrink_to_fit();
357 /// assert!(vec.capacity() >= 3);
359 #[stable(feature = "rust1", since = "1.0.0")]
360 pub fn shrink_to_fit(&mut self) {
361 if mem::size_of::<T>() == 0 { return }
366 dealloc(*self.ptr, self.cap)
370 } else if self.cap != self.len {
372 // Overflow check is unnecessary as the vector is already at
374 let ptr = reallocate(*self.ptr as *mut u8,
375 self.cap * mem::size_of::<T>(),
376 self.len * mem::size_of::<T>(),
377 mem::min_align_of::<T>()) as *mut T;
378 if ptr.is_null() { ::alloc::oom() }
379 self.ptr = Unique::new(ptr);
// NOTE(review): elided fragment — the `mem::forget(self)` / return lines are
// missing after the `Box::from_raw` conversion.
385 /// Convert the vector into Box<[T]>.
387 /// Note that this will drop any excess capacity. Calling this and
388 /// converting back to a vector with `into_vec()` is equivalent to calling
389 /// `shrink_to_fit()`.
390 #[unstable(feature = "collections")]
391 pub fn into_boxed_slice(mut self) -> Box<[T]> {
392 self.shrink_to_fit();
394 let xs: Box<[T]> = Box::from_raw(&mut *self);
// NOTE(review): elided fragment — the `self.len -= 1` (or equivalent decrement)
// between the loop head and the `ptr::read` is among the missing lines; the
// visible comment documents that ordering.
400 /// Shorten a vector, dropping excess elements.
402 /// If `len` is greater than the vector's current length, this has no
408 /// # #![feature(collections)]
409 /// let mut vec = vec![1, 2, 3, 4];
411 /// assert_eq!(vec, [1, 2]);
413 #[stable(feature = "rust1", since = "1.0.0")]
414 pub fn truncate(&mut self, len: usize) {
416 // drop any extra elements
417 while len < self.len {
418 // decrement len before the read(), so a panic on Drop doesn't
419 // re-drop the just-failed value.
421 ptr::read(self.get_unchecked(self.len));
// NOTE(review): two adjacent accessor fragments; both bodies are elided.
426 /// Extract a slice containing the entire vector.
428 #[unstable(feature = "convert",
429 reason = "waiting on RFC revision")]
430 pub fn as_slice(&self) -> &[T] {
434 /// Deprecated: use `&mut s[..]` instead.
436 #[unstable(feature = "convert",
437 reason = "waiting on RFC revision")]
438 pub fn as_mut_slice(&mut self) -> &mut [T] {
// NOTE(review): elided fragment — the lines binding `ptr`/`cap` and the
// `mem::forget(self)` that transfers ownership to the iterator are missing.
// For zero-sized T the end pointer is computed by integer arithmetic since
// `offset` on a ZST pointer would not advance.
442 /// Creates a consuming iterator, that is, one that moves each value out of
443 /// the vector (from start to end). The vector cannot be used after calling
449 /// let v = vec!["a".to_string(), "b".to_string()];
450 /// for s in v.into_iter() {
451 /// // s has type String, not &String
452 /// println!("{}", s);
456 #[stable(feature = "rust1", since = "1.0.0")]
457 pub fn into_iter(self) -> IntoIter<T> {
460 assume(!ptr.is_null());
462 let begin = ptr as *const T;
463 let end = if mem::size_of::<T>() == 0 {
464 (ptr as usize + self.len()) as *const T
466 ptr.offset(self.len() as isize) as *const T
469 IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
// NOTE(review): two fragments. `set_len`'s body (`self.len = len;`) is elided.
// `swap_remove` swaps the target with the last element, then (in elided lines,
// presumably) pops it — the visible code shows only the swap.
473 /// Sets the length of a vector.
475 /// This will explicitly set the size of the vector, without actually
476 /// modifying its buffers, so it is up to the caller to ensure that the
477 /// vector is actually the specified size.
482 /// let mut v = vec![1, 2, 3, 4];
488 #[stable(feature = "rust1", since = "1.0.0")]
489 pub unsafe fn set_len(&mut self, len: usize) {
493 /// Removes an element from anywhere in the vector and return it, replacing
494 /// it with the last element.
496 /// This does not preserve ordering, but is O(1).
500 /// Panics if `index` is out of bounds.
505 /// let mut v = vec!["foo", "bar", "baz", "qux"];
507 /// assert_eq!(v.swap_remove(1), "bar");
508 /// assert_eq!(v, ["foo", "qux", "baz"]);
510 /// assert_eq!(v.swap_remove(0), "foo");
511 /// assert_eq!(v, ["baz", "qux"]);
514 #[stable(feature = "rust1", since = "1.0.0")]
515 pub fn swap_remove(&mut self, index: usize) -> T {
516 let length = self.len();
517 self.swap(index, length - 1);
// NOTE(review): elided fragment — the capacity-check/`reserve` line after the
// "space for the new element" comment is missing, as are closing braces.
// `ptr::copy` here takes (dst, src, len) in this era's signature.
521 /// Inserts an element at position `index` within the vector, shifting all
522 /// elements after position `i` one position to the right.
526 /// Panics if `index` is not between `0` and the vector's length (both
527 /// bounds inclusive).
532 /// let mut vec = vec![1, 2, 3];
533 /// vec.insert(1, 4);
534 /// assert_eq!(vec, [1, 4, 2, 3]);
535 /// vec.insert(4, 5);
536 /// assert_eq!(vec, [1, 4, 2, 3, 5]);
538 #[stable(feature = "rust1", since = "1.0.0")]
539 pub fn insert(&mut self, index: usize, element: T) {
540 let len = self.len();
541 assert!(index <= len);
542 // space for the new element
545 unsafe { // infallible
546 // The spot to put the new value
548 let p = self.as_mut_ptr().offset(index as isize);
549 // Shift everything over to make space. (Duplicating the
550 // `index`th element into two consecutive places.)
551 ptr::copy(p.offset(1), &*p, len - index);
552 // Write it in, overwriting the first copy of the `index`th
554 ptr::write(&mut *p, element);
556 self.set_len(len + 1);
// NOTE(review): elided fragment — the `let ret;` declaration and the final
// `ret` return expression are among the missing lines.
560 /// Removes and returns the element at position `index` within the vector,
561 /// shifting all elements after position `index` one position to the left.
565 /// Panics if `i` is out of bounds.
570 /// # #![feature(collections)]
571 /// let mut v = vec![1, 2, 3];
572 /// assert_eq!(v.remove(1), 2);
573 /// assert_eq!(v, [1, 3]);
575 #[stable(feature = "rust1", since = "1.0.0")]
576 pub fn remove(&mut self, index: usize) -> T {
577 let len = self.len();
578 assert!(index < len);
579 unsafe { // infallible
582 // the place we are taking from.
583 let ptr = self.as_mut_ptr().offset(index as isize);
584 // copy it out, unsafely having a copy of the value on
585 // the stack and in the vector at the same time.
586 ret = ptr::read(ptr);
588 // Shift everything down to fill in that spot.
589 ptr::copy(ptr, &*ptr.offset(1), len - index - 1);
591 self.set_len(len - 1);
// NOTE(review): heavily elided — the scan loop computing `del` (count of
// rejected elements) is entirely missing; only the final truncate survives.
596 /// Retains only the elements specified by the predicate.
598 /// In other words, remove all elements `e` such that `f(&e)` returns false.
599 /// This method operates in place and preserves the order of the retained
605 /// let mut vec = vec![1, 2, 3, 4];
606 /// vec.retain(|&x| x%2 == 0);
607 /// assert_eq!(vec, [2, 4]);
609 #[stable(feature = "rust1", since = "1.0.0")]
610 pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
611 let len = self.len();
625 self.truncate(len - del);
// NOTE(review): elided fragment — the call site of the local `resize` helper,
// the `return` after the ZST branch, and the final `self.len += 1` are among
// the missing lines. Doubling growth: new size is max(old, 2*sizeof(T)) * 2,
// with an explicit overflow panic when the multiplication wraps.
629 /// Appends an element to the back of a collection.
633 /// Panics if the number of elements in the vector overflows a `usize`.
638 /// let mut vec = vec!(1, 2);
640 /// assert_eq!(vec, [1, 2, 3]);
643 #[stable(feature = "rust1", since = "1.0.0")]
644 pub fn push(&mut self, value: T) {
647 fn resize<T>(vec: &mut Vec<T>) {
648 let old_size = vec.cap * mem::size_of::<T>();
649 let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
650 if old_size > size { panic!("capacity overflow") }
652 let ptr = alloc_or_realloc(*vec.ptr, old_size, size);
653 if ptr.is_null() { ::alloc::oom() }
654 vec.ptr = Unique::new(ptr);
656 vec.cap = max(vec.cap, 2) * 2;
659 if mem::size_of::<T>() == 0 {
660 // zero-size types consume no memory, so we can't rely on the
661 // address space running out
662 self.len = self.len.checked_add(1).expect("length overflow");
663 unsafe { mem::forget(value); }
667 if self.len == self.cap {
672 let end = (*self.ptr).offset(self.len as isize);
673 ptr::write(&mut *end, value);
// NOTE(review): elided fragment — the empty check returning `None` and the
// `self.len -= 1` before the read are among the missing lines.
678 /// Removes the last element from a vector and returns it, or `None` if it is empty.
683 /// let mut vec = vec![1, 2, 3];
684 /// assert_eq!(vec.pop(), Some(3));
685 /// assert_eq!(vec, [1, 2]);
688 #[stable(feature = "rust1", since = "1.0.0")]
689 pub fn pop(&mut self) -> Option<T> {
695 Some(ptr::read(self.get_unchecked(self.len())))
// NOTE(review): elided fragment — the `return` closing the ZST fast path and
// the source/len arguments of the `copy_nonoverlapping` call are missing.
// ZST path only bumps `self.len` (checked) and empties `other`.
700 /// Moves all the elements of `other` into `Self`, leaving `other` empty.
704 /// Panics if the number of elements in the vector overflows a `usize`.
709 /// # #![feature(collections)]
710 /// let mut vec = vec![1, 2, 3];
711 /// let mut vec2 = vec![4, 5, 6];
712 /// vec.append(&mut vec2);
713 /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
714 /// assert_eq!(vec2, []);
717 #[unstable(feature = "collections",
718 reason = "new API, waiting for dust to settle")]
719 pub fn append(&mut self, other: &mut Self) {
720 if mem::size_of::<T>() == 0 {
721 // zero-size types consume no memory, so we can't rely on the
722 // address space running out
723 self.len = self.len.checked_add(other.len()).expect("length overflow");
724 unsafe { other.set_len(0) }
727 self.reserve(other.len());
728 let len = self.len();
730 ptr::copy_nonoverlapping(
731 self.get_unchecked_mut(len),
736 self.len += other.len();
737 unsafe { other.set_len(0); }
// NOTE(review): elided fragment — the `Drain { .. }` construction and the
// length reset are missing. Same ZST end-pointer arithmetic as `into_iter`.
740 /// Creates a draining iterator that clears the `Vec` and iterates over
741 /// the removed items from start to end.
746 /// # #![feature(collections)]
747 /// let mut v = vec!["a".to_string(), "b".to_string()];
748 /// for s in v.drain() {
749 /// // s has type String, not &String
750 /// println!("{}", s);
752 /// assert!(v.is_empty());
755 #[unstable(feature = "collections",
756 reason = "matches collection reform specification, waiting for dust to settle")]
757 pub fn drain(&mut self) -> Drain<T> {
759 let begin = *self.ptr as *const T;
760 let end = if mem::size_of::<T>() == 0 {
761 (*self.ptr as usize + self.len()) as *const T
763 (*self.ptr).offset(self.len() as isize) as *const T
// NOTE(review): three small fragments. `clear`'s body (presumably
// `self.truncate(0)`) is elided; `len` and `is_empty` are complete one-liners.
774 /// Clears the vector, removing all values.
779 /// let mut v = vec![1, 2, 3];
783 /// assert!(v.is_empty());
786 #[stable(feature = "rust1", since = "1.0.0")]
787 pub fn clear(&mut self) {
791 /// Returns the number of elements in the vector.
796 /// let a = vec![1, 2, 3];
797 /// assert_eq!(a.len(), 3);
800 #[stable(feature = "rust1", since = "1.0.0")]
801 pub fn len(&self) -> usize { self.len }
803 /// Returns `true` if the vector contains no elements.
808 /// let mut v = Vec::new();
809 /// assert!(v.is_empty());
812 /// assert!(!v.is_empty());
814 #[stable(feature = "rust1", since = "1.0.0")]
815 pub fn is_empty(&self) -> bool { self.len() == 0 }
// NOTE(review): heavily elided fragment of the in-place T->U transmuting map.
// Panic-safety is delegated to the PartialVec{NonZeroSized,ZeroSized} guard
// structs (declared elsewhere in the file, not visible here). Several key lines
// are missing: the `let mut vec = self;` binding, the `f(t)` call producing `u`,
// the `mem::forget(pv)` extraction, and the zero-sized loop's num_t/num_u
// bookkeeping. Kept byte-identical.
817 /// Converts a `Vec<T>` to a `Vec<U>` where `T` and `U` have the same
818 /// size and in case they are not zero-sized the same minimal alignment.
822 /// Panics if `T` and `U` have differing sizes or are not zero-sized and
823 /// have differing minimal alignments.
828 /// # #![feature(collections, core)]
829 /// let v = vec![0, 1, 2];
830 /// let w = v.map_in_place(|i| i + 3);
831 /// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
833 /// #[derive(PartialEq, Debug)]
834 /// struct Newtype(u8);
835 /// let bytes = vec![0x11, 0x22];
836 /// let newtyped_bytes = bytes.map_in_place(|x| Newtype(x));
837 /// assert_eq!(newtyped_bytes.as_slice(), [Newtype(0x11), Newtype(0x22)].as_slice());
839 #[unstable(feature = "collections",
840 reason = "API may change to provide stronger guarantees")]
841 pub fn map_in_place<U, F>(self, mut f: F) -> Vec<U> where F: FnMut(T) -> U {
842 // FIXME: Assert statically that the types `T` and `U` have the same
844 assert!(mem::size_of::<T>() == mem::size_of::<U>());
848 if mem::size_of::<T>() != 0 {
849 // FIXME: Assert statically that the types `T` and `U` have the
850 // same minimal alignment in case they are not zero-sized.
852 // These asserts are necessary because the `min_align_of` of the
853 // types are passed to the allocator by `Vec`.
854 assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
856 // This `as isize` cast is safe, because the size of the elements of the
857 // vector is not 0, and:
859 // 1) If the size of the elements in the vector is 1, the `isize` may
860 // overflow, but it has the correct bit pattern so that the
861 // `.offset()` function will work.
864 // Address space 0x0-0xF.
865 // `u8` array at: 0x1.
866 // Size of `u8` array: 0x8.
867 // Calculated `offset`: -0x8.
868 // After `array.offset(offset)`: 0x9.
869 // (0x1 + 0x8 = 0x1 - 0x8)
871 // 2) If the size of the elements in the vector is >1, the `usize` ->
872 // `isize` conversion can't overflow.
873 let offset = vec.len() as isize;
874 let start = vec.as_mut_ptr();
876 let mut pv = PartialVecNonZeroSized {
880 // This points inside the vector, as the vector has length
882 end_t: unsafe { start.offset(offset) },
883 start_u: start as *mut U,
884 end_u: start as *mut U,
886 _marker: PhantomData,
897 while pv.end_u as *mut T != pv.end_t {
901 // +-+-+-+-+-+-+-+-+-+
902 // |U|...|U|T|T|...|T|
903 // +-+-+-+-+-+-+-+-+-+
907 let t = ptr::read(pv.start_t);
910 // +-+-+-+-+-+-+-+-+-+
911 // |U|...|U|X|T|...|T|
912 // +-+-+-+-+-+-+-+-+-+
915 // We must not panic here, one cell is marked as `T`
916 // although it is not `T`.
918 pv.start_t = pv.start_t.offset(1);
921 // +-+-+-+-+-+-+-+-+-+
922 // |U|...|U|X|T|...|T|
923 // +-+-+-+-+-+-+-+-+-+
926 // We may panic again.
928 // The function given by the user might panic.
931 ptr::write(pv.end_u, u);
934 // +-+-+-+-+-+-+-+-+-+
935 // |U|...|U|U|T|...|T|
936 // +-+-+-+-+-+-+-+-+-+
939 // We should not panic here, because that would leak the `U`
940 // pointed to by `end_u`.
942 pv.end_u = pv.end_u.offset(1);
945 // +-+-+-+-+-+-+-+-+-+
946 // |U|...|U|U|T|...|T|
947 // +-+-+-+-+-+-+-+-+-+
950 // We may panic again.
962 // Extract `vec` and prevent the destructor of
963 // `PartialVecNonZeroSized` from running. Note that none of the
964 // function calls can panic, thus no resources can be leaked (as the
965 // `vec` member of `PartialVec` is the only one which holds
966 // allocations -- and it is returned from this function. None of
969 let vec_len = pv.vec.len();
970 let vec_cap = pv.vec.capacity();
971 let vec_ptr = pv.vec.as_mut_ptr() as *mut U;
973 Vec::from_raw_parts(vec_ptr, vec_len, vec_cap)
976 // Put the `Vec` into the `PartialVecZeroSized` structure and
977 // prevent the destructor of the `Vec` from running. Since the
978 // `Vec` contained zero-sized objects, it did not allocate, so we
979 // are not leaking memory here.
980 let mut pv = PartialVecZeroSized::<T,U> {
985 unsafe { mem::forget(vec); }
987 while pv.num_t != 0 {
989 // Create a `T` out of thin air and decrement `num_t`. This
990 // must not panic between these steps, as otherwise a
991 // destructor of `T` which doesn't exist runs.
992 let t = mem::uninitialized();
995 // The function given by the user might panic.
998 // Forget the `U` and increment `num_u`. This increment
999 // cannot overflow the `usize` as we only do this for a
1000 // number of times that fits into a `usize` (and start with
1001 // `0`). Again, we should not panic between these steps.
1006 // Create a `Vec` from our `PartialVecZeroSized` and make sure the
1007 // destructor of the latter will not run. None of this can panic.
1008 let mut result = Vec::new();
1010 result.set_len(pv.num_u);
// NOTE(review): elided fragment — the `self.set_len(at)` call, the destination
// argument of `copy_nonoverlapping`, and the `other` return are missing.
1017 /// Splits the collection into two at the given index.
1019 /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`,
1020 /// and the returned `Self` contains elements `[at, len)`.
1022 /// Note that the capacity of `self` does not change.
1026 /// Panics if `at > len`.
1031 /// # #![feature(collections)]
1032 /// let mut vec = vec![1,2,3];
1033 /// let vec2 = vec.split_off(1);
1034 /// assert_eq!(vec, [1]);
1035 /// assert_eq!(vec2, [2, 3]);
1038 #[unstable(feature = "collections",
1039 reason = "new API, waiting for dust to settle")]
1040 pub fn split_off(&mut self, at: usize) -> Self {
1041 assert!(at <= self.len(), "`at` out of bounds");
1043 let other_len = self.len - at;
1044 let mut other = Vec::with_capacity(other_len);
1046 // Unsafely `set_len` and copy items to `other`.
1049 other.set_len(other_len);
1051 ptr::copy_nonoverlapping(
1053 self.as_ptr().offset(at as isize),
// NOTE(review): elided fragment — the `if new_len > len` branch head is
// missing between the visible extend/truncate arms.
1061 impl<T: Clone> Vec<T> {
1062 /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
1064 /// Calls either `extend()` or `truncate()` depending on whether `new_len`
1065 /// is larger than the current value of `len()` or not.
1070 /// # #![feature(collections)]
1071 /// let mut vec = vec!["hello"];
1072 /// vec.resize(3, "world");
1073 /// assert_eq!(vec, ["hello", "world", "world"]);
1075 /// let mut vec = vec![1, 2, 3, 4];
1076 /// vec.resize(2, 0);
1077 /// assert_eq!(vec, [1, 2]);
1079 #[unstable(feature = "collections",
1080 reason = "matches collection reform specification; waiting for dust to settle")]
1081 pub fn resize(&mut self, new_len: usize, value: T) {
1082 let len = self.len();
1085 self.extend(repeat(value).take(new_len - len));
1087 self.truncate(new_len);
// NOTE(review): elided fragment — the `unsafe {` opener and the `ptr::write(`
// call head around the get_unchecked pair are among the missing lines.
// Clones element-by-element, bumping len after each write for panic safety.
1091 /// Appends all elements in a slice to the `Vec`.
1093 /// Iterates over the slice `other`, clones each element, and then appends
1094 /// it to this `Vec`. The `other` vector is traversed in-order.
1099 /// # #![feature(collections)]
1100 /// let mut vec = vec![1];
1101 /// vec.push_all(&[2, 3, 4]);
1102 /// assert_eq!(vec, [1, 2, 3, 4]);
1105 #[unstable(feature = "collections",
1106 reason = "likely to be replaced by a more optimized extend")]
1107 pub fn push_all(&mut self, other: &[T]) {
1108 self.reserve(other.len());
1110 for i in 0..other.len() {
1111 let len = self.len();
1113 // Unsafe code so this can be optimised to a memcpy (or something similarly
1114 // fast) when T is Copy. LLVM is easily confused, so any extra operations
1115 // during the loop can prevent this optimisation.
1118 self.get_unchecked_mut(len),
1119 other.get_unchecked(i).clone());
1120 self.set_len(len + 1);
// NOTE(review): elided fragment — the unsafe loop head, the self[r]==self[w-1]
// comparison, the r/w increments, and the final `truncate(w)` are missing;
// the read/write-cursor algorithm is fully described by the retained comments.
1126 impl<T: PartialEq> Vec<T> {
1127 /// Removes consecutive repeated elements in the vector.
1129 /// If the vector is sorted, this removes all duplicates.
1134 /// let mut vec = vec![1, 2, 2, 3, 2];
1138 /// assert_eq!(vec, [1, 2, 3, 2]);
1140 #[stable(feature = "rust1", since = "1.0.0")]
1141 pub fn dedup(&mut self) {
1143 // Although we have a mutable reference to `self`, we cannot make
1144 // *arbitrary* changes. The `PartialEq` comparisons could panic, so we
1145 // must ensure that the vector is in a valid state at all time.
1147 // The way that we handle this is by using swaps; we iterate
1148 // over all the elements, swapping as we go so that at the end
1149 // the elements we wish to keep are in the front, and those we
1150 // wish to reject are at the back. We can then truncate the
1151 // vector. This operation is still O(n).
1153 // Example: We start in this state, where `r` represents "next
1154 // read" and `w` represents "next_write`.
1157 // +---+---+---+---+---+---+
1158 // | 0 | 1 | 1 | 2 | 3 | 3 |
1159 // +---+---+---+---+---+---+
1162 // Comparing self[r] against self[w-1], this is not a duplicate, so
1163 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1164 // r and w, leaving us with:
1167 // +---+---+---+---+---+---+
1168 // | 0 | 1 | 1 | 2 | 3 | 3 |
1169 // +---+---+---+---+---+---+
1172 // Comparing self[r] against self[w-1], this value is a duplicate,
1173 // so we increment `r` but leave everything else unchanged:
1176 // +---+---+---+---+---+---+
1177 // | 0 | 1 | 1 | 2 | 3 | 3 |
1178 // +---+---+---+---+---+---+
1181 // Comparing self[r] against self[w-1], this is not a duplicate,
1182 // so swap self[r] and self[w] and advance r and w:
1185 // +---+---+---+---+---+---+
1186 // | 0 | 1 | 2 | 1 | 3 | 3 |
1187 // +---+---+---+---+---+---+
1190 // Not a duplicate, repeat:
1193 // +---+---+---+---+---+---+
1194 // | 0 | 1 | 2 | 3 | 1 | 3 |
1195 // +---+---+---+---+---+---+
1198 // Duplicate, advance r. End of vec. Truncate to w.
1200 let ln = self.len();
1201 if ln < 1 { return; }
1203 // Avoid bounds checks by using unsafe pointers.
1204 let p = self.as_mut_ptr();
1205 let mut r: usize = 1;
1206 let mut w: usize = 1;
1209 let p_r = p.offset(r as isize);
1210 let p_wm1 = p.offset((w - 1) as isize);
1213 let p_w = p_wm1.offset(1);
1214 mem::swap(&mut *p_r, &mut *p_w);
1226 ////////////////////////////////////////////////////////////////////////////////
1227 // Internal methods and functions
1228 ////////////////////////////////////////////////////////////////////////////////
// NOTE(review): elided fragment; closing braces missing. Zero-sized T returns
// early — capacity is meaningless for ZSTs in this design.
1231 /// Reserves capacity for exactly `capacity` elements in the given vector.
1233 /// If the capacity for `self` is already equal to or greater than the
1234 /// requested capacity, then no action is taken.
1235 fn grow_capacity(&mut self, capacity: usize) {
1236 if mem::size_of::<T>() == 0 { return }
1238 if capacity > self.cap {
1239 let size = capacity.checked_mul(mem::size_of::<T>())
1240 .expect("capacity overflow");
1242 let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
1243 if ptr.is_null() { ::alloc::oom() }
1244 self.ptr = Unique::new(ptr);
1246 self.cap = capacity;
// NOTE(review): two helper fragments. `alloc_or_realloc`'s branch condition
// (presumably `old_size == 0`) selecting allocate vs reallocate is elided.
// `dealloc` is a no-op for zero-sized T, matching the no-allocation invariant.
1251 // FIXME: #13996: need a way to mark the return value as `noalias`
1253 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
1255 allocate(size, mem::min_align_of::<T>()) as *mut T
1257 reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
1262 unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
1263 if mem::size_of::<T>() != 0 {
1264 deallocate(ptr as *mut u8,
1265 len * mem::size_of::<T>(),
1266 mem::min_align_of::<T>())
// NOTE(review): elided fragment — the `n == 0` early return, the loop head
// over `i`, the final `set_len(n)`, and the return of `v` are missing.
// Incrementing len each iteration keeps the vector drop-safe if Clone panics.
1271 #[stable(feature = "rust1", since = "1.0.0")]
1272 pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
1274 let mut v = Vec::with_capacity(n);
1275 let mut ptr = v.as_mut_ptr();
1277 // Write all elements except the last one
1279 ptr::write(ptr, Clone::clone(&elem));
1280 ptr = ptr.offset(1);
1281 v.set_len(i); // Increment the length in every step in case Clone::clone() panics
1285 // We can write the last element directly without cloning needlessly
1286 ptr::write(ptr, elem);
1294 ////////////////////////////////////////////////////////////////////////////////
1295 // Common trait implementations for Vec
1296 ////////////////////////////////////////////////////////////////////////////////
// NOTE(review): elided fragment — the cfg attributes selecting between the
// two `clone` definitions (normal vs cfg(test) hack) are among the missing
// lines, as is the closing of `clone_from`'s truncate branch.
1298 #[unstable(feature = "collections")]
1299 impl<T:Clone> Clone for Vec<T> {
1301 fn clone(&self) -> Vec<T> { <[T]>::to_vec(&**self) }
1303 // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
1304 // required for this method definition, is not available. Instead use the
1305 // `slice::to_vec` function which is only available with cfg(test)
1306 // NB see the slice::hack module in slice.rs for more information
1308 fn clone(&self) -> Vec<T> {
1309 ::slice::to_vec(&**self)
1312 fn clone_from(&mut self, other: &Vec<T>) {
1313 // drop anything in self that will not be overwritten
1314 if self.len() > other.len() {
1315 self.truncate(other.len())
1318 // reuse the contained values' allocations/resources.
1319 for (place, thing) in self.iter_mut().zip(other.iter()) {
1320 place.clone_from(thing)
1323 // self.len <= other.len due to the truncate above, so the
1324 // slice here is always in-bounds.
1325 let slice = &other[self.len()..];
1326 self.push_all(slice);
// NOTE(review): three impl fragments (Hash, Index<usize>, IndexMut<usize>).
// Index's `&(**self)[index]` body line is elided; IndexMut's survives. All
// delegate to the slice obtained via Deref.
1330 #[stable(feature = "rust1", since = "1.0.0")]
1331 impl<T: Hash> Hash for Vec<T> {
1333 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1334 Hash::hash(&**self, state)
1338 #[stable(feature = "rust1", since = "1.0.0")]
1339 impl<T> Index<usize> for Vec<T> {
1343 fn index(&self, index: usize) -> &T {
1344 // NB built-in indexing via `&[T]`
1349 #[stable(feature = "rust1", since = "1.0.0")]
1350 impl<T> IndexMut<usize> for Vec<T> {
1352 fn index_mut(&mut self, index: usize) -> &mut T {
1353 // NB built-in indexing via `&mut [T]`
1354 &mut (**self)[index]
// Range-indexing (`vec[a..b]`, `vec[..b]`, `vec[a..]`, `vec[..]`): each impl
// forwards to the corresponding slice `Index` impl via `&**self`, so slicing
// semantics and bounds panics match `[T]` exactly.
// NOTE(review): `type Output = [T];` lines, closing braces, and the
// `RangeFull` body (presumably `self` as a slice) are missing from this
// listing — confirm against the full file.
1359 #[stable(feature = "rust1", since = "1.0.0")]
1360 impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
1364     fn index(&self, index: ops::Range<usize>) -> &[T] {
1365         Index::index(&**self, index)
1368 #[stable(feature = "rust1", since = "1.0.0")]
1369 impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
1373     fn index(&self, index: ops::RangeTo<usize>) -> &[T] {
1374         Index::index(&**self, index)
1377 #[stable(feature = "rust1", since = "1.0.0")]
1378 impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
1382     fn index(&self, index: ops::RangeFrom<usize>) -> &[T] {
1383         Index::index(&**self, index)
1386 #[stable(feature = "rust1", since = "1.0.0")]
1387 impl<T> ops::Index<ops::RangeFull> for Vec<T> {
1391     fn index(&self, _index: ops::RangeFull) -> &[T] {
// Mutable range-indexing: mirrors the `Index` impls above, forwarding to the
// slice `IndexMut` impls via `&mut **self`.
// NOTE(review): `type Output` lines, closing braces, and the `RangeFull`
// body are missing from this listing — confirm against the full file.
1396 #[stable(feature = "rust1", since = "1.0.0")]
1397 impl<T> ops::IndexMut<ops::Range<usize>> for Vec<T> {
1400     fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] {
1401         IndexMut::index_mut(&mut **self, index)
1404 #[stable(feature = "rust1", since = "1.0.0")]
1405 impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
1408     fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
1409         IndexMut::index_mut(&mut **self, index)
1412 #[stable(feature = "rust1", since = "1.0.0")]
1413 impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
1416     fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
1417         IndexMut::index_mut(&mut **self, index)
1420 #[stable(feature = "rust1", since = "1.0.0")]
1421 impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
1424     fn index_mut(&mut self, _index: ops::RangeFull) -> &mut [T] {
// `Deref`/`DerefMut` to `[T]`: the foundation of all the slice delegation
// above. Both build a slice from the raw buffer pointer and `self.len`.
// The `assume(ptr non-null)` intrinsic is an optimizer hint: Vec's pointer is
// never null (EMPTY sentinel for zero-cap), so null checks can be elided.
// NOTE(review): the `unsafe {` openers, the `let p = *self.ptr;` line in
// `deref`, and `type Target = [T];` are missing from this listing — confirm
// against the full file.
1429 #[stable(feature = "rust1", since = "1.0.0")]
1430 impl<T> ops::Deref for Vec<T> {
1433     fn deref(&self) -> &[T] {
1436             assume(p != 0 as *mut T);
1437             slice::from_raw_parts(p, self.len)
1442 #[stable(feature = "rust1", since = "1.0.0")]
1443 impl<T> ops::DerefMut for Vec<T> {
1444     fn deref_mut(&mut self) -> &mut [T] {
1446             let ptr = *self.ptr;
1447             assume(!ptr.is_null());
1448             slice::from_raw_parts_mut(ptr, self.len)
// Collecting an iterator into a `Vec<T>`. Pre-allocates from the iterator's
// size_hint lower bound, fills that capacity with unchecked writes (avoiding
// per-element capacity checks), then falls back to plain `push` for any
// elements beyond the hint. The long comment below explains why the `if`
// guard keeps this equivalent to a single pass over the iterator.
// NOTE(review): the `unsafe {` wrapper around the `ptr::write`/`set_len`
// pair, the final `vector` return expression, and closing braces are missing
// from this listing — confirm against the full file.
1453 #[stable(feature = "rust1", since = "1.0.0")]
1454 impl<T> FromIterator<T> for Vec<T> {
1456     fn from_iter<I: IntoIterator<Item=T>>(iterable: I) -> Vec<T> {
1457         let mut iterator = iterable.into_iter();
1458         let (lower, _) = iterator.size_hint();
1459         let mut vector = Vec::with_capacity(lower);
1461         // This function should be the moral equivalent of:
1463         //      for item in iterator {
1464         //          vector.push(item);
1467         // This equivalent crucially runs the iterator precisely once. Below we
1468         // actually in theory run the iterator twice (one without bounds checks
1469         // and one with). To achieve the "moral equivalent", we use the `if`
1470         // statement below to break out early.
1472         // If the first loop has terminated, then we have one of two conditions.
1474         // 1. The underlying iterator returned `None`. In this case we are
1475         //    guaranteed that less than `vector.capacity()` elements have been
1476         //    returned, so we break out early.
1477         // 2. The underlying iterator yielded `vector.capacity()` elements and
1478         //    has not yielded `None` yet. In this case we run the iterator to
1480         for element in iterator.by_ref().take(vector.capacity()) {
1481             let len = vector.len();
1483                 ptr::write(vector.get_unchecked_mut(len), element);
1484                 vector.set_len(len + 1);
1488         if vector.len() == vector.capacity() {
1489             for element in iterator {
1490                 vector.push(element);
// The three standard `IntoIterator` impls: by value (consuming `IntoIter<T>`
// yielding `T`), by shared reference (slice `Iter` yielding `&T`), and by
// mutable reference (slice `IterMut` yielding `&mut T`).
// NOTE(review): the `type Item` lines for the first two impls and the method
// bodies (presumably delegating to `IntoIter`/`self.iter()`/`self.iter_mut()`)
// are missing from this listing — confirm against the full file.
1497 #[stable(feature = "rust1", since = "1.0.0")]
1498 impl<T> IntoIterator for Vec<T> {
1500     type IntoIter = IntoIter<T>;
1502     fn into_iter(self) -> IntoIter<T> {
1507 #[stable(feature = "rust1", since = "1.0.0")]
1508 impl<'a, T> IntoIterator for &'a Vec<T> {
1510     type IntoIter = slice::Iter<'a, T>;
1512     fn into_iter(self) -> slice::Iter<'a, T> {
1517 #[stable(feature = "rust1", since = "1.0.0")]
1518 impl<'a, T> IntoIterator for &'a mut Vec<T> {
1519     type Item = &'a mut T;
1520     type IntoIter = slice::IterMut<'a, T>;
1522     fn into_iter(mut self) -> slice::IterMut<'a, T> {
// `Extend`: reserve for the size_hint lower bound up front (one reallocation
// instead of amortized growth), then append each element.
// NOTE(review): the loop body (presumably `self.push(element)`) and closing
// braces are missing from this listing — confirm against the full file.
1527 #[unstable(feature = "collections", reason = "waiting on Extend stability")]
1528 impl<T> Extend<T> for Vec<T> {
1530     fn extend<I: IntoIterator<Item=T>>(&mut self, iterable: I) {
1531         let iterator = iterable.into_iter();
1532         let (lower, _) = iterator.size_hint();
1533         self.reserve(lower);
1534         for element in iterator {
// Macro-generated `PartialEq` impls: `Vec`/`Cow<[A]>` compared against other
// vectors, slices, and (via `array_impls!`) fixed-size arrays, all by
// comparing contents as slices.
// NOTE(review): the `array_impls!` macro definition header/footer and its
// invocation line are incomplete in this listing — the bare number rows below
// are presumably the array-length arguments (0..32) passed to the macro;
// confirm against the full file.
1540 __impl_slice_eq1! { Vec<A>, Vec<B> }
1541 __impl_slice_eq2! { Vec<A>, &'b [B] }
1542 __impl_slice_eq2! { Vec<A>, &'b mut [B] }
1543 __impl_slice_eq2! { Cow<'a, [A]>, &'b [B], Clone }
1544 __impl_slice_eq2! { Cow<'a, [A]>, &'b mut [B], Clone }
1545 __impl_slice_eq2! { Cow<'a, [A]>, Vec<B>, Clone }
1547 macro_rules! array_impls {
1550         // NOTE: some less important impls are omitted to reduce code bloat
1551         __impl_slice_eq2! { Vec<A>, [B; $N] }
1552         __impl_slice_eq2! { Vec<A>, &'b [B; $N] }
1553         // __impl_slice_eq2! { Vec<A>, &'b mut [B; $N] }
1554         // __impl_slice_eq2! { Cow<'a, [A]>, [B; $N], Clone }
1555         // __impl_slice_eq2! { Cow<'a, [A]>, &'b [B; $N], Clone }
1556         // __impl_slice_eq2! { Cow<'a, [A]>, &'b mut [B; $N], Clone }
1563     10 11 12 13 14 15 16 17 18 19
1564     20 21 22 23 24 25 26 27 28 29
// Ordering: `Vec<T>` compares exactly like its slice contents (delegation
// through `&**self`), i.e. the ordering defined by `[T]`'s impls.
1568 #[stable(feature = "rust1", since = "1.0.0")]
1569 impl<T: PartialOrd> PartialOrd for Vec<T> {
1571     fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
1572         PartialOrd::partial_cmp(&**self, &**other)
// `Eq` is a marker on top of `PartialEq` — no methods.
1576 #[stable(feature = "rust1", since = "1.0.0")]
1577 impl<T: Eq> Eq for Vec<T> {}
1579 #[stable(feature = "rust1", since = "1.0.0")]
1580 impl<T: Ord> Ord for Vec<T> {
1582     fn cmp(&self, other: &Vec<T>) -> Ordering {
1583         Ord::cmp(&**self, &**other)
// Deprecated `AsSlice` impl, kept only for backward compatibility; slice
// syntax (`&s[..]`) replaces it.
// NOTE(review): the body of `as_slice` (presumably just derefing to the
// slice) is missing from this listing — confirm against the full file.
1587 #[unstable(feature = "collections",
1588            reason = "will be replaced by slice syntax")]
1589 #[deprecated(since = "1.0.0", reason = "use &mut s[..] instead")]
1590 #[allow(deprecated)]
1591 impl<T> AsSlice<T> for Vec<T> {
1592     /// Deprecated: use `&mut s[..]` instead.
1594     fn as_slice(&self) -> &[T] {
// `Vec<T> + &[T]`: consumes the vector and returns it with the slice's
// elements appended (rhs elements cloned, hence the `T: Clone` bound).
// NOTE(review): the method body (presumably a `push_all(rhs)` followed by
// returning `self`) is missing from this listing — confirm against the full
// file.
1599 #[unstable(feature = "collections",
1600            reason = "recent addition, needs more experience")]
1601 impl<'a, T: Clone> Add<&'a [T]> for Vec<T> {
1602     type Output = Vec<T>;
1605     fn add(mut self, rhs: &[T]) -> Vec<T> {
// Destructor: frees the heap buffer. The guard makes drop a no-op both for
// a never-allocated vector (`cap == 0`) and for a value whose fields were
// overwritten with the post-drop filler (`POST_DROP_USIZE`), which is how
// `#[unsafe_no_drop_flag]` types detect "already dropped/moved-out".
// NOTE(review): the per-element destructor loop that should run between the
// guard and `dealloc` is missing from this listing — confirm against the
// full file.
1611 #[unsafe_destructor]
1612 #[stable(feature = "rust1", since = "1.0.0")]
1613 impl<T> Drop for Vec<T> {
1614     fn drop(&mut self) {
1615         // This is (and should always remain) a no-op if the fields are
1616         // zeroed (when moving out, because of #[unsafe_no_drop_flag]).
1617         if self.cap != 0 && self.cap != mem::POST_DROP_USIZE {
1622                 dealloc(*self.ptr, self.cap)
// `Default` for `Vec<T>`: an empty vector.
// NOTE(review): the body (presumably `Vec::new()`) is missing from this
// listing — confirm against the full file.
1628 #[stable(feature = "rust1", since = "1.0.0")]
1629 impl<T> Default for Vec<T> {
1630     #[stable(feature = "rust1", since = "1.0.0")]
1631     fn default() -> Vec<T> {
// `Debug` formatting delegates to the slice impl, so a Vec prints as
// `[a, b, c]`.
1636 #[stable(feature = "rust1", since = "1.0.0")]
1637 impl<T: fmt::Debug> fmt::Debug for Vec<T> {
1638     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1639         fmt::Debug::fmt(&**self, f)
// Cheap reference conversions (`AsRef`) and owning conversions (`From`).
// NOTE(review): the `as_ref` bodies (identity / slice deref) are missing from
// this listing. `From<&[T]>` shows two `fn from` definitions — presumably a
// `#[cfg(not(test))]`/`#[cfg(test)]` pair like `Clone::clone` above, with the
// attribute lines missing; confirm against the full file.
1643 #[stable(feature = "rust1", since = "1.0.0")]
1644 impl<T> AsRef<Vec<T>> for Vec<T> {
1645     fn as_ref(&self) -> &Vec<T> {
1650 #[stable(feature = "rust1", since = "1.0.0")]
1651 impl<T> AsRef<[T]> for Vec<T> {
1652     fn as_ref(&self) -> &[T] {
1657 #[stable(feature = "rust1", since = "1.0.0")]
1658 impl<'a, T: Clone> From<&'a [T]> for Vec<T> {
1660     fn from(s: &'a [T]) -> Vec<T> {
1664     fn from(s: &'a [T]) -> Vec<T> {
// `From<&str>` for `Vec<u8>` goes through the string's UTF-8 bytes.
1669 #[stable(feature = "rust1", since = "1.0.0")]
1670 impl<'a> From<&'a str> for Vec<u8> {
1671     fn from(s: &'a str) -> Vec<u8> {
1672         From::from(s.as_bytes())
1676 ////////////////////////////////////////////////////////////////////////////////
1678 ////////////////////////////////////////////////////////////////////////////////
1680 /// A clone-on-write vector
// Deprecated alias: `CowVec<'a, T>` is just `Cow<'a, [T]>`.
1681 #[deprecated(since = "1.0.0", reason = "use Cow<'a, [T]> instead")]
1682 #[unstable(feature = "collections")]
1683 pub type CowVec<'a, T> = Cow<'a, [T]>;
// Collecting into a `Cow<[T]>` always produces the `Owned` variant (a Vec).
1685 #[unstable(feature = "collections")]
1686 impl<'a, T> FromIterator<T> for Cow<'a, [T]> where T: Clone {
1687     fn from_iter<I: IntoIterator<Item=T>>(it: I) -> Cow<'a, [T]> {
1688         Cow::Owned(FromIterator::from_iter(it))
// `IntoCow`: a Vec becomes the owned variant, a borrowed slice the borrowed
// variant.
// NOTE(review): both `into_cow` bodies (presumably `Cow::Owned(self)` and
// `Cow::Borrowed(self)`) are missing from this listing — confirm against the
// full file.
1692 impl<'a, T: 'a> IntoCow<'a, [T]> for Vec<T> where T: Clone {
1693     fn into_cow(self) -> Cow<'a, [T]> {
1698 impl<'a, T> IntoCow<'a, [T]> for &'a [T] where T: Clone {
1699     fn into_cow(self) -> Cow<'a, [T]> {
1704 ////////////////////////////////////////////////////////////////////////////////
1706 ////////////////////////////////////////////////////////////////////////////////
1708 /// An iterator that moves out of a vector.
// `IntoIter` keeps the original allocation (`allocation`/`cap`) so it can be
// freed — or handed back via `into_inner` — when iteration is done.
// NOTE(review): the cursor fields (presumably `ptr`/`end`, used by the
// `Iterator` impls below and destructured in `into_inner`) are missing from
// this listing — confirm against the full file.
1709 #[stable(feature = "rust1", since = "1.0.0")]
1710 pub struct IntoIter<T> {
1711     allocation: *mut T, // the block of memory allocated for the vector
1712     cap: usize, // the capacity of the vector
// Safe to send/share across threads exactly when `T` is.
1717 unsafe impl<T: Send> Send for IntoIter<T> { }
1718 unsafe impl<T: Sync> Sync for IntoIter<T> { }
1720 impl<T> IntoIter<T> {
1722     /// Drops all items that have not yet been moved and returns the empty vector.
1723     #[unstable(feature = "collections")]
1724     pub fn into_inner(mut self) -> Vec<T> {
// Drain (and drop) every remaining element, then rebuild an empty Vec over
// the same allocation so its capacity is reused. `from_raw_parts` takes
// ownership, so `self`'s own Drop must not also free the buffer (the
// destructuring consumes `self`).
1726         for _x in self.by_ref() { }
1727         let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
1729             Vec::from_raw_parts(allocation, 0, cap)
// Forward iteration: walk `ptr` toward `end`, reading each element out by
// value with `ptr::read`. Zero-sized types get special handling: `ptr` is
// used as a bare counter (incremented via transmute, since `offset(1)` on a
// ZST pointer would not advance the address), and the value is conjured by
// reading from the non-null `EMPTY` sentinel.
// NOTE(review): the `unsafe {` opener, `type Item = T;`, the
// `let old = self.ptr;` line, and closing braces are missing from this
// listing — confirm against the full file.
1734 #[stable(feature = "rust1", since = "1.0.0")]
1735 impl<T> Iterator for IntoIter<T> {
1739     fn next(&mut self) -> Option<T> {
1741             if self.ptr == self.end {
1744                 if mem::size_of::<T>() == 0 {
1745                     // purposefully don't use 'ptr.offset' because for
1746                     // vectors with 0-size elements this would return the
1748                     self.ptr = mem::transmute(self.ptr as usize + 1);
1750                     // Use a non-null pointer value
1751                     Some(ptr::read(EMPTY as *mut T))
1754                     self.ptr = self.ptr.offset(1);
1756                     Some(ptr::read(old))
// Exact size: byte distance between cursors divided by element size (ZSTs
// divide by 1 since the cursors are counters in that case).
1763     fn size_hint(&self) -> (usize, Option<usize>) {
1764         let diff = (self.end as usize) - (self.ptr as usize);
1765         let size = mem::size_of::<T>();
1766         let exact = diff / (if size == 0 {1} else {size});
1767         (exact, Some(exact))
// Backward iteration: mirror of `next`, moving `end` down toward `ptr`.
// ZSTs again treat the pointer as a counter; otherwise `end` is decremented
// and the element read from the new position.
// NOTE(review): `unsafe {` openers and closing braces are missing from this
// listing — confirm against the full file.
1771 #[stable(feature = "rust1", since = "1.0.0")]
1772 impl<T> DoubleEndedIterator for IntoIter<T> {
1774     fn next_back(&mut self) -> Option<T> {
1776             if self.end == self.ptr {
1779                 if mem::size_of::<T>() == 0 {
1780                     // See above for why 'ptr.offset' isn't used
1781                     self.end = mem::transmute(self.end as usize - 1);
1783                     // Use a non-null pointer value
1784                     Some(ptr::read(EMPTY as *mut T))
1786                     self.end = self.end.offset(-1);
1788                     Some(ptr::read(mem::transmute(self.end)))
// `size_hint` above returns an exact count, so `ExactSizeIterator` holds.
1795 #[stable(feature = "rust1", since = "1.0.0")]
1796 impl<T> ExactSizeIterator for IntoIter<T> {}
// Destructor: drop every element not yet yielded (by exhausting `self`),
// then free the original allocation.
// NOTE(review): a guard around `dealloc` (e.g. skipping cap == 0 /
// post-drop values) and the `unsafe` wrapper appear to be missing from this
// listing — confirm against the full file.
1798 #[unsafe_destructor]
1799 #[stable(feature = "rust1", since = "1.0.0")]
1800 impl<T> Drop for IntoIter<T> {
1801     fn drop(&mut self) {
1802         // destroy the remaining elements
1804             for _x in self.by_ref() {}
1806             dealloc(self.allocation, self.cap);
1812 /// An iterator that drains a vector.
// `Drain` removes elements from a Vec as it is iterated; the `PhantomData`
// ties it to the vector's lifetime. `#[unsafe_no_drop_flag]` means its Drop
// must tolerate running on post-drop-filled fields (see the Drop impl below).
// NOTE(review): the cursor fields (presumably `ptr`/`end`, used by the
// iterator impls below) are missing from this listing — confirm against the
// full file.
1813 #[unsafe_no_drop_flag]
1814 #[unstable(feature = "collections",
1815            reason = "recently added as part of collections reform 2")]
1816 pub struct Drain<'a, T:'a> {
1819     marker: PhantomData<&'a T>,
// Safe to share/send across threads exactly when `T` is.
1822 unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
1823 unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
// Forward iteration for `Drain`: structurally identical to
// `IntoIter::next` — walk `ptr` toward `end`, `ptr::read` each element out,
// with the ZST counter/EMPTY-sentinel special case.
// NOTE(review): the `unsafe {` opener, `type Item = T;`, the
// `let old = self.ptr;` line, and closing braces are missing from this
// listing — confirm against the full file.
1825 #[stable(feature = "rust1", since = "1.0.0")]
1826 impl<'a, T> Iterator for Drain<'a, T> {
1830     fn next(&mut self) -> Option<T> {
1832             if self.ptr == self.end {
1835                 if mem::size_of::<T>() == 0 {
1836                     // purposefully don't use 'ptr.offset' because for
1837                     // vectors with 0-size elements this would return the
1839                     self.ptr = mem::transmute(self.ptr as usize + 1);
1841                     // Use a non-null pointer value
1842                     Some(ptr::read(EMPTY as *mut T))
1845                     self.ptr = self.ptr.offset(1);
1847                     Some(ptr::read(old))
// Exact remaining count, same arithmetic as `IntoIter::size_hint`.
1854     fn size_hint(&self) -> (usize, Option<usize>) {
1855         let diff = (self.end as usize) - (self.ptr as usize);
1856         let size = mem::size_of::<T>();
1857         let exact = diff / (if size == 0 {1} else {size});
1858         (exact, Some(exact))
// Backward iteration for `Drain`: mirror of `next`, decrementing `end`
// (counter semantics for ZSTs, pointer semantics otherwise).
// NOTE(review): `unsafe {` openers and closing braces are missing from this
// listing — confirm against the full file.
1862 #[stable(feature = "rust1", since = "1.0.0")]
1863 impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1865     fn next_back(&mut self) -> Option<T> {
1867             if self.end == self.ptr {
1870                 if mem::size_of::<T>() == 0 {
1871                     // See above for why 'ptr.offset' isn't used
1872                     self.end = mem::transmute(self.end as usize - 1);
1874                     // Use a non-null pointer value
1875                     Some(ptr::read(EMPTY as *mut T))
1877                     self.end = self.end.offset(-1);
1879                     Some(ptr::read(self.end))
// `size_hint` above is exact, so `ExactSizeIterator` holds.
1886 #[stable(feature = "rust1", since = "1.0.0")]
1887 impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
// Destructor: drop any elements the caller did not consume by exhausting the
// iterator. No deallocation here — the backing buffer still belongs to the
// source Vec. The POST_DROP_USIZE comment explains why re-running this on an
// already-dropped value is safe (ptr == end, so the loop does nothing).
1889 #[unsafe_destructor]
1890 #[stable(feature = "rust1", since = "1.0.0")]
1891 impl<'a, T> Drop for Drain<'a, T> {
1892     fn drop(&mut self) {
1893         // self.ptr == self.end == mem::POST_DROP_USIZE if drop has already been called,
1894         // so we can use #[unsafe_no_drop_flag].
1896         // destroy the remaining elements
1897         for _x in self.by_ref() {}
1901 ////////////////////////////////////////////////////////////////////////////////
1902 // Conversion from &[T] to &Vec<T>
1903 ////////////////////////////////////////////////////////////////////////////////
1905 /// Wrapper type providing a `&Vec<T>` reference via `Deref`.
// `DerefVec` wraps a Vec built over *borrowed* slice memory (see `as_vec`).
// Its Drop must therefore prevent the inner Vec from freeing memory it does
// not own.
// NOTE(review): the inner `x: Vec<T>` field line, the `deref` body, and the
// Drop body (presumably zeroing the inner Vec's len/cap) are missing from
// this listing — confirm against the full file.
1906 #[unstable(feature = "collections")]
1907 pub struct DerefVec<'a, T:'a> {
1909     l: PhantomData<&'a T>,
1912 #[unstable(feature = "collections")]
1913 impl<'a, T> Deref for DerefVec<'a, T> {
1914     type Target = Vec<T>;
1916     fn deref<'b>(&'b self) -> &'b Vec<T> {
1921 // Prevent the inner `Vec<T>` from attempting to deallocate memory.
1922 #[unsafe_destructor]
1923 #[stable(feature = "rust1", since = "1.0.0")]
1924 impl<'a, T> Drop for DerefVec<'a, T> {
1925     fn drop(&mut self) {
1931 /// Convert a slice to a wrapper type providing a `&Vec<T>` reference.
// Builds a fake Vec over the slice's own memory (len == cap == slice len).
// This is only sound because DerefVec's Drop neutralizes deallocation.
1932 #[unstable(feature = "collections")]
1933 pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
1936             x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
1942 ////////////////////////////////////////////////////////////////////////////////
1943 // Partial vec, used for map_in_place
1944 ////////////////////////////////////////////////////////////////////////////////
1946 /// An owned, partially type-converted vector of elements with non-zero size.
1948 /// `T` and `U` must have the same, non-zero size. They must also have the same
1951 /// When the destructor of this struct runs, all `U`s from `start_u` (incl.) to
1952 /// `end_u` (excl.) and all `T`s from `start_t` (incl.) to `end_t` (excl.) are
1953 /// destructed. Additionally the underlying storage of `vec` will be freed.
// NOTE(review): the `vec` and `start_u`/`end_u`/`start_t`/`end_t` field lines
// (used by the Drop impl below) are missing from this listing — confirm
// against the full file.
1954 struct PartialVecNonZeroSized<T,U> {
1962     _marker: PhantomData<U>,
1965 /// An owned, partially type-converted vector of zero-sized elements.
1967 /// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
// ZST variant tracks only counts; the `Cell<(T,U)>` marker records ownership
// of both element types without storing them.
1969 struct PartialVecZeroSized<T,U> {
1972     marker: PhantomData<::core::cell::Cell<(T,U)>>,
// Drop for the non-ZST case: first zero `vec`'s length so its own Drop won't
// re-run destructors over possibly-moved-out slots (and stays safe if a `T`
// or `U` destructor unwinds), then manually run the destructor of every `U`
// and every `T` still owned, by `ptr::read`-ing and discarding each.
// NOTE(review): the `unsafe {` opener and closing braces are missing from
// this listing — confirm against the full file.
1975 #[unsafe_destructor]
1976 impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
1977     fn drop(&mut self) {
1979             // `vec` hasn't been modified until now. As it has a length
1980             // currently, this would run destructors of `T`s which might not be
1981             // there. So at first, set `vec`s length to `0`. This must be done
1982             // at first to remain memory-safe as the destructors of `U` or `T`
1983             // might cause unwinding where `vec`s destructor would be executed.
1984             self.vec.set_len(0);
1986             // We have instances of `U`s and `T`s in `vec`. Destruct them.
1987             while self.start_u != self.end_u {
1988                 let _ = ptr::read(self.start_u); // Run a `U` destructor.
1989                 self.start_u = self.start_u.offset(1);
1991             while self.start_t != self.end_t {
1992                 let _ = ptr::read(self.start_t); // Run a `T` destructor.
1993                 self.start_t = self.start_t.offset(1);
1995             // After this destructor ran, the destructor of `vec` will run,
1996             // deallocating the underlying memory.
2001 #[unsafe_destructor]
2002 impl<T,U> Drop for PartialVecZeroSized<T,U> {
2003 fn drop(&mut self) {
2005 // Destruct the instances of `T` and `U` this struct owns.
2006 while self.num_t != 0 {
2007 let _: T = mem::uninitialized(); // Run a `T` destructor.
2010 while self.num_u != 0 {
2011 let _: U = mem::uninitialized(); // Run a `U` destructor.