#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
#[cfg(not(no_global_oom_handling))]
use crate::collections::VecDeque;
use crate::raw_vec::RawVec;
use core::array;
use core::fmt;
use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // The drop impl reconstructs a RawVec from buf, cap and alloc;
    // to avoid dropping the allocator twice we wrap it in ManuallyDrop.
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}
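
// The `Debug` output below shows the elements not yet yielded; for example,
// `format!("{:?}", vec![1, 2].into_iter())` renders as `IntoIter([1, 2])`.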
#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
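    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the unstable `allocator_api` feature); a
    /// `Vec` built with `vec!` uses the default [`Global`] allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::Global;
    ///
    /// let into_iter = vec![1, 2, 3].into_iter();
    /// let _alloc: &Global = into_iter.allocator();
    /// ```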
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    /// This method guarantees it won't panic before relinquishing
    /// the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
    /// (&mut into_iter).for_each(core::mem::drop);
    /// std::mem::forget(into_iter);
    /// ```
    ///
    /// This method is used by in-place iteration; refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // Overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self; this generates less assembly.
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        // Dropping the remaining elements can panic, so this needs to be
        // done only after updating the other fields.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets the remaining elements without dropping them, while still
    /// allowing the backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        self.end = self.ptr;
    }
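
    /// Converts the remaining elements into a [`VecDeque`], reusing the
    /// existing allocation.
    ///
    /// A usage sketch: in this implementation, stable code reaches this
    /// conversion through `VecDeque::from(Vec<T>)`, which is built on top of it:
    ///
    /// ```
    /// use std::collections::VecDeque;
    ///
    /// let deque: VecDeque<i32> = VecDeque::from(vec![1, 2, 3]);
    /// assert!(deque.iter().eq(&[1, 2, 3]));
    /// ```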
    #[cfg(not(no_global_oom_handling))]
    #[inline]
    pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
        // Keep our `Drop` impl from dropping the elements and the allocator
        let mut this = ManuallyDrop::new(self);

        // SAFETY: This allocation originally came from a `Vec`, so it passes
        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
        // so the `sub_ptr`s below cannot wrap, and will produce a well-formed
        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
        // Taking `alloc` is ok because nothing else is going to look at it,
        // since our `Drop` impl isn't going to run so there's no more code.
        unsafe {
            let buf = this.buf.as_ptr();
            let initialized = if T::IS_ZST {
                // All the pointers are the same for ZSTs, so it's fine to
                // say that they're all at the beginning of the "allocation".
                0..this.len()
            } else {
                this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf)
            };
            let cap = this.cap;
            let alloc = ManuallyDrop::take(&mut this.alloc);
            VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
        }
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else if T::IS_ZST {
            // Purposefully don't use `ptr.offset` because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = self.ptr.wrapping_byte_add(1);

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if T::IS_ZST {
            self.end.addr().wrapping_sub(self.ptr.addr())
        } else {
            unsafe { self.end.sub_ptr(self.ptr) }
        };
        (exact, Some(exact))
    }
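
    /// A minimal sketch of the observable behavior (requires the unstable
    /// `iter_advance_by` feature): skipped elements are dropped in place
    /// rather than read out one by one.
    ///
    /// ```
    /// #![feature(iter_advance_by)]
    ///
    /// let mut it = vec![1, 2, 3, 4].into_iter();
    /// assert_eq!(it.advance_by(2), Ok(()));
    /// assert_eq!(it.next(), Some(3));
    /// ```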
    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
        if T::IS_ZST {
            // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
            // effectively results in unsigned pointers representing positions 0..usize::MAX,
            // which is valid for ZSTs.
            self.ptr = self.ptr.wrapping_byte_add(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }
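
    /// A minimal sketch of the chunking behavior (requires the unstable
    /// `iter_next_chunk` feature): full chunks come back as arrays, and a
    /// short tail comes back as the `Err` variant.
    ///
    /// ```
    /// #![feature(iter_next_chunk)]
    ///
    /// let mut it = vec![1, 2, 3, 4, 5].into_iter();
    /// assert_eq!(it.next_chunk::<2>().unwrap(), [1, 2]);
    /// assert_eq!(it.as_slice(), &[3, 4, 5]);
    /// ```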
    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = MaybeUninit::uninit_array();

        let len = self.len();

        if T::IS_ZST {
            if len < N {
                self.forget_remaining_elements();
                // Safety: ZSTs can be conjured ex nihilo; only the amount has to be correct
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.ptr = self.ptr.wrapping_byte_add(N);
            // Safety: ditto
            return Ok(unsafe { raw_ary.transpose().assume_init() });
        }

        if len < N {
            // Safety: `len` indicates that this many elements are available and we just checked that
            // it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // Safety: `len` is larger than the array size. Copy a fixed amount here to fully
        // initialize the array.
        return unsafe {
            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(raw_ary.transpose().assume_init())
        };
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note that the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy`, so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe { if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if T::IS_ZST {
            // See above for why `ptr.offset` isn't used
            self.end = self.end.wrapping_byte_sub(1);

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        if T::IS_ZST {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        self.ptr == self.end
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// T: Copy as an approximation for !Drop, since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
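        // Panic-safety note: the guard below frees the backing allocation in
        // its own `Drop` impl, so the buffer is reclaimed even if dropping
        // one of the remaining elements unwinds.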
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}

// In addition to the SAFETY invariants of the following three unsafe traits,
// also refer to the vec::in_place_collect module documentation for an overview.
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}