#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
use crate::raw_vec::RawVec;
use core::array;
use core::fmt;
use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop, MaybeUninit};
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // The drop impl reconstructs a RawVec from buf, cap and alloc;
    // to avoid dropping the allocator twice we need to wrap it in ManuallyDrop.
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
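    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the unstable `allocator_api` feature; the
    /// `Global` type annotation is only for illustration:
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::alloc::Global;
    ///
    /// let into_iter = vec![1, 2, 3].into_iter();
    /// let _alloc: &Global = into_iter.allocator();
    /// ```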
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient:
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    ///
    /// This method is used by in-place iteration; refer to the vec::in_place_collect
    /// module documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // Overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self; this creates less assembly.
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        // SAFETY: `remaining` still points at the old buffer, which outlives
        // this call; the fields above no longer reference it.
        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets the remaining elements without dropping them, while still allowing the
    /// backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        self.ptr = self.end;
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else if mem::size_of::<T>() == 0 {
            // Purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = self.ptr.wrapping_byte_add(1);

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            self.end.addr().wrapping_sub(self.ptr.addr())
        } else {
            unsafe { self.end.sub_ptr(self.ptr) }
        };
        (exact, Some(exact))
    }
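
    // A minimal usage sketch (not part of the implementation): the hint is
    // always exact for this iterator, which is the contract behind the
    // `TrustedLen` impl further down.
    //
    //     let it = vec![1, 2, 3].into_iter();
    //     assert_eq!(it.size_hint(), (3, Some(3)));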

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
        if mem::size_of::<T>() == 0 {
            // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
            // effectively results in unsigned pointers representing positions 0..usize::MAX,
            // which is valid for ZSTs.
            self.ptr = self.ptr.wrapping_byte_add(step_size);
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
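
    // A minimal usage sketch (assumes the unstable `iter_advance_by` feature;
    // not part of the implementation): skipped elements are dropped in place,
    // and early exhaustion reports how many elements were actually skipped.
    //
    //     #![feature(iter_advance_by)]
    //     let mut it = vec![1, 2, 3].into_iter();
    //     assert_eq!(it.advance_by(2), Ok(()));
    //     assert_eq!(it.next(), Some(3));
    //     assert_eq!(it.advance_by(5), Err(0));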

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let mut raw_ary = MaybeUninit::uninit_array();

        let len = self.len();

        if mem::size_of::<T>() == 0 {
            if len < N {
                self.forget_remaining_elements();
                // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
                return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
            }

            self.ptr = self.ptr.wrapping_byte_add(N);
            // Safety: ditto
            return Ok(unsafe { MaybeUninit::array_assume_init(raw_ary) });
        }

        if len < N {
            // Safety: `len` indicates that this many elements are available and we just checked that
            // it fits into the array.
            unsafe {
                ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
                self.forget_remaining_elements();
                return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
            }
        }

        // Safety: `len` is at least as large as the array size. Copy a fixed amount here to fully
        // initialize the array.
        unsafe {
            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
            self.ptr = self.ptr.add(N);
            Ok(MaybeUninit::array_assume_init(raw_ary))
        }
    }
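
    // A minimal usage sketch (assumes the unstable `iter_next_chunk` feature;
    // not part of the implementation): whole arrays are returned until fewer
    // than `N` items remain, at which point the remainder comes back as an
    // `array::IntoIter`.
    //
    //     #![feature(iter_next_chunk)]
    //     let mut it = vec![1, 2, 3, 4, 5].into_iter();
    //     assert_eq!(it.next_chunk::<2>().unwrap(), [1, 2]);
    //     assert_eq!(it.next_chunk::<2>().unwrap(), [3, 4]);
    //     assert_eq!(it.next_chunk::<2>().unwrap_err().as_slice(), &[5]);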

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and `self.ptr.add(i)`
        // is guaranteed to point to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used here.
            self.end = self.end.wrapping_byte_sub(1);

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
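
    // A minimal usage sketch (not part of the implementation): both ends can be
    // consumed independently and meet in the middle.
    //
    //     let mut it = vec![1, 2, 3].into_iter();
    //     assert_eq!(it.next_back(), Some(3));
    //     assert_eq!(it.next(), Some(1));
    //     assert_eq!(it.next_back(), Some(2));
    //     assert_eq!(it.next_back(), None);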

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        if mem::size_of::<T>() == 0 {
            // SAFETY: same as for advance_by()
            self.end = self.end.wrapping_byte_sub(step_size);
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.sub(step_size) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        self.ptr == self.end
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// `T: Copy` as an approximation for `!Drop`, since `get_unchecked` does not advance
// `self.ptr` and thus we can't implement drop-handling.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`.
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
    }
}
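
// A minimal usage sketch (not part of the implementation): cloning copies only
// the elements that have not been yielded yet, into a fresh allocation.
//
//     let mut it = vec![1, 2, 3].into_iter();
//     let _ = it.next();
//     assert_eq!(it.clone().collect::<Vec<_>>(), vec![2, 3]);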

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        // The guard frees the backing allocation even if dropping an element panics.
        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}
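
// A minimal behavior sketch (not part of the implementation): dropping the
// iterator drops every element that was not yielded, then frees the buffer.
//
//     use std::rc::Rc;
//     let rc = Rc::new(());
//     let it = vec![rc.clone(), rc.clone()].into_iter();
//     drop(it); // both clones are dropped here
//     assert_eq!(Rc::strong_count(&rc), 1);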

// In addition to the SAFETY invariants of the following three unsafe traits,
// also refer to the vec::in_place_collect module documentation for an overview.
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}