1 //! Defines the `IntoIter` owned iterator for arrays.
5 iter::{ExactSizeIterator, FusedIterator, TrustedLen},
6 mem::{self, MaybeUninit},
10 use super::LengthAtMost32;
13 /// A by-value [array] iterator.
15 /// [array]: ../../std/primitive.array.html
16 #[unstable(feature = "array_value_iter", issue = "65798")]
17 pub struct IntoIter<T, const N: usize>
// NOTE(review): `LengthAtMost32` presumably bounds `N <= 32` (the
// pre-const-generics limit for array trait impls) — confirm in `super`.
19 [T; N]: LengthAtMost32,
21 /// This is the array we are iterating over.
23 /// Elements with index `i` where `alive.start <= i < alive.end` have not
24 /// been yielded yet and are valid array entries. Elements with indices `i
25 /// < alive.start` or `i >= alive.end` have been yielded already and must
26 /// not be accessed anymore! Those dead elements might even be in a
27 /// completely uninitialized state!
29 /// So the invariants are:
30 /// - `data[alive]` is alive (i.e. contains valid elements)
31 /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
32 /// elements were already read and must not be touched anymore!)
33 data: [MaybeUninit<T>; N],
35 /// The elements in `data` that have not been yielded yet.
// Invariants (relied upon by the unsafe code in the iterator impls):
38 /// - `alive.start <= alive.end`
39 /// - `alive.end <= N`
43 impl<T, const N: usize> IntoIter<T, {N}>
45 [T; N]: LengthAtMost32,
47 /// Creates a new iterator over the given `array`.
49 /// *Note*: this method might never get stabilized, or might be removed in
50 /// the future, as there will likely be another, preferred way of obtaining
51 /// this iterator (either via `IntoIterator` for arrays or via another way).
52 #[unstable(feature = "array_value_iter", issue = "65798")]
53 pub fn new(array: [T; N]) -> Self {
54 // SAFETY: The transmute here is actually safe. The docs of `MaybeUninit`
57 // > `MaybeUninit<T>` is guaranteed to have the same size and alignment
60 // The docs even show a transmute from an array of `MaybeUninit<T>` to
63 // With that, this initialization satisfies the invariants.
65 // FIXME(LukasKalbertodt): actually use `mem::transmute` here, once it
66 // works with const generics:
67 // `mem::transmute::<[T; {N}], [MaybeUninit<T>; {N}]>(array)`
69 // Until then, we do it manually here. We first create a bitwise copy
70 // but cast the pointer so that it is treated as a different type. Then
71 // we forget `array` so that it is not dropped (the bitwise copy now
// owns the elements; dropping `array` too would double-drop them).
73 let data = ptr::read(&array as *const [T; N] as *const [MaybeUninit<T>; N]);
84 /// Returns an immutable slice of all elements that have not been yielded
// yet (i.e. `data[alive]`, which by the struct invariant is initialized).
86 fn as_slice(&self) -> &[T] {
87 let slice = &self.data[self.alive.clone()];
88 // SAFETY: This transmute is safe. As mentioned in `new`, `MaybeUninit` retains
89 // the size and alignment of `T`. Furthermore, we know that all
90 // elements within `alive` are properly initialized.
92 mem::transmute::<&[MaybeUninit<T>], &[T]>(slice)
98 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
99 impl<T, const N: usize> Iterator for IntoIter<T, {N}>
101 [T; N]: LengthAtMost32,
104 fn next(&mut self) -> Option<Self::Item> {
// Empty alive region: nothing left to yield.
105 if self.alive.start == self.alive.end {
111 // From the check above we know that `alive.start != alive.end`.
112 // Combine this with the invariant `alive.start <= alive.end`, we know
113 // that `alive.start < alive.end`. Increasing `alive.start` by 1
114 // maintains the invariant regarding `alive`. However, due to this
115 // change, for a short time, the alive zone is not `data[alive]`
116 // anymore, but `data[idx..alive.end]`.
117 let idx = self.alive.start;
118 self.alive.start += 1;
120 // Read the element from the array.
121 // SAFETY: This is safe: `idx` is an index
122 // into the "alive" region of the array. Reading this element means
123 // that `data[idx]` is regarded as dead now (i.e. do not touch). As
124 // `idx` was the start of the alive-zone, the alive zone is now
125 // `data[alive]` again, restoring all invariants.
126 let out = unsafe { self.data.get_unchecked(idx).read() };
131 fn size_hint(&self) -> (usize, Option<usize>) {
// The exact remaining length is known, so lower and upper bound
// coincide (elided body presumably returns `(len, Some(len))`).
132 let len = self.len();
// Consuming count: by the `Iterator` contract this equals the number of
// remaining elements, i.e. `self.len()` (body elided in this view).
136 fn count(self) -> usize {
// Last remaining element; presumably delegates to `next_back` — confirm
// against the elided body.
140 fn last(mut self) -> Option<Self::Item> {
145 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
146 impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, {N}>
148 [T; N]: LengthAtMost32,
150 fn next_back(&mut self) -> Option<Self::Item> {
// Empty alive region: nothing left to yield.
151 if self.alive.start == self.alive.end {
155 // Decrease end index.
157 // From the check above we know that `alive.start != alive.end`.
158 // Combine this with the invariant `alive.start <= alive.end`, we know
159 // that `alive.start < alive.end`. As `alive.start` cannot be negative,
160 // `alive.end` is at least 1, meaning that we can safely decrement it
161 // by one. This also maintains the invariant `alive.start <=
162 // alive.end`. However, due to this change, for a short time, the alive
163 // zone is not `data[alive]` anymore, but `data[alive.start..alive.end + 1]`.
167 // Read the element from the array.
168 // SAFETY: This is safe: `alive.end` is an
169 // index into the "alive" region of the array. Compare the previous
170 // comment that states that the alive region is
171 // `data[alive.start..alive.end + 1]`. Reading this element means that
172 // `data[alive.end]` is regarded as dead now (i.e. do not touch). As
173 // `alive.end` was the end of the alive-zone, the alive zone is now
174 // `data[alive]` again, restoring all invariants.
175 let out = unsafe { self.data.get_unchecked(self.alive.end).read() };
181 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
182 impl<T, const N: usize> Drop for IntoIter<T, {N}>
184 [T; N]: LengthAtMost32,
187 // We simply drop each element via `for_each`. This should not incur
188 // any significant runtime overhead and avoids adding another `unsafe`
// block: `next` only yields elements from the alive region, so the
// dead (already-moved-out, possibly uninitialized) slots are never
// touched here.
190 self.by_ref().for_each(drop);
194 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
195 impl<T, const N: usize> ExactSizeIterator for IntoIter<T, {N}>
197 [T; N]: LengthAtMost32,
199 fn len(&self) -> usize {
200 // Will never underflow due to the invariant `alive.start <= alive.end`.
202 self.alive.end - self.alive.start
// Delegates to `Range::is_empty`, equivalent to `len() == 0`.
204 fn is_empty(&self) -> bool {
205 self.alive.is_empty()
// Once `alive` is empty it stays empty (`next`/`next_back` only shrink it
// when they yield), so the iterator can never yield again after `None` —
// exactly the `FusedIterator` contract.
209 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
210 impl<T, const N: usize> FusedIterator for IntoIter<T, {N}>
212 [T; N]: LengthAtMost32,
215 // The iterator indeed reports the correct length. The number of "alive"
216 // elements (that will still be yielded) is the length of the range `alive`.
217 // This range is decremented in length in either `next` or `next_back`. It is
218 // always decremented by 1 in those methods, but only if `Some(_)` is returned.
// Together with `size_hint` reporting that exact length as both lower and
// upper bound, the `TrustedLen` contract is upheld.
219 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
220 unsafe impl<T, const N: usize> TrustedLen for IntoIter<T, {N}>
222 [T; N]: LengthAtMost32,
225 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
226 impl<T: Clone, const N: usize> Clone for IntoIter<T, {N}>
228 [T; N]: LengthAtMost32,
230 fn clone(&self) -> Self {
231 // SAFETY: each point of unsafety is documented inside the unsafe block
233 // This creates a new uninitialized array. Note that the `assume_init`
234 // refers to the array, not the individual elements. And it is Ok if
235 // the array is in an uninitialized state as all elements may be
236 // uninitialized (all bit patterns are valid). Compare the
237 // `MaybeUninit` docs for more information.
238 let mut new_data: [MaybeUninit<T>; N] = MaybeUninit::uninit().assume_init();
240 // Clone all alive elements.
// NOTE(review): if an element's `clone()` panics mid-loop, the elements
// already written into `new_data` are leaked (`MaybeUninit` never drops
// its contents) — confirm this leak-on-panic is acceptable here.
241 for idx in self.alive.clone() {
242 // The element at `idx` in the old array is alive, so we can
243 // safely call `get_ref()`. We then clone it, and write the
244 // clone into the new array.
245 let clone = self.data.get_unchecked(idx).get_ref().clone();
246 new_data.get_unchecked_mut(idx).write(clone);
// Same alive range as the original, so the clone yields the same elements.
251 alive: self.alive.clone(),
257 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
258 impl<T: fmt::Debug, const N: usize> fmt::Debug for IntoIter<T, {N}>
260 [T; N]: LengthAtMost32,
262 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
263 // Only print the elements that were not yielded yet: we cannot
264 // access the yielded elements anymore (they are dead and may be
// uninitialized memory).
265 f.debug_tuple("IntoIter")
266 .field(&self.as_slice())