1 // Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 use ops::{self, Add, Sub};
16 use super::{FusedIterator, TrustedLen};
18 /// Objects that can be stepped over in both directions.
// NOTE(review): this excerpt elides some original lines (e.g. the
// `issue = "..."` field of the `#[unstable]` attribute and the trait's
// closing brace); only comment/doc text is touched here.
20 /// The `steps_between` function provides a way to efficiently compare
21 /// two `Step` objects.
22 #[unstable(feature = "step_trait",
23 reason = "likely to be replaced by finer-grained traits",
25 pub trait Step: Clone + PartialOrd + Sized {
26 /// Returns the number of steps between two step objects. The count is
27 /// inclusive of `start` and exclusive of `end`.
29 /// Returns `None` if it is not possible to calculate `steps_between`
31 fn steps_between(start: &Self, end: &Self) -> Option<usize>;
33 /// Replaces this step with `1`, returning itself
34 fn replace_one(&mut self) -> Self;
36 /// Replaces this step with `0`, returning itself
37 fn replace_zero(&mut self) -> Self;
39 /// Adds one to this step, returning the result
40 fn add_one(&self) -> Self;
42 /// Subtracts one from this step, returning the result
43 fn sub_one(&self) -> Self;
45 /// Adds a `usize`, returning `None` on overflow
46 fn add_usize(&self, n: usize) -> Option<Self>;
49 // These are still macro-generated because the integer literals resolve to different types.
// Generates the `Step` methods (`replace_one`, `replace_zero`,
// `add_one`, `sub_one`) whose bodies are token-identical for every
// integer type; invoked from the per-type `step_impl_*` macros below.
// NOTE(review): the method bodies are elided in this excerpt.
50 macro_rules! step_identical_methods {
53 fn replace_one(&mut self) -> Self {
58 fn replace_zero(&mut self) -> Self {
63 fn add_one(&self) -> Self {
68 fn sub_one(&self) -> Self {
// Implements `Step` for unsigned integer types no wider than `usize`
// (see the `#[cfg]`-guarded invocations below).
74 macro_rules! step_impl_unsigned {
76 #[unstable(feature = "step_trait",
77 reason = "likely to be replaced by finer-grained traits",
81 #[allow(trivial_numeric_casts)]
82 fn steps_between(start: &$t, end: &$t) -> Option<usize> {
84 // Note: We assume $t <= usize here
// so the `$t -> usize` cast below cannot truncate.
// NOTE(review): the guard ensuring `start <= end` is elided in this
// excerpt; the raw subtraction would underflow otherwise -- confirm.
85 Some((*end - *start) as usize)
92 #[allow(unreachable_patterns)]
93 fn add_usize(&self, n: usize) -> Option<Self> {
94 match <$t>::try_from(n) {
// If `n` fits in `$t`, defer to the type's overflow-checked
// addition; the `Err` arm (elided here) presumably yields
// `None` -- TODO confirm.
95 Ok(n_as_t) => self.checked_add(n_as_t),
100 step_identical_methods!();
// Implements `Step` for signed integer types (each paired with its
// unsigned counterpart) no wider than `isize`.
104 macro_rules! step_impl_signed {
105 ($( [$t:ty : $unsigned:ty] )*) => ($(
106 #[unstable(feature = "step_trait",
107 reason = "likely to be replaced by finer-grained traits",
111 #[allow(trivial_numeric_casts)]
112 fn steps_between(start: &$t, end: &$t) -> Option<usize> {
114 // Note: We assume $t <= isize here
115 // Use .wrapping_sub and cast to usize to compute the
116 // difference that may not fit inside the range of isize.
117 Some((*end as isize).wrapping_sub(*start as isize) as usize)
124 #[allow(unreachable_patterns)]
125 fn add_usize(&self, n: usize) -> Option<Self> {
126 match <$unsigned>::try_from(n) {
127 Ok(n_as_unsigned) => {
128 // Wrapping in unsigned space handles cases like
129 // `-120_i8.add_usize(200) == Some(80_i8)`,
130 // even though 200_usize is out of range for i8.
131 let wrapped = (*self as $unsigned).wrapping_add(n_as_unsigned) as $t;
132 if wrapped >= *self {
// The addend is non-negative, so the result must compare
// `>= self` unless signed overflow wrapped it around: this
// comparison doubles as the overflow check.
135 None // Addition overflowed
142 step_identical_methods!();
// Implements `Step` for integer types that are (or may be) wider than
// `usize`, where a step count cannot be reliably computed; judging by
// the name, `steps_between` presumably returns `None` (its body is
// elided in this excerpt).
147 macro_rules! step_impl_no_between {
149 #[unstable(feature = "step_trait",
150 reason = "likely to be replaced by finer-grained traits",
154 fn steps_between(_start: &Self, _end: &Self) -> Option<usize> {
159 fn add_usize(&self, n: usize) -> Option<Self> {
// `$t` is at least as wide as `usize` at every invocation site
// below, so the `as` cast is lossless -- TODO confirm per target.
160 self.checked_add(n as $t)
163 step_identical_methods!();
168 step_impl_unsigned!(usize u8 u16);
169 #[cfg(not(target_pointer_witdth = "16"))]
170 step_impl_unsigned!(u32);
171 #[cfg(target_pointer_witdth = "16")]
172 step_impl_no_between!(u32);
173 step_impl_signed!([isize: usize] [i8: u8] [i16: u16]);
174 #[cfg(not(target_pointer_witdth = "16"))]
175 step_impl_signed!([i32: u32]);
176 #[cfg(target_pointer_witdth = "16")]
177 step_impl_no_between!(i32);
178 #[cfg(target_pointer_width = "64")]
179 step_impl_unsigned!(u64);
180 #[cfg(target_pointer_width = "64")]
181 step_impl_signed!([i64: u64]);
182 // If the target pointer width is not 64-bits, we
183 // assume here that it is less than 64-bits.
184 #[cfg(not(target_pointer_width = "64"))]
185 step_impl_no_between!(u64 i64);
186 step_impl_no_between!(u128 i128);
// Generates `ExactSizeIterator` impls for `ops::Range<$t>`; only invoked
// for types whose ranges cannot exceed `usize::MAX` elements.
// NOTE(review): the macro-rule arm delimiters are elided in this excerpt.
188 macro_rules! range_exact_iter_impl {
190 #[stable(feature = "rust1", since = "1.0.0")]
191 impl ExactSizeIterator for ops::Range<$t> { }
// Generates `ExactSizeIterator` impls for `ops::RangeInclusive<$t>`.
// NOTE(review): the macro-rule arm delimiters are elided in this excerpt.
195 macro_rules! range_incl_exact_iter_impl {
197 #[stable(feature = "inclusive_range", since = "1.26.0")]
198 impl ExactSizeIterator for ops::RangeInclusive<$t> { }
// Generates `unsafe` `TrustedLen` impls for `ops::Range<$t>`.
// NOTE(review): the macro-rule arm delimiters are elided in this excerpt.
202 macro_rules! range_trusted_len_impl {
204 #[unstable(feature = "trusted_len", issue = "37572")]
205 unsafe impl TrustedLen for ops::Range<$t> { }
// Generates `unsafe` `TrustedLen` impls for `ops::RangeInclusive<$t>`.
// NOTE(review): the macro-rule arm delimiters are elided in this excerpt.
209 macro_rules! range_incl_trusted_len_impl {
211 #[unstable(feature = "trusted_len", issue = "37572")]
212 unsafe impl TrustedLen for ops::RangeInclusive<$t> { }
216 #[stable(feature = "rust1", since = "1.0.0")]
// Forward iteration over `start..end`: yields the old `start` and steps
// it toward the exclusive `end`.
217 impl<A: Step> Iterator for ops::Range<A> {
221 fn next(&mut self) -> Option<A> {
222 if self.start < self.end {
223 // We check for overflow here, even though it can't actually
224 // happen. Adding this check does however help llvm vectorize loops
225 // for some ranges that don't get vectorized otherwise,
226 // and this won't actually result in an extra check in an optimized build.
227 if let Some(mut n) = self.start.add_usize(1) {
// After the swap, `n` holds the old `start` -- the value yielded.
228 mem::swap(&mut n, &mut self.start);
239 fn size_hint(&self) -> (usize, Option<usize>) {
240 match Step::steps_between(&self.start, &self.end) {
// An exact step count makes the hint tight: (hint, Some(hint)).
241 Some(hint) => (hint, Some(hint)),
247 fn nth(&mut self, n: usize) -> Option<A> {
248 if let Some(plus_n) = self.start.add_usize(n) {
249 if plus_n < self.end {
// Resume iteration just past the n-th element; `plus_n` is
// presumably returned on an elided line -- TODO confirm.
250 self.start = plus_n.add_one();
// Overflow or `plus_n >= end`: exhaust the iterator.
255 self.start = self.end.clone();
260 fn last(mut self) -> Option<A> {
265 fn min(mut self) -> Option<A> {
270 fn max(mut self) -> Option<A> {
275 // These macros generate `ExactSizeIterator` impls for various range types.
276 // Range<{u,i}64> and RangeInclusive<{u,i}{32,64,size}> are excluded
277 // because they cannot guarantee having a length <= usize::MAX, which is
278 // required by ExactSizeIterator.
279 range_exact_iter_impl!(usize u8 u16 u32 isize i8 i16 i32);
// NOTE(review): `RangeInclusive<u16>` can have 65536 elements, which
// does not fit a 16-bit `usize` -- verify against supported targets.
280 range_incl_exact_iter_impl!(u8 u16 i8 i16);
282 // These macros generate `TrustedLen` impls.
284 // They need to guarantee that .size_hint() is either exact, or that
285 // the upper bound is None when it does not fit the type limits.
286 range_trusted_len_impl!(usize isize u8 i8 u16 i16 u32 i32 i64 u64);
287 range_incl_trusted_len_impl!(usize isize u8 i8 u16 i16 u32 i32 i64 u64);
289 #[stable(feature = "rust1", since = "1.0.0")]
// Reverse iteration over `start..end`: steps the exclusive `end`
// backward toward `start`.
290 impl<A: Step> DoubleEndedIterator for ops::Range<A> {
292 fn next_back(&mut self) -> Option<A> {
293 if self.start < self.end {
294 self.end = self.end.sub_one();
// `end` is exclusive, so the decremented value is the one yielded.
295 Some(self.end.clone())
302 #[stable(feature = "fused", since = "1.26.0")]
// Once `start >= end`, `next` keeps failing its guard, so `Range` never
// yields again after returning `None` -- the marker impl is sound.
303 impl<A: Step> FusedIterator for ops::Range<A> {}
305 #[stable(feature = "rust1", since = "1.0.0")]
// Forward iteration over `start..` -- there is no upper bound to stop at.
306 impl<A: Step> Iterator for ops::RangeFrom<A> {
310 fn next(&mut self) -> Option<A> {
311 let mut n = self.start.add_one();
// After the swap, `n` holds the old `start` -- the value yielded.
312 mem::swap(&mut n, &mut self.start);
317 fn size_hint(&self) -> (usize, Option<usize>) {
322 fn nth(&mut self, n: usize) -> Option<A> {
// Unlike `Range::nth`, overflow is a hard error here: with no `end`
// there is nothing to clamp the iterator to.
323 let plus_n = self.start.add_usize(n).expect("overflow in RangeFrom::nth");
324 self.start = plus_n.add_one();
329 #[stable(feature = "fused", since = "1.26.0")]
// `RangeFrom::next` always yields `Some` (it never returns `None` in the
// visible code), so the fused guarantee holds trivially.
330 impl<A: Step> FusedIterator for ops::RangeFrom<A> {}
332 #[unstable(feature = "trusted_len", issue = "37572")]
// Sound because `RangeFrom` is unbounded; its `size_hint` (body elided
// above) presumably reports `None` as the upper bound -- TODO confirm.
333 unsafe impl<A: Step> TrustedLen for ops::RangeFrom<A> {}
335 #[stable(feature = "inclusive_range", since = "1.26.0")]
// Forward iteration over `start..=end`. Since `end` itself must be
// yielded, a separate `is_empty` flag tracks exhaustion instead of
// comparing `start` and `end` alone.
336 impl<A: Step> Iterator for ops::RangeInclusive<A> {
340 fn next(&mut self) -> Option<A> {
341 self.compute_is_empty();
342 if self.is_empty.unwrap_or_default() {
345 let is_iterating = self.start < self.end;
346 self.is_empty = Some(!is_iterating);
347 Some(if is_iterating {
// More than one element left: yield the old `start` and advance.
348 let n = self.start.add_one();
349 mem::replace(&mut self.start, n)
356 fn size_hint(&self) -> (usize, Option<usize>) {
361 match Step::steps_between(&self.start, &self.end) {
// An inclusive range has one more element than the exclusive step
// count; `checked_add` keeps the upper bound honest on overflow.
362 Some(hint) => (hint.saturating_add(1), hint.checked_add(1)),
368 fn nth(&mut self, n: usize) -> Option<A> {
369 self.compute_is_empty();
370 if self.is_empty.unwrap_or_default() {
374 if let Some(plus_n) = self.start.add_usize(n) {
375 use cmp::Ordering::*;
377 match plus_n.partial_cmp(&self.end) {
// Presumably the `Less` arm: the range stays non-empty after
// this call (arm patterns are elided in this excerpt).
379 self.is_empty = Some(false);
380 self.start = plus_n.add_one();
// Presumably the `Equal` arm: the n-th element is `end`, so the
// range is exhausted after this call.
384 self.is_empty = Some(true);
391 self.is_empty = Some(true);
396 fn last(mut self) -> Option<A> {
401 fn min(mut self) -> Option<A> {
406 fn max(mut self) -> Option<A> {
411 #[stable(feature = "inclusive_range", since = "1.26.0")]
// Reverse iteration over `start..=end`, mirroring `next` but stepping
// `end` downward.
412 impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
414 fn next_back(&mut self) -> Option<A> {
415 self.compute_is_empty();
416 if self.is_empty.unwrap_or_default() {
419 let is_iterating = self.start < self.end;
420 self.is_empty = Some(!is_iterating);
421 Some(if is_iterating {
// More than one element left: yield the old `end` and step back.
422 let n = self.end.sub_one();
423 mem::replace(&mut self.end, n)
430 #[stable(feature = "fused", since = "1.26.0")]
// Once `is_empty` is `Some(true)`, `next`/`next_back` keep taking their
// early-exit branch, so the iterator never yields again after exhaustion.
431 impl<A: Step> FusedIterator for ops::RangeInclusive<A> {}