1 // ignore-tidy-filelength
3 //! Slice management and manipulation.
5 //! For more details see [`std::slice`].
7 //! [`std::slice`]: ../../std/slice/index.html
9 #![stable(feature = "rust1", since = "1.0.0")]
11 // How this module is organized.
13 // The library infrastructure for slices is fairly messy. There's
14 // a lot of stuff defined here. Let's keep it clean.
16 // The layout of this file is thus:
18 // * Inherent methods. This is where most of the slice API resides.
19 // * Implementations of a few common traits with important slice ops.
20 // * Definitions of a bunch of iterators.
22 // * The `raw` and `bytes` submodules.
23 // * Boilerplate trait implementations.
26 use crate::cmp::Ordering::{self, Equal, Greater, Less};
28 use crate::intrinsics::{assume, exact_div, is_aligned_and_not_null, unchecked_sub};
30 use crate::marker::{self, Copy, Send, Sized, Sync};
32 use crate::ops::{self, FnMut, Range};
33 use crate::option::Option;
34 use crate::option::Option::{None, Some};
35 use crate::ptr::{self, NonNull};
36 use crate::result::Result;
37 use crate::result::Result::{Err, Ok};
40 feature = "slice_internals",
42 reason = "exposed from core to be reused in std; use the memchr crate"
44 /// Pure rust memchr implementation, taken from rust-memchr
57 /// Returns the number of elements in the slice.
62 /// let a = [1, 2, 3];
63 /// assert_eq!(a.len(), 3);
65 #[stable(feature = "rust1", since = "1.0.0")]
66 #[rustc_const_stable(feature = "const_slice_len", since = "1.32.0")]
68 // SAFETY: const sound because we transmute out the length field as a usize (which it must be)
69 #[allow(unused_attributes)]
70 #[allow_internal_unstable(const_fn_union)]
71 pub const fn len(&self) -> usize {
72 // SAFETY: this is safe because `&[T]` and `FatPtr<T>` have the same layout.
73 // Only `std` can make this guarantee.
// Reading `.raw.len` through the `Repr` union extracts the length straight
// from the fat pointer's metadata, with no pointer dereference.
74 unsafe { crate::ptr::Repr { rust: self }.raw.len }
77 /// Returns `true` if the slice has a length of 0.
82 /// let a = [1, 2, 3];
83 /// assert!(!a.is_empty());
/// let w: &[i32] = &[];
/// assert!(w.is_empty());
85 #[stable(feature = "rust1", since = "1.0.0")]
86 #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.32.0")]
// NOTE(review): the function body falls outside this excerpt.
88 pub const fn is_empty(&self) -> bool {
92 /// Returns the first element of the slice, or `None` if it is empty.
97 /// let v = [10, 40, 30];
98 /// assert_eq!(Some(&10), v.first());
100 /// let w: &[i32] = &[];
101 /// assert_eq!(None, w.first());
103 #[stable(feature = "rust1", since = "1.0.0")]
105 pub fn first(&self) -> Option<&T> {
// Slice pattern: `[first, ..]` matches any non-empty slice and borrows its
// first element; an empty slice falls through to `None`.
106 if let [first, ..] = self { Some(first) } else { None }
109 /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
114 /// let x = &mut [0, 1, 2];
116 /// if let Some(first) = x.first_mut() {
119 /// assert_eq!(x, &[5, 1, 2]);
121 #[stable(feature = "rust1", since = "1.0.0")]
123 pub fn first_mut(&mut self) -> Option<&mut T> {
// Mutable analogue of `first`; same slice pattern, mutable borrow.
124 if let [first, ..] = self { Some(first) } else { None }
127 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
132 /// let x = &[0, 1, 2];
134 /// if let Some((first, elements)) = x.split_first() {
135 /// assert_eq!(first, &0);
136 /// assert_eq!(elements, &[1, 2]);
139 #[stable(feature = "slice_splits", since = "1.5.0")]
141 pub fn split_first(&self) -> Option<(&T, &[T])> {
// `tail @ ..` binds the remainder of the slice (everything after `first`).
142 if let [first, tail @ ..] = self { Some((first, tail)) } else { None }
145 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
150 /// let x = &mut [0, 1, 2];
152 /// if let Some((first, elements)) = x.split_first_mut() {
157 /// assert_eq!(x, &[3, 4, 5]);
159 #[stable(feature = "slice_splits", since = "1.5.0")]
161 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
// Mutable analogue of `split_first`: the pattern splits `self` into two
// disjoint mutable borrows (head element and tail slice).
162 if let [first, tail @ ..] = self { Some((first, tail)) } else { None }
165 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
170 /// let x = &[0, 1, 2];
172 /// if let Some((last, elements)) = x.split_last() {
173 /// assert_eq!(last, &2);
174 /// assert_eq!(elements, &[0, 1]);
177 #[stable(feature = "slice_splits", since = "1.5.0")]
179 pub fn split_last(&self) -> Option<(&T, &[T])> {
// `init @ ..` binds everything before the final element.
180 if let [init @ .., last] = self { Some((last, init)) } else { None }
183 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
188 /// let x = &mut [0, 1, 2];
190 /// if let Some((last, elements)) = x.split_last_mut() {
195 /// assert_eq!(x, &[4, 5, 3]);
197 #[stable(feature = "slice_splits", since = "1.5.0")]
199 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
// Mutable analogue of `split_last`: disjoint mutable borrows of the final
// element and everything before it.
200 if let [init @ .., last] = self { Some((last, init)) } else { None }
203 /// Returns the last element of the slice, or `None` if it is empty.
208 /// let v = [10, 40, 30];
209 /// assert_eq!(Some(&30), v.last());
211 /// let w: &[i32] = &[];
212 /// assert_eq!(None, w.last());
214 #[stable(feature = "rust1", since = "1.0.0")]
216 pub fn last(&self) -> Option<&T> {
// `[.., last]` matches any non-empty slice and borrows its final element.
217 if let [.., last] = self { Some(last) } else { None }
220 /// Returns a mutable reference to the last item in the slice, or `None` if it is empty.
225 /// let x = &mut [0, 1, 2];
227 /// if let Some(last) = x.last_mut() {
230 /// assert_eq!(x, &[0, 1, 10]);
232 #[stable(feature = "rust1", since = "1.0.0")]
234 pub fn last_mut(&mut self) -> Option<&mut T> {
// Mutable analogue of `last`; same slice pattern, mutable borrow.
235 if let [.., last] = self { Some(last) } else { None }
238 /// Returns a reference to an element or subslice depending on the type of index.
241 /// - If given a position, returns a reference to the element at that
242 /// position or `None` if out of bounds.
243 /// - If given a range, returns the subslice corresponding to that range,
244 /// or `None` if out of bounds.
249 /// let v = [10, 40, 30];
250 /// assert_eq!(Some(&40), v.get(1));
251 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
252 /// assert_eq!(None, v.get(3));
253 /// assert_eq!(None, v.get(0..4));
255 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): the `where` clause and the body fall outside this excerpt.
257 pub fn get<I>(&self, index: I) -> Option<&I::Output>
264 /// Returns a mutable reference to an element or subslice depending on the
265 /// type of index (see [`get`]) or `None` if the index is out of bounds.
267 /// [`get`]: #method.get
272 /// let x = &mut [0, 1, 2];
274 /// if let Some(elem) = x.get_mut(1) {
277 /// assert_eq!(x, &[0, 42, 2]);
279 #[stable(feature = "rust1", since = "1.0.0")]
// NOTE(review): the `where` clause and the body fall outside this excerpt.
281 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
288 /// Returns a reference to an element or subslice, without doing bounds checking.
291 /// This is generally not recommended, use with caution!
292 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
293 /// even if the resulting reference is not used.
294 /// For a safe alternative see [`get`].
296 /// [`get`]: #method.get
297 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
302 /// let x = &[1, 2, 4];
305 /// assert_eq!(x.get_unchecked(1), &2);
308 #[stable(feature = "rust1", since = "1.0.0")]
310 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
314 // SAFETY: the caller must uphold the safety requirements for `get_unchecked`;
315 // the slice is dereferenceable because `self` is a safe reference.
316 // The returned pointer is safe because impls of `SliceIndex` have to guarantee that it is.
317 unsafe { &*index.get_unchecked(self) }
320 /// Returns a mutable reference to an element or subslice, without doing bounds checking.
323 /// This is generally not recommended, use with caution!
324 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
325 /// even if the resulting reference is not used.
326 /// For a safe alternative see [`get_mut`].
328 /// [`get_mut`]: #method.get_mut
329 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
334 /// let x = &mut [1, 2, 4];
337 /// let elem = x.get_unchecked_mut(1);
340 /// assert_eq!(x, &[1, 13, 4]);
342 #[stable(feature = "rust1", since = "1.0.0")]
344 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
348 // SAFETY: the caller must uphold the safety requirements for `get_unchecked_mut`;
349 // the slice is dereferenceable because `self` is a safe reference.
350 // The returned pointer is safe because impls of `SliceIndex` have to guarantee that it is.
351 unsafe { &mut *index.get_unchecked_mut(self) }
354 /// Returns a raw pointer to the slice's buffer.
356 /// The caller must ensure that the slice outlives the pointer this
357 /// function returns, or else it will end up pointing to garbage.
359 /// The caller must also ensure that the memory the pointer (non-transitively) points to
360 /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
361 /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
363 /// Modifying the container referenced by this slice may cause its buffer
364 /// to be reallocated, which would also make any pointers to it invalid.
369 /// let x = &[1, 2, 4];
370 /// let x_ptr = x.as_ptr();
373 /// for i in 0..x.len() {
374 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
379 /// [`as_mut_ptr`]: #method.as_mut_ptr
380 #[stable(feature = "rust1", since = "1.0.0")]
381 #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")]
383 pub const fn as_ptr(&self) -> *const T {
// Fat-to-thin pointer cast: keeps the data pointer, drops the length metadata.
384 self as *const [T] as *const T
387 /// Returns an unsafe mutable pointer to the slice's buffer.
389 /// The caller must ensure that the slice outlives the pointer this
390 /// function returns, or else it will end up pointing to garbage.
392 /// Modifying the container referenced by this slice may cause its buffer
393 /// to be reallocated, which would also make any pointers to it invalid.
398 /// let x = &mut [1, 2, 4];
399 /// let x_ptr = x.as_mut_ptr();
402 /// for i in 0..x.len() {
403 /// *x_ptr.add(i) += 2;
406 /// assert_eq!(x, &[3, 4, 6]);
408 #[stable(feature = "rust1", since = "1.0.0")]
410 pub fn as_mut_ptr(&mut self) -> *mut T {
// Fat-to-thin pointer cast: keeps the data pointer, drops the length metadata.
411 self as *mut [T] as *mut T
414 /// Returns the two raw pointers spanning the slice.
416 /// The returned range is half-open, which means that the end pointer
417 /// points *one past* the last element of the slice. This way, an empty
418 /// slice is represented by two equal pointers, and the difference between
419 /// the two pointers represents the size of the slice.
421 /// See [`as_ptr`] for warnings on using these pointers. The end pointer
422 /// requires extra caution, as it does not point to a valid element in the slice.
425 /// This function is useful for interacting with foreign interfaces which
426 /// use two pointers to refer to a range of elements in memory, as is common in C++.
429 /// It can also be useful to check if a pointer to an element refers to an
430 /// element of this slice:
433 /// #![feature(slice_ptr_range)]
435 /// let a = [1, 2, 3];
436 /// let x = &a[1] as *const _;
437 /// let y = &5 as *const _;
439 /// assert!(a.as_ptr_range().contains(&x));
440 /// assert!(!a.as_ptr_range().contains(&y));
443 /// [`as_ptr`]: #method.as_ptr
444 #[unstable(feature = "slice_ptr_range", issue = "65807")]
446 pub fn as_ptr_range(&self) -> Range<*const T> {
447 let start = self.as_ptr();
448 // SAFETY: The `add` here is safe, because:
450 // - Both pointers are part of the same object, as pointing directly
451 // past the object also counts.
453 // - The size of the slice is never larger than isize::MAX bytes, as
455 // - https://github.com/rust-lang/unsafe-code-guidelines/issues/102#issuecomment-473340447
456 // - https://doc.rust-lang.org/reference/behavior-considered-undefined.html
457 // - https://doc.rust-lang.org/core/slice/fn.from_raw_parts.html#safety
458 // (This doesn't seem normative yet, but the very same assumption is
459 // made in many places, including the Index implementation of slices.)
461 // - There is no wrapping around involved, as slices do not wrap past
462 // the end of the address space.
464 // See the documentation of pointer::add.
465 let end = unsafe { start.add(self.len()) };
// NOTE(review): the trailing `start..end` return expression falls outside
// this excerpt.
469 /// Returns the two unsafe mutable pointers spanning the slice.
471 /// The returned range is half-open, which means that the end pointer
472 /// points *one past* the last element of the slice. This way, an empty
473 /// slice is represented by two equal pointers, and the difference between
474 /// the two pointers represents the size of the slice.
476 /// See [`as_mut_ptr`] for warnings on using these pointers. The end
477 /// pointer requires extra caution, as it does not point to a valid element in the slice.
480 /// This function is useful for interacting with foreign interfaces which
481 /// use two pointers to refer to a range of elements in memory, as is common in C++.
484 /// [`as_mut_ptr`]: #method.as_mut_ptr
485 #[unstable(feature = "slice_ptr_range", issue = "65807")]
487 pub fn as_mut_ptr_range(&mut self) -> Range<*mut T> {
488 let start = self.as_mut_ptr();
489 // SAFETY: See as_ptr_range() above for why `add` here is safe.
490 let end = unsafe { start.add(self.len()) };
// NOTE(review): the trailing `start..end` return expression falls outside
// this excerpt.
494 /// Swaps two elements in the slice.
498 /// * a - The index of the first element
499 /// * b - The index of the second element
503 /// Panics if `a` or `b` are out of bounds.
508 /// let mut v = ["a", "b", "c", "d"];
510 /// assert!(v == ["a", "d", "c", "b"]);
512 #[stable(feature = "rust1", since = "1.0.0")]
514 pub fn swap(&mut self, a: usize, b: usize) {
515 // Can't take two mutable loans from one vector, so instead just cast
516 // them to their raw pointers to do the swap.
517 let pa: *mut T = &mut self[a];
518 let pb: *mut T = &mut self[b];
519 // SAFETY: `pa` and `pb` have been created from safe mutable references and refer
520 // to elements in the slice and therefore are guaranteed to be valid and aligned.
521 // Note that accessing the elements behind `a` and `b` is checked and will
522 // panic when out of bounds.
// NOTE(review): the raw-pointer swap itself (presumably `ptr::swap(pa, pb)`)
// falls outside this excerpt — confirm against the full source.
528 /// Reverses the order of elements in the slice, in place.
533 /// let mut v = [1, 2, 3];
535 /// assert!(v == [3, 2, 1]);
537 #[stable(feature = "rust1", since = "1.0.0")]
539 pub fn reverse(&mut self) {
// NOTE(review): several lines of this method are not visible in this
// excerpt — e.g. the binding of `ln` (used below, presumably `self.len()`),
// the loop increments, and the outer swap loop's scaffolding. Confirm
// against the full source before editing.
540 let mut i: usize = 0;
543 // For very small types, all the individual reads in the normal
544 // path perform poorly. We can do better, given efficient unaligned
545 // load/store, by loading a larger chunk and reversing a register.
547 // Ideally LLVM would do this for us, as it knows better than we do
548 // whether unaligned reads are efficient (since that changes between
549 // different ARM versions, for example) and what the best chunk size
550 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
551 // the loop, so we need to do this ourselves. (Hypothesis: reverse
552 // is troublesome because the sides can be aligned differently --
553 // will be, when the length is odd -- so there's no way of emitting
554 // pre- and postludes to use fully-aligned SIMD in the middle.)
556 let fast_unaligned = cfg!(any(target_arch = "x86", target_arch = "x86_64"));
558 if fast_unaligned && mem::size_of::<T>() == 1 {
559 // Use the llvm.bswap intrinsic to reverse u8s in a usize
560 let chunk = mem::size_of::<usize>();
561 while i + chunk - 1 < ln / 2 {
562 // SAFETY: There are several things to check here:
564 // - Note that `chunk` is either 4 or 8 due to the cfg check
565 // above. So `chunk - 1` is positive.
566 // - Indexing with index `i` is fine as the loop check guarantees
567 // `i + chunk - 1 < ln / 2`
568 // <=> `i < ln / 2 - (chunk - 1) < ln / 2 < ln`.
569 // - Indexing with index `ln - i - chunk = ln - (i + chunk)` is fine:
570 // - `i + chunk > 0` is trivially true.
571 // - The loop check guarantees:
572 // `i + chunk - 1 < ln / 2`
573 // <=> `i + chunk ≤ ln / 2 ≤ ln`, thus subtraction does not underflow.
574 // - The `read_unaligned` and `write_unaligned` calls are fine:
575 // - `pa` points to index `i` where `i < ln / 2 - (chunk - 1)`
576 // (see above) and `pb` points to index `ln - i - chunk`, so
577 // both are at least `chunk`
578 // many bytes away from the end of `self`.
579 // - Any initialized memory is valid `usize`.
581 let pa: *mut T = self.get_unchecked_mut(i);
582 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
583 let va = ptr::read_unaligned(pa as *mut usize);
584 let vb = ptr::read_unaligned(pb as *mut usize);
585 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
586 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
592 if fast_unaligned && mem::size_of::<T>() == 2 {
593 // Use rotate-by-16 to reverse u16s in a u32
594 let chunk = mem::size_of::<u32>() / 2;
595 while i + chunk - 1 < ln / 2 {
596 // SAFETY: An unaligned u32 can be read from `i` if `i + 1 < ln`
597 // (and obviously `i < ln`), because each element is 2 bytes and
600 // `i + chunk - 1 < ln / 2` # while condition
601 // `i + 2 - 1 < ln / 2`
604 // Since it's less than the length divided by 2, then it must be
607 // This also means that the condition `0 < i + chunk <= ln` is
608 // always respected, ensuring the `pb` pointer can be used
611 let pa: *mut T = self.get_unchecked_mut(i);
612 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
613 let va = ptr::read_unaligned(pa as *mut u32);
614 let vb = ptr::read_unaligned(pb as *mut u32);
615 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
616 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
623 // SAFETY: `i` is less than half the length of the slice so
624 // accessing `i` and `ln - i - 1` is safe (`i` starts at 0 and
625 // will not go further than `ln / 2 - 1`).
626 // The resulting pointers `pa` and `pb` are therefore valid and
627 // aligned, and can be read from and written to.
629 // Unsafe swap to avoid the bounds check in safe swap.
630 let pa: *mut T = self.get_unchecked_mut(i);
631 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
638 /// Returns an iterator over the slice.
643 /// let x = &[1, 2, 4];
644 /// let mut iterator = x.iter();
646 /// assert_eq!(iterator.next(), Some(&1));
647 /// assert_eq!(iterator.next(), Some(&2));
648 /// assert_eq!(iterator.next(), Some(&4));
649 /// assert_eq!(iterator.next(), None);
651 #[stable(feature = "rust1", since = "1.0.0")]
653 pub fn iter(&self) -> Iter<'_, T> {
654 let ptr = self.as_ptr();
655 // SAFETY: There are several things here:
657 // `ptr` has been obtained by `self.as_ptr()` where `self` is a valid
658 // reference thus it is non-null and safe to use and pass to
659 // `NonNull::new_unchecked` .
661 // Adding `self.len()` to the starting pointer gives a pointer
662 // at the end of `self`. `end` will never be dereferenced, only checked
663 // for direct pointer equality with `ptr` to check if the iterator is
666 // In the case of a ZST, the end pointer is just the start pointer plus
667 // the length, which also allows for the fast `ptr == end` check.
669 // See the `next_unchecked!` and `is_empty!` macros as well as the
670 // `post_inc_start` method for more information.
672 assume(!ptr.is_null());
674 let end = if mem::size_of::<T>() == 0 {
675 (ptr as *const u8).wrapping_add(self.len()) as *const T
680 Iter { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: marker::PhantomData }
684 /// Returns an iterator that allows modifying each value.
689 /// let x = &mut [1, 2, 4];
690 /// for elem in x.iter_mut() {
693 /// assert_eq!(x, &[3, 4, 6]);
695 #[stable(feature = "rust1", since = "1.0.0")]
697 pub fn iter_mut(&mut self) -> IterMut<'_, T> {
698 let ptr = self.as_mut_ptr();
699 // SAFETY: There are several things here:
701 // `ptr` has been obtained by `self.as_mut_ptr()` where `self` is a valid
702 // reference thus it is non-null and safe to use and pass to
703 // `NonNull::new_unchecked` .
705 // Adding `self.len()` to the starting pointer gives a pointer
706 // at the end of `self`. `end` will never be dereferenced, only checked
707 // for direct pointer equality with `ptr` to check if the iterator is
710 // In the case of a ZST, the end pointer is just the start pointer plus
711 // the length, which also allows for the fast `ptr == end` check.
713 // See the `next_unchecked!` and `is_empty!` macros as well as the
714 // `post_inc_start` method for more information.
716 assume(!ptr.is_null());
718 let end = if mem::size_of::<T>() == 0 {
719 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
724 IterMut { ptr: NonNull::new_unchecked(ptr), end, _marker: marker::PhantomData }
728 /// Returns an iterator over all contiguous windows of length
729 /// `size`. The windows overlap. If the slice is shorter than
730 /// `size`, the iterator returns no values.
734 /// Panics if `size` is 0.
739 /// let slice = ['r', 'u', 's', 't'];
740 /// let mut iter = slice.windows(2);
741 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
742 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
743 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
744 /// assert!(iter.next().is_none());
747 /// If the slice is shorter than `size`:
750 /// let slice = ['f', 'o', 'o'];
751 /// let mut iter = slice.windows(4);
752 /// assert!(iter.next().is_none());
754 #[stable(feature = "rust1", since = "1.0.0")]
756 pub fn windows(&self, size: usize) -> Windows<'_, T> {
// NOTE(review): the zero-size assertion documented under "Panics" above is
// not visible in this excerpt — confirm against the full source.
758 Windows { v: self, size }
761 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
762 /// beginning of the slice.
764 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
765 /// slice, then the last chunk will not have length `chunk_size`.
767 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
768 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the slice.
773 /// Panics if `chunk_size` is 0.
778 /// let slice = ['l', 'o', 'r', 'e', 'm'];
779 /// let mut iter = slice.chunks(2);
780 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
781 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
782 /// assert_eq!(iter.next().unwrap(), &['m']);
783 /// assert!(iter.next().is_none());
786 /// [`chunks_exact`]: #method.chunks_exact
787 /// [`rchunks`]: #method.rchunks
788 #[stable(feature = "rust1", since = "1.0.0")]
790 pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
// Zero-sized chunks are rejected up front (see "Panics" above).
791 assert_ne!(chunk_size, 0);
792 Chunks { v: self, chunk_size }
795 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
796 /// beginning of the slice.
798 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
799 /// length of the slice, then the last chunk will not have length `chunk_size`.
801 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
802 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
803 /// the end of the slice.
807 /// Panics if `chunk_size` is 0.
812 /// let v = &mut [0, 0, 0, 0, 0];
813 /// let mut count = 1;
815 /// for chunk in v.chunks_mut(2) {
816 /// for elem in chunk.iter_mut() {
821 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
824 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
825 /// [`rchunks_mut`]: #method.rchunks_mut
826 #[stable(feature = "rust1", since = "1.0.0")]
828 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> {
// Zero-sized chunks are rejected up front (see "Panics" above).
829 assert_ne!(chunk_size, 0);
830 ChunksMut { v: self, chunk_size }
833 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
834 /// beginning of the slice.
836 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
837 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
838 /// from the `remainder` function of the iterator.
840 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
841 /// resulting code better than in the case of [`chunks`].
843 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
844 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
848 /// Panics if `chunk_size` is 0.
853 /// let slice = ['l', 'o', 'r', 'e', 'm'];
854 /// let mut iter = slice.chunks_exact(2);
855 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
856 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
857 /// assert!(iter.next().is_none());
858 /// assert_eq!(iter.remainder(), &['m']);
861 /// [`chunks`]: #method.chunks
862 /// [`rchunks_exact`]: #method.rchunks_exact
863 #[stable(feature = "chunks_exact", since = "1.31.0")]
865 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
866 assert_ne!(chunk_size, 0);
// Split off the trailing remainder so that every yielded chunk is exactly
// `chunk_size` elements long.
867 let rem = self.len() % chunk_size;
868 let len = self.len() - rem;
869 let (fst, snd) = self.split_at(len);
870 ChunksExact { v: fst, rem: snd, chunk_size }
873 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
874 /// beginning of the slice.
876 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
877 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
878 /// retrieved from the `into_remainder` function of the iterator.
880 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
881 /// resulting code better than in the case of [`chunks_mut`].
883 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
884 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of the slice.
889 /// Panics if `chunk_size` is 0.
894 /// let v = &mut [0, 0, 0, 0, 0];
895 /// let mut count = 1;
897 /// for chunk in v.chunks_exact_mut(2) {
898 /// for elem in chunk.iter_mut() {
903 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
906 /// [`chunks_mut`]: #method.chunks_mut
907 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
908 #[stable(feature = "chunks_exact", since = "1.31.0")]
910 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> {
911 assert_ne!(chunk_size, 0);
// The trailing remainder is split off so yielded chunks are always full.
912 let rem = self.len() % chunk_size;
913 let len = self.len() - rem;
914 let (fst, snd) = self.split_at_mut(len);
915 ChunksExactMut { v: fst, rem: snd, chunk_size }
918 /// Returns an iterator over `N` elements of the slice at a time, starting at the
919 /// beginning of the slice.
921 /// The chunks are slices and do not overlap. If `N` does not divide the length of the
922 /// slice, then the last up to `N-1` elements will be omitted and can be retrieved
923 /// from the `remainder` function of the iterator.
925 /// This method is the const generic equivalent of [`chunks_exact`].
929 /// Panics if `N` is 0. This check will most probably get changed to a compile time
930 /// error before this method gets stabilized.
935 /// #![feature(array_chunks)]
936 /// let slice = ['l', 'o', 'r', 'e', 'm'];
937 /// let mut iter = slice.array_chunks();
938 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
939 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
940 /// assert!(iter.next().is_none());
941 /// assert_eq!(iter.remainder(), &['m']);
944 /// [`chunks_exact`]: #method.chunks_exact
945 #[unstable(feature = "array_chunks", issue = "74985")]
947 pub fn array_chunks<const N: usize>(&self) -> ArrayChunks<'_, T, N> {
// NOTE(review): the `N != 0` assertion documented under "Panics" above is
// not visible in this excerpt — confirm against the full source.
949 let len = self.len() / N;
950 let (fst, snd) = self.split_at(len * N);
951 // SAFETY: We cast a slice of `len * N` elements into
952 // a slice of `len` many `N` elements chunks.
953 let array_slice: &[[T; N]] = unsafe { from_raw_parts(fst.as_ptr().cast(), len) };
954 ArrayChunks { iter: array_slice.iter(), rem: snd }
957 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end of the slice.
960 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
961 /// slice, then the last chunk will not have length `chunk_size`.
963 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
964 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning of the slice.
969 /// Panics if `chunk_size` is 0.
974 /// let slice = ['l', 'o', 'r', 'e', 'm'];
975 /// let mut iter = slice.rchunks(2);
976 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
977 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
978 /// assert_eq!(iter.next().unwrap(), &['l']);
979 /// assert!(iter.next().is_none());
982 /// [`rchunks_exact`]: #method.rchunks_exact
983 /// [`chunks`]: #method.chunks
984 #[stable(feature = "rchunks", since = "1.31.0")]
986 pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
// Zero-sized chunks are rejected up front (see "Panics" above).
987 assert!(chunk_size != 0);
988 RChunks { v: self, chunk_size }
991 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end of the slice.
994 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
995 /// length of the slice, then the last chunk will not have length `chunk_size`.
997 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
998 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
999 /// beginning of the slice.
1003 /// Panics if `chunk_size` is 0.
1008 /// let v = &mut [0, 0, 0, 0, 0];
1009 /// let mut count = 1;
1011 /// for chunk in v.rchunks_mut(2) {
1012 /// for elem in chunk.iter_mut() {
1017 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
1020 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
1021 /// [`chunks_mut`]: #method.chunks_mut
1022 #[stable(feature = "rchunks", since = "1.31.0")]
1024 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> {
// Zero-sized chunks are rejected up front (see "Panics" above).
1025 assert!(chunk_size != 0);
1026 RChunksMut { v: self, chunk_size }
1029 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
1030 /// end of the slice.
1032 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
1033 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
1034 /// from the `remainder` function of the iterator.
1036 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
1037 /// resulting code better than in the case of [`chunks`].
1039 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
1040 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the slice.
1045 /// Panics if `chunk_size` is 0.
1050 /// let slice = ['l', 'o', 'r', 'e', 'm'];
1051 /// let mut iter = slice.rchunks_exact(2);
1052 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
1053 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
1054 /// assert!(iter.next().is_none());
1055 /// assert_eq!(iter.remainder(), &['l']);
1058 /// [`chunks`]: #method.chunks
1059 /// [`rchunks`]: #method.rchunks
1060 /// [`chunks_exact`]: #method.chunks_exact
1061 #[stable(feature = "rchunks", since = "1.31.0")]
1063 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> {
1064 assert!(chunk_size != 0);
// Chunks are taken from the end, so the remainder is the *front* of the
// slice (hence `split_at(rem)` with the remainder first).
1065 let rem = self.len() % chunk_size;
1066 let (fst, snd) = self.split_at(rem);
1067 RChunksExact { v: snd, rem: fst, chunk_size }
1070 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end of the slice.
1073 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
1074 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
1075 /// retrieved from the `into_remainder` function of the iterator.
1077 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
1078 /// resulting code better than in the case of [`chunks_mut`].
1080 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
1081 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning of the slice.
1086 /// Panics if `chunk_size` is 0.
1091 /// let v = &mut [0, 0, 0, 0, 0];
1092 /// let mut count = 1;
1094 /// for chunk in v.rchunks_exact_mut(2) {
1095 /// for elem in chunk.iter_mut() {
1100 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
1103 /// [`chunks_mut`]: #method.chunks_mut
1104 /// [`rchunks_mut`]: #method.rchunks_mut
1105 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
1106 #[stable(feature = "rchunks", since = "1.31.0")]
1108 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> {
1109 assert!(chunk_size != 0);
// As with `rchunks_exact`, the remainder is the front of the slice.
1110 let rem = self.len() % chunk_size;
1111 let (fst, snd) = self.split_at_mut(rem);
1112 RChunksExactMut { v: snd, rem: fst, chunk_size }
1115 /// Divides one slice into two at an index.
1117 /// The first will contain all indices from `[0, mid)` (excluding
1118 /// the index `mid` itself) and the second will contain all
1119 /// indices from `[mid, len)` (excluding the index `len` itself).
1123 /// Panics if `mid > len`.
1128 /// let v = [1, 2, 3, 4, 5, 6];
1131 /// let (left, right) = v.split_at(0);
1132 /// assert!(left == []);
1133 /// assert!(right == [1, 2, 3, 4, 5, 6]);
1137 /// let (left, right) = v.split_at(2);
1138 /// assert!(left == [1, 2]);
1139 /// assert!(right == [3, 4, 5, 6]);
1143 /// let (left, right) = v.split_at(6);
1144 /// assert!(left == [1, 2, 3, 4, 5, 6]);
1145 /// assert!(right == []);
1148 #[stable(feature = "rust1", since = "1.0.0")]
1150 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
1151 (&self[..mid], &self[mid..])
1154 /// Divides one mutable slice into two at an index.
1156 /// The first will contain all indices from `[0, mid)` (excluding
1157 /// the index `mid` itself) and the second will contain all
1158 /// indices from `[mid, len)` (excluding the index `len` itself).
1162 /// Panics if `mid > len`.
1167 /// let mut v = [1, 0, 3, 0, 5, 6];
1168 /// // scoped to restrict the lifetime of the borrows
1170 /// let (left, right) = v.split_at_mut(2);
1171 /// assert!(left == [1, 0]);
1172 /// assert!(right == [3, 0, 5, 6]);
1176 /// assert!(v == [1, 2, 3, 4, 5, 6]);
1178 #[stable(feature = "rust1", since = "1.0.0")]
1180 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
1181 let len = self.len();
1182 let ptr = self.as_mut_ptr();
1184 // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which
1185 // fulfills the requirements of `from_raw_parts_mut`.
1187 assert!(mid <= len);
1189 (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
1193 /// Returns an iterator over subslices separated by elements that match
1194 /// `pred`. The matched element is not contained in the subslices.
1199 /// let slice = [10, 40, 33, 20];
1200 /// let mut iter = slice.split(|num| num % 3 == 0);
1202 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1203 /// assert_eq!(iter.next().unwrap(), &[20]);
1204 /// assert!(iter.next().is_none());
1207 /// If the first element is matched, an empty slice will be the first item
1208 /// returned by the iterator. Similarly, if the last element in the slice
1209 /// is matched, an empty slice will be the last item returned by the
1213 /// let slice = [10, 40, 33];
1214 /// let mut iter = slice.split(|num| num % 3 == 0);
1216 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1217 /// assert_eq!(iter.next().unwrap(), &[]);
1218 /// assert!(iter.next().is_none());
1221 /// If two matched elements are directly adjacent, an empty slice will be
1222 /// present between them:
1225 /// let slice = [10, 6, 33, 20];
1226 /// let mut iter = slice.split(|num| num % 3 == 0);
1228 /// assert_eq!(iter.next().unwrap(), &[10]);
1229 /// assert_eq!(iter.next().unwrap(), &[]);
1230 /// assert_eq!(iter.next().unwrap(), &[20]);
1231 /// assert!(iter.next().is_none());
1233 #[stable(feature = "rust1", since = "1.0.0")]
1235 pub fn split<F>(&self, pred: F) -> Split<'_, T, F>
1237 F: FnMut(&T) -> bool,
1239 Split { v: self, pred, finished: false }
1242 /// Returns an iterator over mutable subslices separated by elements that
1243 /// match `pred`. The matched element is not contained in the subslices.
1248 /// let mut v = [10, 40, 30, 20, 60, 50];
1250 /// for group in v.split_mut(|num| *num % 3 == 0) {
1253 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1255 #[stable(feature = "rust1", since = "1.0.0")]
1257 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<'_, T, F>
1259 F: FnMut(&T) -> bool,
1261 SplitMut { v: self, pred, finished: false }
1264 /// Returns an iterator over subslices separated by elements that match
1265 /// `pred`. The matched element is contained in the end of the previous
1266 /// subslice as a terminator.
1271 /// #![feature(split_inclusive)]
1272 /// let slice = [10, 40, 33, 20];
1273 /// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
1275 /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
1276 /// assert_eq!(iter.next().unwrap(), &[20]);
1277 /// assert!(iter.next().is_none());
1280 /// If the last element of the slice is matched,
1281 /// that element will be considered the terminator of the preceding slice.
1282 /// That slice will be the last item returned by the iterator.
1285 /// #![feature(split_inclusive)]
1286 /// let slice = [3, 10, 40, 33];
1287 /// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
1289 /// assert_eq!(iter.next().unwrap(), &[3]);
1290 /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
1291 /// assert!(iter.next().is_none());
1293 #[unstable(feature = "split_inclusive", issue = "72360")]
1295 pub fn split_inclusive<F>(&self, pred: F) -> SplitInclusive<'_, T, F>
1297 F: FnMut(&T) -> bool,
1299 SplitInclusive { v: self, pred, finished: false }
1302 /// Returns an iterator over mutable subslices separated by elements that
1303 /// match `pred`. The matched element is contained in the previous
1304 /// subslice as a terminator.
1309 /// #![feature(split_inclusive)]
1310 /// let mut v = [10, 40, 30, 20, 60, 50];
1312 /// for group in v.split_inclusive_mut(|num| *num % 3 == 0) {
1313 /// let terminator_idx = group.len()-1;
1314 /// group[terminator_idx] = 1;
1316 /// assert_eq!(v, [10, 40, 1, 20, 1, 1]);
1318 #[unstable(feature = "split_inclusive", issue = "72360")]
1320 pub fn split_inclusive_mut<F>(&mut self, pred: F) -> SplitInclusiveMut<'_, T, F>
1322 F: FnMut(&T) -> bool,
1324 SplitInclusiveMut { v: self, pred, finished: false }
1327 /// Returns an iterator over subslices separated by elements that match
1328 /// `pred`, starting at the end of the slice and working backwards.
1329 /// The matched element is not contained in the subslices.
1334 /// let slice = [11, 22, 33, 0, 44, 55];
1335 /// let mut iter = slice.rsplit(|num| *num == 0);
1337 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1338 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1339 /// assert_eq!(iter.next(), None);
1342 /// As with `split()`, if the first or last element is matched, an empty
1343 /// slice will be the first (or last) item returned by the iterator.
1346 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1347 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1348 /// assert_eq!(it.next().unwrap(), &[]);
1349 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1350 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1351 /// assert_eq!(it.next().unwrap(), &[]);
1352 /// assert_eq!(it.next(), None);
1354 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1356 pub fn rsplit<F>(&self, pred: F) -> RSplit<'_, T, F>
1358 F: FnMut(&T) -> bool,
1360 RSplit { inner: self.split(pred) }
1363 /// Returns an iterator over mutable subslices separated by elements that
1364 /// match `pred`, starting at the end of the slice and working
1365 /// backwards. The matched element is not contained in the subslices.
1370 /// let mut v = [100, 400, 300, 200, 600, 500];
1372 /// let mut count = 0;
1373 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1375 /// group[0] = count;
1377 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1380 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1382 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<'_, T, F>
1384 F: FnMut(&T) -> bool,
1386 RSplitMut { inner: self.split_mut(pred) }
1389 /// Returns an iterator over subslices separated by elements that match
1390 /// `pred`, limited to returning at most `n` items. The matched element is
1391 /// not contained in the subslices.
1393 /// The last element returned, if any, will contain the remainder of the
1398 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1399 /// `[20, 60, 50]`):
1402 /// let v = [10, 40, 30, 20, 60, 50];
1404 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1405 /// println!("{:?}", group);
1408 #[stable(feature = "rust1", since = "1.0.0")]
1410 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<'_, T, F>
1412 F: FnMut(&T) -> bool,
1414 SplitN { inner: GenericSplitN { iter: self.split(pred), count: n } }
1417 /// Returns an iterator over subslices separated by elements that match
1418 /// `pred`, limited to returning at most `n` items. The matched element is
1419 /// not contained in the subslices.
1421 /// The last element returned, if any, will contain the remainder of the
1427 /// let mut v = [10, 40, 30, 20, 60, 50];
1429 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1432 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1434 #[stable(feature = "rust1", since = "1.0.0")]
1436 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<'_, T, F>
1438 F: FnMut(&T) -> bool,
1440 SplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), count: n } }
1443 /// Returns an iterator over subslices separated by elements that match
1444 /// `pred` limited to returning at most `n` items. This starts at the end of
1445 /// the slice and works backwards. The matched element is not contained in
1448 /// The last element returned, if any, will contain the remainder of the
1453 /// Print the slice split once, starting from the end, by numbers divisible
1454 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1457 /// let v = [10, 40, 30, 20, 60, 50];
1459 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1460 /// println!("{:?}", group);
1463 #[stable(feature = "rust1", since = "1.0.0")]
1465 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<'_, T, F>
1467 F: FnMut(&T) -> bool,
1469 RSplitN { inner: GenericSplitN { iter: self.rsplit(pred), count: n } }
1472 /// Returns an iterator over subslices separated by elements that match
1473 /// `pred` limited to returning at most `n` items. This starts at the end of
1474 /// the slice and works backwards. The matched element is not contained in
1477 /// The last element returned, if any, will contain the remainder of the
1483 /// let mut s = [10, 40, 30, 20, 60, 50];
1485 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1488 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1490 #[stable(feature = "rust1", since = "1.0.0")]
1492 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<'_, T, F>
1494 F: FnMut(&T) -> bool,
1496 RSplitNMut { inner: GenericSplitN { iter: self.rsplit_mut(pred), count: n } }
1499 /// Returns `true` if the slice contains an element with the given value.
1504 /// let v = [10, 40, 30];
1505 /// assert!(v.contains(&30));
1506 /// assert!(!v.contains(&50));
1509 /// If you do not have an `&T`, but just an `&U` such that `T: Borrow<U>`
1510 /// (e.g. `String: Borrow<str>`), you can use `iter().any`:
1513 /// let v = [String::from("hello"), String::from("world")]; // slice of `String`
1514 /// assert!(v.iter().any(|e| e == "hello")); // search with `&str`
1515 /// assert!(!v.iter().any(|e| e == "hi"));
1517 #[stable(feature = "rust1", since = "1.0.0")]
1518 pub fn contains(&self, x: &T) -> bool
1522 x.slice_contains(self)
1525 /// Returns `true` if `needle` is a prefix of the slice.
1530 /// let v = [10, 40, 30];
1531 /// assert!(v.starts_with(&[10]));
1532 /// assert!(v.starts_with(&[10, 40]));
1533 /// assert!(!v.starts_with(&[50]));
1534 /// assert!(!v.starts_with(&[10, 50]));
1537 /// Always returns `true` if `needle` is an empty slice:
1540 /// let v = &[10, 40, 30];
1541 /// assert!(v.starts_with(&[]));
1542 /// let v: &[u8] = &[];
1543 /// assert!(v.starts_with(&[]));
1545 #[stable(feature = "rust1", since = "1.0.0")]
1546 pub fn starts_with(&self, needle: &[T]) -> bool
1550 let n = needle.len();
1551 self.len() >= n && needle == &self[..n]
1554 /// Returns `true` if `needle` is a suffix of the slice.
1559 /// let v = [10, 40, 30];
1560 /// assert!(v.ends_with(&[30]));
1561 /// assert!(v.ends_with(&[40, 30]));
1562 /// assert!(!v.ends_with(&[50]));
1563 /// assert!(!v.ends_with(&[50, 30]));
1566 /// Always returns `true` if `needle` is an empty slice:
1569 /// let v = &[10, 40, 30];
1570 /// assert!(v.ends_with(&[]));
1571 /// let v: &[u8] = &[];
1572 /// assert!(v.ends_with(&[]));
1574 #[stable(feature = "rust1", since = "1.0.0")]
1575 pub fn ends_with(&self, needle: &[T]) -> bool
1579 let (m, n) = (self.len(), needle.len());
1580 m >= n && needle == &self[m - n..]
1583 /// Returns a subslice with the prefix removed.
1585 /// This method returns [`None`] if slice does not start with `prefix`.
1586 /// Also it returns the original slice if `prefix` is an empty slice.
1591 /// #![feature(slice_strip)]
1592 /// let v = &[10, 40, 30];
1593 /// assert_eq!(v.strip_prefix(&[10]), Some(&[40, 30][..]));
1594 /// assert_eq!(v.strip_prefix(&[10, 40]), Some(&[30][..]));
1595 /// assert_eq!(v.strip_prefix(&[50]), None);
1596 /// assert_eq!(v.strip_prefix(&[10, 50]), None);
1598 #[must_use = "returns the subslice without modifying the original"]
1599 #[unstable(feature = "slice_strip", issue = "73413")]
1600 pub fn strip_prefix(&self, prefix: &[T]) -> Option<&[T]>
1604 let n = prefix.len();
1605 if n <= self.len() {
1606 let (head, tail) = self.split_at(n);
1614 /// Returns a subslice with the suffix removed.
1616 /// This method returns [`None`] if slice does not end with `suffix`.
1617 /// Also it returns the original slice if `suffix` is an empty slice
1622 /// #![feature(slice_strip)]
1623 /// let v = &[10, 40, 30];
1624 /// assert_eq!(v.strip_suffix(&[30]), Some(&[10, 40][..]));
1625 /// assert_eq!(v.strip_suffix(&[40, 30]), Some(&[10][..]));
1626 /// assert_eq!(v.strip_suffix(&[50]), None);
1627 /// assert_eq!(v.strip_suffix(&[50, 30]), None);
1629 #[must_use = "returns the subslice without modifying the original"]
1630 #[unstable(feature = "slice_strip", issue = "73413")]
1631 pub fn strip_suffix(&self, suffix: &[T]) -> Option<&[T]>
1635 let (len, n) = (self.len(), suffix.len());
1637 let (head, tail) = self.split_at(len - n);
1645 /// Binary searches this sorted slice for a given element.
1647 /// If the value is found then [`Result::Ok`] is returned, containing the
1648 /// index of the matching element. If there are multiple matches, then any
1649 /// one of the matches could be returned. If the value is not found then
1650 /// [`Result::Err`] is returned, containing the index where a matching
1651 /// element could be inserted while maintaining sorted order.
1655 /// Looks up a series of four elements. The first is found, with a
1656 /// uniquely determined position; the second and third are not
1657 /// found; the fourth could match any position in `[1, 4]`.
1660 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1662 /// assert_eq!(s.binary_search(&13), Ok(9));
1663 /// assert_eq!(s.binary_search(&4), Err(7));
1664 /// assert_eq!(s.binary_search(&100), Err(13));
1665 /// let r = s.binary_search(&1);
1666 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1669 /// If you want to insert an item to a sorted vector, while maintaining
1673 /// let mut s = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1675 /// let idx = s.binary_search(&num).unwrap_or_else(|x| x);
1676 /// s.insert(idx, num);
1677 /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
1679 #[stable(feature = "rust1", since = "1.0.0")]
1680 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
1684 self.binary_search_by(|p| p.cmp(x))
1687 /// Binary searches this sorted slice with a comparator function.
1689 /// The comparator function should implement an order consistent
1690 /// with the sort order of the underlying slice, returning an
1691 /// order code that indicates whether its argument is `Less`,
1692 /// `Equal` or `Greater` the desired target.
1694 /// If the value is found then [`Result::Ok`] is returned, containing the
1695 /// index of the matching element. If there are multiple matches, then any
1696 /// one of the matches could be returned. If the value is not found then
1697 /// [`Result::Err`] is returned, containing the index where a matching
1698 /// element could be inserted while maintaining sorted order.
1702 /// Looks up a series of four elements. The first is found, with a
1703 /// uniquely determined position; the second and third are not
1704 /// found; the fourth could match any position in `[1, 4]`.
1707 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1710 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1712 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1714 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1716 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1717 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1719 #[stable(feature = "rust1", since = "1.0.0")]
1721 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1723 F: FnMut(&'a T) -> Ordering,
1726 let mut size = s.len();
1730 let mut base = 0usize;
1732 let half = size / 2;
1733 let mid = base + half;
1734 // SAFETY: the call is made safe by the following inconstants:
1735 // - `mid >= 0`: by definition
1736 // - `mid < size`: `mid = size / 2 + size / 4 + size / 8 ...`
1737 let cmp = f(unsafe { s.get_unchecked(mid) });
1738 base = if cmp == Greater { base } else { mid };
1741 // SAFETY: base is always in [0, size) because base <= mid.
1742 let cmp = f(unsafe { s.get_unchecked(base) });
1743 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1746 /// Binary searches this sorted slice with a key extraction function.
1748 /// Assumes that the slice is sorted by the key, for instance with
1749 /// [`sort_by_key`] using the same key extraction function.
1751 /// If the value is found then [`Result::Ok`] is returned, containing the
1752 /// index of the matching element. If there are multiple matches, then any
1753 /// one of the matches could be returned. If the value is not found then
1754 /// [`Result::Err`] is returned, containing the index where a matching
1755 /// element could be inserted while maintaining sorted order.
1757 /// [`sort_by_key`]: #method.sort_by_key
1761 /// Looks up a series of four elements in a slice of pairs sorted by
1762 /// their second elements. The first is found, with a uniquely
1763 /// determined position; the second and third are not found; the
1764 /// fourth could match any position in `[1, 4]`.
1767 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1768 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1769 /// (1, 21), (2, 34), (4, 55)];
1771 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1772 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1773 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1774 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1775 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1777 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1779 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1781 F: FnMut(&'a T) -> B,
1784 self.binary_search_by(|k| f(k).cmp(b))
1787 /// Sorts the slice, but may not preserve the order of equal elements.
1789 /// This sort is unstable (i.e., may reorder equal elements), in-place
1790 /// (i.e., does not allocate), and *O*(*n* \* log(*n*)) worst-case.
1792 /// # Current implementation
1794 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1795 /// which combines the fast average case of randomized quicksort with the fast worst case of
1796 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1797 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1798 /// deterministic behavior.
1800 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1801 /// slice consists of several concatenated sorted sequences.
1806 /// let mut v = [-5, 4, 1, -3, 2];
1808 /// v.sort_unstable();
1809 /// assert!(v == [-5, -3, 1, 2, 4]);
1812 /// [pdqsort]: https://github.com/orlp/pdqsort
1813 #[stable(feature = "sort_unstable", since = "1.20.0")]
1815 pub fn sort_unstable(&mut self)
1819 sort::quicksort(self, |a, b| a.lt(b));
1822 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1825 /// This sort is unstable (i.e., may reorder equal elements), in-place
1826 /// (i.e., does not allocate), and *O*(*n* \* log(*n*)) worst-case.
1828 /// The comparator function must define a total ordering for the elements in the slice. If
1829 /// the ordering is not total, the order of the elements is unspecified. An order is a
1830 /// total order if it is (for all a, b and c):
1832 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1833 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1835 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1836 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1839 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1840 /// floats.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap());
1841 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1844 /// # Current implementation
1846 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1847 /// which combines the fast average case of randomized quicksort with the fast worst case of
1848 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1849 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1850 /// deterministic behavior.
1852 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1853 /// slice consists of several concatenated sorted sequences.
1858 /// let mut v = [5, 4, 1, 3, 2];
1859 /// v.sort_unstable_by(|a, b| a.cmp(b));
1860 /// assert!(v == [1, 2, 3, 4, 5]);
1862 /// // reverse sorting
1863 /// v.sort_unstable_by(|a, b| b.cmp(a));
1864 /// assert!(v == [5, 4, 3, 2, 1]);
1867 /// [pdqsort]: https://github.com/orlp/pdqsort
1868 #[stable(feature = "sort_unstable", since = "1.20.0")]
1870 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1872 F: FnMut(&T, &T) -> Ordering,
1874 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1877 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1880 /// This sort is unstable (i.e., may reorder equal elements), in-place
1881 /// (i.e., does not allocate), and *O*(m \* *n* \* log(*n*)) worst-case, where the key function is
1884 /// # Current implementation
1886 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1887 /// which combines the fast average case of randomized quicksort with the fast worst case of
1888 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1889 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1890 /// deterministic behavior.
1892 /// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key)
1893 /// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in
1894 /// cases where the key function is expensive.
1899 /// let mut v = [-5i32, 4, 1, -3, 2];
1901 /// v.sort_unstable_by_key(|k| k.abs());
1902 /// assert!(v == [1, 2, -3, 4, -5]);
1905 /// [pdqsort]: https://github.com/orlp/pdqsort
1906 #[stable(feature = "sort_unstable", since = "1.20.0")]
1908 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1913 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1916 /// Reorder the slice such that the element at `index` is at its final sorted position.
1918 /// This reordering has the additional property that any value at position `i < index` will be
1919 /// less than or equal to any value at a position `j > index`. Additionally, this reordering is
1920 /// unstable (i.e. any number of equal elements may end up at position `index`), in-place
1921 /// (i.e. does not allocate), and *O*(*n*) worst-case. This function is also/ known as "kth
1922 /// element" in other libraries. It returns a triplet of the following values: all elements less
1923 /// than the one at the given index, the value at the given index, and all elements greater than
1924 /// the one at the given index.
1926 /// # Current implementation
1928 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1929 /// used for [`sort_unstable`].
1931 /// [`sort_unstable`]: #method.sort_unstable
1935 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1940 /// #![feature(slice_partition_at_index)]
1942 /// let mut v = [-5i32, 4, 1, -3, 2];
1944 /// // Find the median
1945 /// v.partition_at_index(2);
1947 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1948 /// // about the specified index.
1949 /// assert!(v == [-3, -5, 1, 2, 4] ||
1950 /// v == [-5, -3, 1, 2, 4] ||
1951 /// v == [-3, -5, 1, 4, 2] ||
1952 /// v == [-5, -3, 1, 4, 2]);
1954 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1956 pub fn partition_at_index(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T])
1960 let mut f = |a: &T, b: &T| a.lt(b);
1961 sort::partition_at_index(self, index, &mut f)
1964 /// Reorder the slice with a comparator function such that the element at `index` is at its
1965 /// final sorted position.
1967 /// This reordering has the additional property that any value at position `i < index` will be
1968 /// less than or equal to any value at a position `j > index` using the comparator function.
1969 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1970 /// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
1971 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1972 /// values: all elements less than the one at the given index, the value at the given index,
1973 /// and all elements greater than the one at the given index, using the provided comparator
1976 /// # Current implementation
1978 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1979 /// used for [`sort_unstable`].
1981 /// [`sort_unstable`]: #method.sort_unstable
1985 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1990 /// #![feature(slice_partition_at_index)]
1992 /// let mut v = [-5i32, 4, 1, -3, 2];
1994 /// // Find the median as if the slice were sorted in descending order.
1995 /// v.partition_at_index_by(2, |a, b| b.cmp(a));
1997 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1998 /// // about the specified index.
1999 /// assert!(v == [2, 4, 1, -5, -3] ||
2000 /// v == [2, 4, 1, -3, -5] ||
2001 /// v == [4, 2, 1, -5, -3] ||
2002 /// v == [4, 2, 1, -3, -5]);
2004 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
2006 pub fn partition_at_index_by<F>(
2010 ) -> (&mut [T], &mut T, &mut [T])
2012 F: FnMut(&T, &T) -> Ordering,
2014 let mut f = |a: &T, b: &T| compare(a, b) == Less;
2015 sort::partition_at_index(self, index, &mut f)
2018 /// Reorder the slice with a key extraction function such that the element at `index` is at its
2019 /// final sorted position.
2021 /// This reordering has the additional property that any value at position `i < index` will be
2022 /// less than or equal to any value at a position `j > index` using the key extraction function.
2023 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
2024 /// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
2025 /// is also known as "kth element" in other libraries. It returns a triplet of the following
2026 /// values: all elements less than the one at the given index, the value at the given index, and
2027 /// all elements greater than the one at the given index, using the provided key extraction
2030 /// # Current implementation
2032 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
2033 /// used for [`sort_unstable`].
2035 /// [`sort_unstable`]: #method.sort_unstable
2039 /// Panics when `index >= len()`, meaning it always panics on empty slices.
2044 /// #![feature(slice_partition_at_index)]
2046 /// let mut v = [-5i32, 4, 1, -3, 2];
2048 /// // Return the median as if the array were sorted according to absolute value.
2049 /// v.partition_at_index_by_key(2, |a| a.abs());
2051 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
2052 /// // about the specified index.
2053 /// assert!(v == [1, 2, -3, 4, -5] ||
2054 /// v == [1, 2, -3, -5, 4] ||
2055 /// v == [2, 1, -3, 4, -5] ||
2056 /// v == [2, 1, -3, -5, 4]);
2058 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
2060 pub fn partition_at_index_by_key<K, F>(
2064 ) -> (&mut [T], &mut T, &mut [T])
2069 let mut g = |a: &T, b: &T| f(a).lt(&f(b));
2070 sort::partition_at_index(self, index, &mut g)
2073 /// Moves all consecutive repeated elements to the end of the slice according to the
2074 /// [`PartialEq`] trait implementation.
2076 /// Returns two slices. The first contains no consecutive repeated elements.
2077 /// The second contains all the duplicates in no specified order.
2079 /// If the slice is sorted, the first returned slice contains no duplicates.
2084 /// #![feature(slice_partition_dedup)]
2086 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
2088 /// let (dedup, duplicates) = slice.partition_dedup();
2090 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
2091 /// assert_eq!(duplicates, [2, 3, 1]);
2093 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
2095 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
2099 self.partition_dedup_by(|a, b| a == b)
2102 /// Moves all but the first of consecutive elements to the end of the slice satisfying
2103 /// a given equality relation.
2105 /// Returns two slices. The first contains no consecutive repeated elements.
2106 /// The second contains all the duplicates in no specified order.
2108 /// The `same_bucket` function is passed references to two elements from the slice and
2109 /// must determine if the elements compare equal. The elements are passed in opposite order
2110 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
2111 /// at the end of the slice.
2113 /// If the slice is sorted, the first returned slice contains no duplicates.
2118 /// #![feature(slice_partition_dedup)]
2120 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
2122 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
2124 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
2125 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
2127 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
2129 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
2131 F: FnMut(&mut T, &mut T) -> bool,
2133 // Although we have a mutable reference to `self`, we cannot make
2134 // *arbitrary* changes. The `same_bucket` calls could panic, so we
2135 // must ensure that the slice is in a valid state at all times.
2137 // The way that we handle this is by using swaps; we iterate
2138 // over all the elements, swapping as we go so that at the end
2139 // the elements we wish to keep are in the front, and those we
2140 // wish to reject are at the back. We can then split the slice.
2141 // This operation is still `O(n)`.
2143 // Example: We start in this state, where `r` represents "next
2144 // read" and `w` represents "next write".
2147 // +---+---+---+---+---+---+
2148 // | 0 | 1 | 1 | 2 | 3 | 3 |
2149 // +---+---+---+---+---+---+
2152 // Comparing self[r] against self[w-1], this is not a duplicate, so
2153 // we swap self[r] and self[w] (no effect as r==w) and then increment both
2154 // r and w, leaving us with:
2157 // +---+---+---+---+---+---+
2158 // | 0 | 1 | 1 | 2 | 3 | 3 |
2159 // +---+---+---+---+---+---+
2162 // Comparing self[r] against self[w-1], this value is a duplicate,
2163 // so we increment `r` but leave everything else unchanged:
2166 // +---+---+---+---+---+---+
2167 // | 0 | 1 | 1 | 2 | 3 | 3 |
2168 // +---+---+---+---+---+---+
2171 // Comparing self[r] against self[w-1], this is not a duplicate,
2172 // so swap self[r] and self[w] and advance r and w:
2175 // +---+---+---+---+---+---+
2176 // | 0 | 1 | 2 | 1 | 3 | 3 |
2177 // +---+---+---+---+---+---+
2180 // Not a duplicate, repeat:
2183 // +---+---+---+---+---+---+
2184 // | 0 | 1 | 2 | 3 | 1 | 3 |
2185 // +---+---+---+---+---+---+
2188 // Duplicate, advance r. End of slice. Split at w.
2190 let len = self.len();
// NOTE(review): the early return below handles the trivially-deduplicated case;
// its guard condition is not visible in this excerpt — confirm against upstream.
2192 return (self, &mut []);
2195 let ptr = self.as_mut_ptr();
2196 let mut next_read: usize = 1;
2197 let mut next_write: usize = 1;
2199 // SAFETY: the `while` condition guarantees `next_read` and `next_write`
2200 // are less than `len`, thus are inside `self`. `prev_ptr_write` points to
2201 // one element before `ptr_write`, but `next_write` starts at 1, so
2202 // `prev_ptr_write` is never less than 0 and is inside the slice.
2203 // This fulfils the requirements for dereferencing `ptr_read`, `prev_ptr_write`
2204 // and `ptr_write`, and for using `ptr.add(next_read)`, `ptr.add(next_write - 1)`
2205 // and `prev_ptr_write.offset(1)`.
2207 // `next_write` is incremented at most once per loop iteration, meaning
2208 // no element is skipped when it may need to be swapped.
2210 // `ptr_read` and `prev_ptr_write` never point to the same element. This
2211 // is required for `&mut *ptr_read`, `&mut *prev_ptr_write` to be safe.
2212 // The explanation is simply that `next_read >= next_write` is always true,
2213 // thus `next_read > next_write - 1` is too.
2215 // Avoid bounds checks by using raw pointers.
2216 while next_read < len {
2217 let ptr_read = ptr.add(next_read);
2218 let prev_ptr_write = ptr.add(next_write - 1);
2219 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
2220 if next_read != next_write {
2221 let ptr_write = prev_ptr_write.offset(1);
2222 mem::swap(&mut *ptr_read, &mut *ptr_write);
2230 self.split_at_mut(next_write)
2233 /// Moves all but the first of consecutive elements to the end of the slice that resolve
2234 /// to the same key.
2236 /// Returns two slices. The first contains no consecutive repeated elements.
2237 /// The second contains all the duplicates in no specified order.
2239 /// If the slice is sorted, the first returned slice contains no duplicates.
2244 /// #![feature(slice_partition_dedup)]
2246 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
2248 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
2250 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
2251 /// assert_eq!(duplicates, [21, 30, 13]);
2253 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
2255 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
2257 F: FnMut(&mut T) -> K,
// Delegates to `partition_dedup_by`; two elements are "the same" when their
// extracted keys compare equal. Note `key` runs twice per comparison.
2260 self.partition_dedup_by(|a, b| key(a) == key(b))
2263 /// Rotates the slice in-place such that the first `mid` elements of the
2264 /// slice move to the end while the last `self.len() - mid` elements move to
2265 /// the front. After calling `rotate_left`, the element previously at index
2266 /// `mid` will become the first element in the slice.
2270 /// This function will panic if `mid` is greater than the length of the
2271 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
2276 /// Takes linear (in `self.len()`) time.
2281 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2282 /// a.rotate_left(2);
2283 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
2286 /// Rotating a subslice:
2289 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2290 /// a[1..5].rotate_left(1);
2291 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
2293 #[stable(feature = "slice_rotate", since = "1.26.0")]
2294 pub fn rotate_left(&mut self, mid: usize) {
2295 assert!(mid <= self.len());
// `k` elements follow the pivot; `mid + k == self.len()`, so the rotated
// range passed to `ptr_rotate` below covers exactly the whole slice.
2296 let k = self.len() - mid;
2297 let p = self.as_mut_ptr();
2299 // SAFETY: The range `[p.add(mid) - mid, p.add(mid) + k)` is trivially
2300 // valid for reading and writing, as required by `ptr_rotate`.
2302 rotate::ptr_rotate(mid, p.add(mid), k);
2306 /// Rotates the slice in-place such that the first `self.len() - k`
2307 /// elements of the slice move to the end while the last `k` elements move
2308 /// to the front. After calling `rotate_right`, the element previously at
2309 /// index `self.len() - k` will become the first element in the slice.
2313 /// This function will panic if `k` is greater than the length of the
2314 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
2319 /// Takes linear (in `self.len()`) time.
2324 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2325 /// a.rotate_right(2);
2326 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
2329 /// Rotate a subslice:
2332 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2333 /// a[1..5].rotate_right(1);
2334 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
2336 #[stable(feature = "slice_rotate", since = "1.27.0")]
2337 pub fn rotate_right(&mut self, k: usize) {
2338 assert!(k <= self.len());
// A right-rotation by `k` is a left-rotation by `len - k`; reuse the same
// `ptr_rotate` primitive with the pivot at `mid = len - k`.
2339 let mid = self.len() - k;
2340 let p = self.as_mut_ptr();
2342 // SAFETY: The range `[p.add(mid) - mid, p.add(mid) + k)` is trivially
2343 // valid for reading and writing, as required by `ptr_rotate`.
2345 rotate::ptr_rotate(mid, p.add(mid), k);
2349 /// Fills `self` with elements by cloning `value`.
2354 /// #![feature(slice_fill)]
2356 /// let mut buf = vec![0; 10];
/// buf.fill(1);
2358 /// assert_eq!(buf, vec![1; 10]);
2360 #[unstable(feature = "slice_fill", issue = "70758")]
2361 pub fn fill(&mut self, value: T)
// NOTE(review): `last` is split off so the final element can take `value`
// without an extra clone — the assignment to `*last` is not visible in this
// excerpt; confirm against upstream.
2365 if let Some((last, elems)) = self.split_last_mut() {
2367 el.clone_from(&value);
2374 /// Copies the elements from `src` into `self`.
2376 /// The length of `src` must be the same as `self`.
2378 /// If `T` implements `Copy`, it can be more performant to use
2379 /// [`copy_from_slice`].
2383 /// This function will panic if the two slices have different lengths.
2387 /// Cloning two elements from a slice into another:
2390 /// let src = [1, 2, 3, 4];
2391 /// let mut dst = [0, 0];
2393 /// // Because the slices have to be the same length,
2394 /// // we slice the source slice from four elements
2395 /// // to two. It will panic if we don't do this.
2396 /// dst.clone_from_slice(&src[2..]);
2398 /// assert_eq!(src, [1, 2, 3, 4]);
2399 /// assert_eq!(dst, [3, 4]);
2402 /// Rust enforces that there can only be one mutable reference with no
2403 /// immutable references to a particular piece of data in a particular
2404 /// scope. Because of this, attempting to use `clone_from_slice` on a
2405 /// single slice will result in a compile failure:
2408 /// let mut slice = [1, 2, 3, 4, 5];
2410 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
2413 /// To work around this, we can use [`split_at_mut`] to create two distinct
2414 /// sub-slices from a slice:
2417 /// let mut slice = [1, 2, 3, 4, 5];
2420 /// let (left, right) = slice.split_at_mut(2);
2421 /// left.clone_from_slice(&right[1..]);
2424 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2427 /// [`copy_from_slice`]: #method.copy_from_slice
2428 /// [`split_at_mut`]: #method.split_at_mut
2429 #[stable(feature = "clone_from_slice", since = "1.7.0")]
2430 pub fn clone_from_slice(&mut self, src: &[T])
2434 assert!(self.len() == src.len(), "destination and source slices have different lengths");
2435 // NOTE: We need to explicitly slice them to the same length
2436 // for bounds checking to be elided, and the optimizer will
2437 // generate memcpy for simple cases (for example T = u8).
2438 let len = self.len();
2439 let src = &src[..len];
// `clone_from` (rather than `clone` + assignment) lets `T` reuse any
// allocation already held by `self[i]`.
2441 self[i].clone_from(&src[i]);
2445 /// Copies all elements from `src` into `self`, using a memcpy.
2447 /// The length of `src` must be the same as `self`.
2449 /// If `T` does not implement `Copy`, use [`clone_from_slice`].
2453 /// This function will panic if the two slices have different lengths.
2457 /// Copying two elements from a slice into another:
2460 /// let src = [1, 2, 3, 4];
2461 /// let mut dst = [0, 0];
2463 /// // Because the slices have to be the same length,
2464 /// // we slice the source slice from four elements
2465 /// // to two. It will panic if we don't do this.
2466 /// dst.copy_from_slice(&src[2..]);
2468 /// assert_eq!(src, [1, 2, 3, 4]);
2469 /// assert_eq!(dst, [3, 4]);
2472 /// Rust enforces that there can only be one mutable reference with no
2473 /// immutable references to a particular piece of data in a particular
2474 /// scope. Because of this, attempting to use `copy_from_slice` on a
2475 /// single slice will result in a compile failure:
2478 /// let mut slice = [1, 2, 3, 4, 5];
2480 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
2483 /// To work around this, we can use [`split_at_mut`] to create two distinct
2484 /// sub-slices from a slice:
2487 /// let mut slice = [1, 2, 3, 4, 5];
2490 /// let (left, right) = slice.split_at_mut(2);
2491 /// left.copy_from_slice(&right[1..]);
2494 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2497 /// [`clone_from_slice`]: #method.clone_from_slice
2498 /// [`split_at_mut`]: #method.split_at_mut
2499 #[stable(feature = "copy_from_slice", since = "1.9.0")]
2500 pub fn copy_from_slice(&mut self, src: &[T])
2504 // The panic code path was put into a cold function to not bloat the
// call sites: the happy path stays small and inlinable while the panic
// formatting lives out of line.
2509 fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! {
2511 "source slice length ({}) does not match destination slice length ({})",
2516 if self.len() != src.len() {
2517 len_mismatch_fail(self.len(), src.len());
2520 // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was
2521 // checked to have the same length. The slices cannot overlap because
2522 // mutable references are exclusive.
2524 ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len());
2528 /// Copies elements from one part of the slice to another part of itself,
2529 /// using a memmove.
2531 /// `src` is the range within `self` to copy from. `dest` is the starting
2532 /// index of the range within `self` to copy to, which will have the same
2533 /// length as `src`. The two ranges may overlap. The ends of the two ranges
2534 /// must be less than or equal to `self.len()`.
2538 /// This function will panic if either range exceeds the end of the slice,
2539 /// or if the end of `src` is before the start.
2543 /// Copying four bytes within a slice:
2546 /// let mut bytes = *b"Hello, World!";
2548 /// bytes.copy_within(1..5, 8);
2550 /// assert_eq!(&bytes, b"Hello, Wello!");
2552 #[stable(feature = "copy_within", since = "1.37.0")]
2554 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
// Normalize the generic range bounds to a half-open `[src_start, src_end)`
// pair; inclusive bounds may overflow `usize`, hence the checked adds.
2558 let src_start = match src.start_bound() {
2559 ops::Bound::Included(&n) => n,
2560 ops::Bound::Excluded(&n) => {
2561 n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
2563 ops::Bound::Unbounded => 0,
2565 let src_end = match src.end_bound() {
2566 ops::Bound::Included(&n) => {
2567 n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
2569 ops::Bound::Excluded(&n) => n,
2570 ops::Bound::Unbounded => self.len(),
2572 assert!(src_start <= src_end, "src end is before src start");
2573 assert!(src_end <= self.len(), "src is out of bounds");
2574 let count = src_end - src_start;
// `count <= self.len()` (asserted just above), so this subtraction cannot underflow.
2575 assert!(dest <= self.len() - count, "dest is out of bounds");
2576 // SAFETY: the conditions for `ptr::copy` have all been checked above,
2577 // as have those for `ptr::add`.
2579 ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count);
2583 /// Swaps all elements in `self` with those in `other`.
2585 /// The length of `other` must be the same as `self`.
2589 /// This function will panic if the two slices have different lengths.
2593 /// Swapping two elements across slices:
2596 /// let mut slice1 = [0, 0];
2597 /// let mut slice2 = [1, 2, 3, 4];
2599 /// slice1.swap_with_slice(&mut slice2[2..]);
2601 /// assert_eq!(slice1, [3, 4]);
2602 /// assert_eq!(slice2, [1, 2, 0, 0]);
2605 /// Rust enforces that there can only be one mutable reference to a
2606 /// particular piece of data in a particular scope. Because of this,
2607 /// attempting to use `swap_with_slice` on a single slice will result in
2608 /// a compile failure:
2611 /// let mut slice = [1, 2, 3, 4, 5];
2612 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2615 /// To work around this, we can use [`split_at_mut`] to create two distinct
2616 /// mutable sub-slices from a slice:
2619 /// let mut slice = [1, 2, 3, 4, 5];
2622 /// let (left, right) = slice.split_at_mut(2);
2623 /// left.swap_with_slice(&mut right[1..]);
2626 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2629 /// [`split_at_mut`]: #method.split_at_mut
2630 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2631 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2632 assert!(self.len() == other.len(), "destination and source slices have different lengths");
2633 // SAFETY: `self` is valid for `self.len()` elements by definition, and `other` was
2634 // checked to have the same length. The slices cannot overlap because
2635 // mutable references are exclusive.
2637 ptr::swap_nonoverlapping(self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2641 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2642 fn align_to_offsets<U>(&self) -> (usize, usize) {
2643 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2644 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2646 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2647 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2648 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2650 // Formula to calculate this is:
2652 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2653 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2655 // Expanded and simplified:
2657 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2658 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2660 // Luckily since all this is constant-evaluated... performance here matters not!
2662 fn gcd(a: usize, b: usize) -> usize {
2663 use crate::intrinsics;
2664 // iterative Stein’s algorithm (binary GCD)
2665 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2666 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
2668 // SAFETY: `a` and `b` are checked to be non-zero values.
2669 let (ctz_a, mut ctz_b) = unsafe {
2676 (intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b))
2678 let k = ctz_a.min(ctz_b);
2679 let mut a = a >> ctz_a;
2682 // remove all factors of 2 from b
2685 mem::swap(&mut a, &mut b);
2688 // SAFETY: `b` is checked to be non-zero.
2693 ctz_b = intrinsics::cttz_nonzero(b);
2698 let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
2699 let ts: usize = mem::size_of::<U>() / gcd;
2700 let us: usize = mem::size_of::<T>() / gcd;
2702 // Armed with this knowledge, we can find how many `U`s we can fit!
2703 let us_len = self.len() / ts * us;
2704 // And how many `T`s will be in the trailing slice!
2705 let ts_len = self.len() % ts;
2709 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2712 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2713 /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
2714 /// length possible for a given type and input slice, but only your algorithm's performance
2715 /// should depend on that, not its correctness. It is permissible for all of the input data to
2716 /// be returned as the prefix or suffix slice.
2718 /// This method has no purpose when either input element `T` or output element `U` are
2719 /// zero-sized and will return the original slice without splitting anything.
2723 /// This method is essentially a `transmute` with respect to the elements in the returned
2724 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2732 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2733 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2734 /// // less_efficient_algorithm_for_bytes(prefix);
2735 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2736 /// // less_efficient_algorithm_for_bytes(suffix);
2739 #[stable(feature = "slice_align_to", since = "1.30.0")]
2740 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2741 // Note that most of this function will be constant-evaluated,
2742 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2743 // handle ZSTs specially, which is – don't handle them at all.
2744 return (self, &[], &[]);
2747 // First, find at what point do we split between the first and 2nd slice. Easy with
2748 // ptr.align_offset.
2749 let ptr = self.as_ptr();
2750 // SAFETY: See the `align_to_mut` method for the detailed safety comment.
2751 let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::<U>()) };
// If alignment cannot be reached within the slice, everything is the prefix.
2752 if offset > self.len() {
2755 let (left, rest) = self.split_at(offset);
2756 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2757 // SAFETY: now `rest` is definitely aligned, so `from_raw_parts` below is okay,
2758 // since the caller guarantees that we can transmute `T` to `U` safely.
2762 from_raw_parts(rest.as_ptr() as *const U, us_len),
2763 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len),
2769 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2772 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2773 /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
2774 /// length possible for a given type and input slice, but only your algorithm's performance
2775 /// should depend on that, not its correctness. It is permissible for all of the input data to
2776 /// be returned as the prefix or suffix slice.
2778 /// This method has no purpose when either input element `T` or output element `U` are
2779 /// zero-sized and will return the original slice without splitting anything.
2783 /// This method is essentially a `transmute` with respect to the elements in the returned
2784 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2792 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2793 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2794 /// // less_efficient_algorithm_for_bytes(prefix);
2795 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2796 /// // less_efficient_algorithm_for_bytes(suffix);
2799 #[stable(feature = "slice_align_to", since = "1.30.0")]
2800 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2801 // Note that most of this function will be constant-evaluated,
2802 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2803 // handle ZSTs specially, which is – don't handle them at all.
2804 return (self, &mut [], &mut []);
2807 // First, find at what point do we split between the first and 2nd slice. Easy with
2808 // ptr.align_offset.
2809 let ptr = self.as_ptr();
2810 // SAFETY: Here we are ensuring we will use aligned pointers for U for the
2811 // rest of the method. This is done by passing a pointer to &[T] with an
2812 // alignment targeted for U.
2813 // `crate::ptr::align_offset` is called with a correctly aligned and
2814 // valid pointer `ptr` (it comes from a reference to `self`) and with
2815 // a size that is a power of two (since it comes from the alignment for U),
2816 // satisfying its safety constraints.
2817 let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::<U>()) };
2818 if offset > self.len() {
2819 (self, &mut [], &mut [])
2821 let (left, rest) = self.split_at_mut(offset);
2822 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// Capture length and pointer before `rest` is consumed; the raw-parts calls
// below must not alias a live `&mut` to the same memory.
2823 let rest_len = rest.len();
2824 let mut_ptr = rest.as_mut_ptr();
2825 // We can't use `rest` again after this, that would invalidate its alias `mut_ptr`!
2826 // SAFETY: see comments for `align_to`.
2830 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2831 from_raw_parts_mut(mut_ptr.add(rest_len - ts_len), ts_len),
2837 /// Checks if the elements of this slice are sorted.
2839 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2840 /// slice yields exactly zero or one element, `true` is returned.
2842 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2843 /// implies that this function returns `false` if any two consecutive items are not
/// comparable (e.g. floating-point NaN).
2849 /// #![feature(is_sorted)]
2850 /// let empty: [i32; 0] = [];
2852 /// assert!([1, 2, 2, 9].is_sorted());
2853 /// assert!(![1, 3, 2, 4].is_sorted());
2854 /// assert!([0].is_sorted());
2855 /// assert!(empty.is_sorted());
2856 /// assert!(![0.0, 1.0, f32::NAN].is_sorted());
2859 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2860 pub fn is_sorted(&self) -> bool
// Delegates to `is_sorted_by` with `partial_cmp`: a `None` comparison
// (incomparable pair) makes the result `false`.
2864 self.is_sorted_by(|a, b| a.partial_cmp(b))
2867 /// Checks if the elements of this slice are sorted using the given comparator function.
2869 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2870 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2871 /// [`is_sorted`]; see its documentation for more information.
2873 /// [`is_sorted`]: #method.is_sorted
2874 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2875 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2877 F: FnMut(&T, &T) -> Option<Ordering>,
2879 self.iter().is_sorted_by(|a, b| compare(*a, *b))
2882 /// Checks if the elements of this slice are sorted using the given key extraction function.
2884 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2885 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2886 /// documentation for more information.
2888 /// [`is_sorted`]: #method.is_sorted
2893 /// #![feature(is_sorted)]
2895 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2896 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2899 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2900 pub fn is_sorted_by_key<F, K>(&self, f: F) -> bool
2905 self.iter().is_sorted_by_key(f)
2908 /// Returns the index of the partition point according to the given predicate
2909 /// (the index of the first element of the second partition).
2911 /// The slice is assumed to be partitioned according to the given predicate.
2912 /// This means that all elements for which the predicate returns true are at the start of the slice
2913 /// and all elements for which the predicate returns false are at the end.
2914 /// For example, [7, 15, 3, 5, 4, 12, 6] is partitioned under the predicate x % 2 != 0
2915 /// (all odd numbers are at the start, all even at the end).
2917 /// If this slice is not partitioned, the returned result is unspecified and meaningless,
2918 /// as this method performs a kind of binary search.
2923 /// #![feature(partition_point)]
2925 /// let v = [1, 2, 3, 3, 5, 6, 7];
2926 /// let i = v.partition_point(|&x| x < 5);
2928 /// assert_eq!(i, 4);
2929 /// assert!(v[..i].iter().all(|&x| x < 5));
2930 /// assert!(v[i..].iter().all(|&x| !(x < 5)));
2932 #[unstable(feature = "partition_point", reason = "new API", issue = "73831")]
2933 pub fn partition_point<P>(&self, mut pred: P) -> usize
2935 P: FnMut(&T) -> bool,
// Classic binary search over the true/false boundary: `left` tracks the
// smallest index that may be the partition point, `right` the largest.
2938 let mut right = self.len();
2940 while left != right {
2941 let mid = left + (right - left) / 2;
2942 // SAFETY: When `left < right`, `left <= mid < right`.
2943 // Therefore `left` always increases and `right` always decreases,
2944 // and either of them is selected. In both cases `left <= right` is
2945 // satisfied. Therefore if `left < right` in a step, `left <= right`
2946 // is satisfied in the next step. Therefore as long as `left != right`,
2947 // `0 <= left < right <= len` is satisfied and if this case
2948 // `0 <= mid < len` is satisfied too.
2949 let value = unsafe { self.get_unchecked(mid) };
// `slice_u8` lang item: attaches the following inherent ASCII methods to `[u8]`.
2961 #[lang = "slice_u8"]
2964 /// Checks if all bytes in this slice are within the ASCII range.
2965 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2967 pub fn is_ascii(&self) -> bool {
2971 /// Checks that two slices are an ASCII case-insensitive match.
2973 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2974 /// but without allocating and copying temporaries.
2975 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2977 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
// Short-circuits on a length mismatch before comparing byte pairs.
2978 self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a.eq_ignore_ascii_case(b))
2981 /// Converts this slice to its ASCII upper case equivalent in-place.
2983 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2984 /// but non-ASCII letters are unchanged.
2986 /// To return a new uppercased value without modifying the existing one, use
2987 /// [`to_ascii_uppercase`].
2989 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2990 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2992 pub fn make_ascii_uppercase(&mut self) {
2994 byte.make_ascii_uppercase();
2998 /// Converts this slice to its ASCII lower case equivalent in-place.
3000 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
3001 /// but non-ASCII letters are unchanged.
3003 /// To return a new lowercased value without modifying the existing one, use
3004 /// [`to_ascii_lowercase`].
3006 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
3007 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
3009 pub fn make_ascii_lowercase(&mut self) {
3011 byte.make_ascii_lowercase();
3016 /// Returns `true` if any byte in the word `v` is nonascii (>= 128). Snarfed
3017 /// from `../str/mod.rs`, which does something similar for utf8 validation.
3019 fn contains_nonascii(v: usize) -> bool {
// The `as usize` truncates the 64-bit pattern to 0x80808080 on 32-bit
// targets, keeping the high bit of every byte lane on either word size.
3020 const NONASCII_MASK: usize = 0x80808080_80808080u64 as usize;
3021 (NONASCII_MASK & v) != 0
3024 /// Optimized ASCII test that will use usize-at-a-time operations instead of
3025 /// byte-at-a-time operations (when possible).
3027 /// The algorithm we use here is pretty simple. If `s` is too short, we just
3028 /// check each byte and be done with it. Otherwise:
3030 /// - Read the first word with an unaligned load.
3031 /// - Align the pointer, read subsequent words until end with aligned loads.
3032 /// - Read the last `usize` from `s` with an unaligned load.
3034 /// If any of these loads produces something for which `contains_nonascii`
3035 /// (above) returns true, then we know the answer is false.
3037 fn is_ascii(s: &[u8]) -> bool {
3038 const USIZE_SIZE: usize = mem::size_of::<usize>();
3041 let align_offset = s.as_ptr().align_offset(USIZE_SIZE);
3043 // If we wouldn't gain anything from the word-at-a-time implementation, fall
3044 // back to a scalar loop.
3046 // We also do this for architectures where `size_of::<usize>()` isn't
3047 // sufficient alignment for `usize`, because it's a weird edge case.
3048 if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
3049 return s.iter().all(|b| b.is_ascii());
3052 // We always read the first word unaligned, which means `align_offset` is
3053 // 0, we'd read the same value again for the aligned read.
3054 let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset };
3056 let start = s.as_ptr();
3057 // SAFETY: We verify above that `len >= USIZE_SIZE`, so reading one whole
3057 // `usize` from the start of the slice is in bounds.
3058 let first_word = unsafe { (start as *const usize).read_unaligned() };
3060 if contains_nonascii(first_word) {
3063 // We checked this above, somewhat implicitly. Note that `offset_to_aligned`
3064 // is either `align_offset` or `USIZE_SIZE`, both of which are explicitly checked
3066 debug_assert!(offset_to_aligned <= len);
3068 // SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
3069 // middle chunk of the slice.
3070 let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };
3072 // `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
3073 let mut byte_pos = offset_to_aligned;
3075 // Paranoia check about alignment, since we're about to do a bunch of
3076 // aligned loads. In practice a misalignment here should be impossible barring a bug in
3077 // `align_offset` though.
3078 debug_assert_eq!((word_ptr as usize) % mem::align_of::<usize>(), 0);
3080 // Read subsequent words until the last aligned word, excluding the last
3081 // aligned word by itself to be done in tail check later, to ensure that
3082 // the tail is at most one `usize`, avoiding an extra `byte_pos == len` branch.
3083 while byte_pos < len - USIZE_SIZE {
3085 // Sanity check that the read is in bounds
3086 (word_ptr as usize + USIZE_SIZE) <= (start.wrapping_add(len) as usize) &&
3087 // And that our assumptions about `byte_pos` hold.
3088 (word_ptr as usize) - (start as usize) == byte_pos
3091 // Safety: We know `word_ptr` is properly aligned (because of
3092 // `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
3093 let word = unsafe { word_ptr.read() };
3094 if contains_nonascii(word) {
3098 byte_pos += USIZE_SIZE;
3099 // SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that
3100 // after this `add`, `word_ptr` will be at most one-past-the-end.
3101 word_ptr = unsafe { word_ptr.add(1) };
3104 // Sanity check to ensure there really is only one `usize` left. This should
3105 // be guaranteed by our loop condition.
3106 debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);
3108 // SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
3109 let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };
3111 !contains_nonascii(last_word)
// `slice[index]` sugar: both impls below delegate entirely to the
// `SliceIndex` trait, so a single pair of impls covers `usize` and every
// range type at once.
3114 #[stable(feature = "rust1", since = "1.0.0")]
3115 impl<T, I> ops::Index<I> for [T]
3119 type Output = I::Output;
3122 fn index(&self, index: I) -> &I::Output {
3127 #[stable(feature = "rust1", since = "1.0.0")]
3128 impl<T, I> ops::IndexMut<I> for [T]
3133 fn index_mut(&mut self, index: I) -> &mut I::Output {
3134 index.index_mut(self)
// Out-of-line, never-returning panic helpers shared by the slice-indexing
// code paths, keeping panic formatting off the hot paths.
3141 fn slice_start_index_len_fail(index: usize, len: usize) -> ! {
3142 panic!("range start index {} out of range for slice of length {}", index, len);
3148 fn slice_end_index_len_fail(index: usize, len: usize) -> ! {
3149 panic!("range end index {} out of range for slice of length {}", index, len);
3155 fn slice_index_order_fail(index: usize, end: usize) -> ! {
3156 panic!("slice index starts at {} but ends at {}", index, end);
3162 fn slice_index_overflow_fail() -> ! {
3163 panic!("attempted to index slice up to maximum usize");
// Sealed-trait pattern: `SliceIndex` has this private supertrait, so only the
// index types listed here (usize plus the six range types) can ever implement
// it — downstream crates cannot add their own slice index types.
3166 mod private_slice_index {
3168 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3171 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3172 impl Sealed for usize {}
3173 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3174 impl Sealed for ops::Range<usize> {}
3175 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3176 impl Sealed for ops::RangeTo<usize> {}
3177 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3178 impl Sealed for ops::RangeFrom<usize> {}
3179 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3180 impl Sealed for ops::RangeFull {}
3181 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3182 impl Sealed for ops::RangeInclusive<usize> {}
3183 #[stable(feature = "slice_get_slice", since = "1.28.0")]
3184 impl Sealed for ops::RangeToInclusive<usize> {}
3187 /// A helper trait used for indexing operations.
3189 /// Implementations of this trait have to promise that if the argument
3190 /// to `get_(mut_)unchecked` is a safe reference, then so is the result.
3191 #[stable(feature = "slice_get_slice", since = "1.28.0")]
// Custom diagnostics: indexing a `str`/`String` by an integer is the classic
// newcomer error, so point at `.chars().nth()`/`.bytes().nth()` and The Book.
3192 #[rustc_on_unimplemented(
3193     on(T = "str", label = "string indices are ranges of `usize`",),
3195         all(any(T = "str", T = "&str", T = "std::string::String"), _Self = "{integer}"),
3196         note = "you can use `.chars().nth()` or `.bytes().nth()`
3197 see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
3199     message = "the type `{T}` cannot be indexed by `{Self}`",
3200     label = "slice indices are of type `usize` or ranges of `usize`"
// `unsafe trait`: implementors promise the reference-validity contract stated
// in the doc comment above. Sealed via `private_slice_index::Sealed`.
3202 pub unsafe trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
3203     /// The output type returned by methods.
3204     #[stable(feature = "slice_get_slice", since = "1.28.0")]
3205     type Output: ?Sized;
3207     /// Returns a shared reference to the output at this location, if in
3209     #[unstable(feature = "slice_index_methods", issue = "none")]
3210     fn get(self, slice: &T) -> Option<&Self::Output>;
3212     /// Returns a mutable reference to the output at this location, if in
3214     #[unstable(feature = "slice_index_methods", issue = "none")]
3215     fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
3217     /// Returns a shared reference to the output at this location, without
3218     /// performing any bounds checking.
3219     /// Calling this method with an out-of-bounds index or a dangling `slice` pointer
3220     /// is *[undefined behavior]* even if the resulting reference is not used.
3222     /// [undefined behavior]: ../../reference/behavior-considered-undefined.html
3223     #[unstable(feature = "slice_index_methods", issue = "none")]
// Raw-pointer in, raw-pointer out: lets the impls do address arithmetic
// without materializing an intermediate reference.
3224     unsafe fn get_unchecked(self, slice: *const T) -> *const Self::Output;
3226     /// Returns a mutable reference to the output at this location, without
3227     /// performing any bounds checking.
3228     /// Calling this method with an out-of-bounds index or a dangling `slice` pointer
3229     /// is *[undefined behavior]* even if the resulting reference is not used.
3231     /// [undefined behavior]: ../../reference/behavior-considered-undefined.html
3232     #[unstable(feature = "slice_index_methods", issue = "none")]
3233     unsafe fn get_unchecked_mut(self, slice: *mut T) -> *mut Self::Output;
3235     /// Returns a shared reference to the output at this location, panicking
3236     /// if out of bounds.
3237     #[unstable(feature = "slice_index_methods", issue = "none")]
3239     fn index(self, slice: &T) -> &Self::Output;
3241     /// Returns a mutable reference to the output at this location, panicking
3242     /// if out of bounds.
3243     #[unstable(feature = "slice_index_methods", issue = "none")]
3245     fn index_mut(self, slice: &mut T) -> &mut Self::Output;
3248 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
// `usize` index: `Output = T` (a single element, not a subslice).
3249 unsafe impl<T> SliceIndex<[T]> for usize {
3253     fn get(self, slice: &[T]) -> Option<&T> {
3254         // SAFETY: `self` is checked to be in bounds.
3255         if self < slice.len() { unsafe { Some(&*self.get_unchecked(slice)) } } else { None }
3259     fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
3260         // SAFETY: `self` is checked to be in bounds.
3261         if self < slice.len() { unsafe { Some(&mut *self.get_unchecked_mut(slice)) } } else { None }
3265     unsafe fn get_unchecked(self, slice: *const [T]) -> *const T {
3266         // SAFETY: the caller guarantees that `slice` is not dangling, so it
3267         // cannot be longer than `isize::MAX`. They also guarantee that
3268         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
3269         // so the call to `add` is safe.
3270         unsafe { slice.as_ptr().add(self) }
3274     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T {
3275         // SAFETY: see comments for `get_unchecked` above.
3276         unsafe { slice.as_mut_ptr().add(self) }
3280     fn index(self, slice: &[T]) -> &T {
3281         // N.B., use intrinsic indexing
// NOTE(review): the `&(*slice)[self]` body is elided in this view; intrinsic
// indexing keeps the compiler's built-in bounds-check diagnostics.
3286     fn index_mut(self, slice: &mut [T]) -> &mut T {
3287         // N.B., use intrinsic indexing
3292 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
// `start..end` index: `Output = [T]`. This is the primitive impl that the
// `RangeTo`/`RangeFrom`/`RangeInclusive`/... impls below all delegate to.
3293 unsafe impl<T> SliceIndex<[T]> for ops::Range<usize> {
3297     fn get(self, slice: &[T]) -> Option<&[T]> {
// A range is invalid if inverted (start > end) or if end is past the slice.
3298         if self.start > self.end || self.end > slice.len() {
3301             // SAFETY: `self` is checked to be valid and in bounds above.
3302             unsafe { Some(&*self.get_unchecked(slice)) }
3307     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3308         if self.start > self.end || self.end > slice.len() {
3311             // SAFETY: `self` is checked to be valid and in bounds above.
3312             unsafe { Some(&mut *self.get_unchecked_mut(slice)) }
3317     unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
3318         // SAFETY: the caller guarantees that `slice` is not dangling, so it
3319         // cannot be longer than `isize::MAX`. They also guarantee that
3320         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
3321         // so the call to `add` is safe.
3322         unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) }
3326     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
3327         // SAFETY: see comments for `get_unchecked` above.
3329             ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
3334     fn index(self, slice: &[T]) -> &[T] {
// Panicking path: distinguish the two failure modes so the message is precise.
3335         if self.start > self.end {
3336             slice_index_order_fail(self.start, self.end);
3337         } else if self.end > slice.len() {
3338             slice_end_index_len_fail(self.end, slice.len());
3340         // SAFETY: `self` is checked to be valid and in bounds above.
3341         unsafe { &*self.get_unchecked(slice) }
3345     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3346         if self.start > self.end {
3347             slice_index_order_fail(self.start, self.end);
3348         } else if self.end > slice.len() {
3349             slice_end_index_len_fail(self.end, slice.len());
3351         // SAFETY: `self` is checked to be valid and in bounds above.
3352         unsafe { &mut *self.get_unchecked_mut(slice) }
3356 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
// `..end` index: trivially rewritten to `0..end` and delegated to the
// `Range<usize>` impl, so all bounds logic lives in one place.
3357 unsafe impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
3361     fn get(self, slice: &[T]) -> Option<&[T]> {
3362         (0..self.end).get(slice)
3366     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3367         (0..self.end).get_mut(slice)
3371     unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
3372         // SAFETY: the caller has to uphold the safety contract for `get_unchecked`.
3373         unsafe { (0..self.end).get_unchecked(slice) }
3377     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
3378         // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`.
3379         unsafe { (0..self.end).get_unchecked_mut(slice) }
3383     fn index(self, slice: &[T]) -> &[T] {
3384         (0..self.end).index(slice)
3388     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3389         (0..self.end).index_mut(slice)
3393 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
// `start..` index: rewritten to `start..slice.len()` for the checked methods,
// but `index`/`index_mut` check `start` directly so the panic message names
// the *start* bound rather than a derived end bound.
3394 unsafe impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
3398     fn get(self, slice: &[T]) -> Option<&[T]> {
3399         (self.start..slice.len()).get(slice)
3403     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3404         (self.start..slice.len()).get_mut(slice)
3408     unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
3409         // SAFETY: the caller has to uphold the safety contract for `get_unchecked`.
3410         unsafe { (self.start..slice.len()).get_unchecked(slice) }
3414     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
3415         // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`.
3416         unsafe { (self.start..slice.len()).get_unchecked_mut(slice) }
3420     fn index(self, slice: &[T]) -> &[T] {
3421         if self.start > slice.len() {
3422             slice_start_index_len_fail(self.start, slice.len());
3424         // SAFETY: `self` is checked to be valid and in bounds above.
3425         unsafe { &*self.get_unchecked(slice) }
3429     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3430         if self.start > slice.len() {
3431             slice_start_index_len_fail(self.start, slice.len());
3433         // SAFETY: `self` is checked to be valid and in bounds above.
3434         unsafe { &mut *self.get_unchecked_mut(slice) }
3438 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
// `..` (full range) index: identity — every method returns the whole slice
// and can never fail. NOTE(review): the one-line bodies are elided here.
3439 unsafe impl<T> SliceIndex<[T]> for ops::RangeFull {
3443     fn get(self, slice: &[T]) -> Option<&[T]> {
3448     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3453     unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
3458     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
3463     fn index(self, slice: &[T]) -> &[T] {
3468     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3473 #[stable(feature = "inclusive_range", since = "1.26.0")]
// `start..=end` index: converted to the exclusive `start..end + 1` and
// delegated. The `end == usize::MAX` guard exists because `end + 1` would
// overflow; `get` returns `None` while `index` panics via the dedicated
// overflow helper.
3474 unsafe impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
3478     fn get(self, slice: &[T]) -> Option<&[T]> {
3479         if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get(slice) }
3483     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3484         if *self.end() == usize::MAX {
3487             (*self.start()..self.end() + 1).get_mut(slice)
3492     unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
3493         // SAFETY: the caller has to uphold the safety contract for `get_unchecked`.
3494         unsafe { (*self.start()..self.end() + 1).get_unchecked(slice) }
3498     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
3499         // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`.
3500         unsafe { (*self.start()..self.end() + 1).get_unchecked_mut(slice) }
3504     fn index(self, slice: &[T]) -> &[T] {
3505         if *self.end() == usize::MAX {
3506             slice_index_overflow_fail();
3508         (*self.start()..self.end() + 1).index(slice)
3512     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3513         if *self.end() == usize::MAX {
3514             slice_index_overflow_fail();
3516         (*self.start()..self.end() + 1).index_mut(slice)
3520 #[stable(feature = "inclusive_range", since = "1.26.0")]
// `..=end` index: rewritten to `0..=end` and delegated to the
// `RangeInclusive<usize>` impl above (which handles the `usize::MAX` case).
3521 unsafe impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
3525     fn get(self, slice: &[T]) -> Option<&[T]> {
3526         (0..=self.end).get(slice)
3530     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3531         (0..=self.end).get_mut(slice)
3535     unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
3536         // SAFETY: the caller has to uphold the safety contract for `get_unchecked`.
3537         unsafe { (0..=self.end).get_unchecked(slice) }
3541     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
3542         // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`.
3543         unsafe { (0..=self.end).get_unchecked_mut(slice) }
3547     fn index(self, slice: &[T]) -> &[T] {
3548         (0..=self.end).index(slice)
3552     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3553         (0..=self.end).index_mut(slice)
3557 ////////////////////////////////////////////////////////////////////////////////
3559 ////////////////////////////////////////////////////////////////////////////////
3561 #[stable(feature = "rust1", since = "1.0.0")]
3562 impl<T> Default for &[T] {
3563     /// Creates an empty slice.
// NOTE(review): body (`&[]`) elided in this view.
3564     fn default() -> Self {
3569 #[stable(feature = "mut_slice_default", since = "1.5.0")]
3570 impl<T> Default for &mut [T] {
3571     /// Creates a mutable empty slice.
3572     fn default() -> Self {
// `for x in &slice` sugar: yields `&T` via `Iter`, same as `.iter()`.
3581 #[stable(feature = "rust1", since = "1.0.0")]
3582 impl<'a, T> IntoIterator for &'a [T] {
3584     type IntoIter = Iter<'a, T>;
3586     fn into_iter(self) -> Iter<'a, T> {
// `for x in &mut slice` sugar: yields `&mut T` via `IterMut`.
3591 #[stable(feature = "rust1", since = "1.0.0")]
3592 impl<'a, T> IntoIterator for &'a mut [T] {
3593     type Item = &'a mut T;
3594     type IntoIter = IterMut<'a, T>;
3596     fn into_iter(self) -> IterMut<'a, T> {
3601 // Macro helper functions
// Turbofish-free way for the macros below to get `size_of::<T>()` from a
// pointer value. NOTE(review): body (`mem::size_of::<T>()`) elided here.
3603 fn size_from_ptr<T>(_: *const T) -> usize {
3607 // Inlining is_empty and len makes a huge performance difference
3608 macro_rules! is_empty {
3609     // The way we encode the length of a ZST iterator, this works both for ZST
// `ptr == end` is the single emptiness test for both ZST and non-ZST
// iterators — for ZSTs `end` is really `ptr + remaining_len`.
3612         $self.ptr.as_ptr() as *const T == $self.end
3616 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
3617 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
3619     ($self: ident) => {{
3620         #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
3622         let start = $self.ptr;
3623         let size = size_from_ptr(start.as_ptr());
// ZST branch: the "length" is the raw address difference, with wrapping
// because a long ZST iterator's `end` may have wrapped past the address space.
3625         // This _cannot_ use `unchecked_sub` because we depend on wrapping
3626         // to represent the length of long ZST slice iterators.
3627         ($self.end as usize).wrapping_sub(start.as_ptr() as usize)
3629         // We know that `start <= end`, so can do better than `offset_from`,
3630         // which needs to deal in signed. By setting appropriate flags here
3631         // we can tell LLVM this, which helps it remove bounds checks.
3632         // SAFETY: By the type invariant, `start <= end`
3633         let diff = unsafe { unchecked_sub($self.end as usize, start.as_ptr() as usize) };
3634         // By also telling LLVM that the pointers are apart by an exact
3635         // multiple of the type size, it can optimize `len() == 0` down to
3636         // `start == end` instead of `(end - start) < size`.
3637         // SAFETY: By the type invariant, the pointers are aligned so the
3638         // distance between them must be a multiple of pointee size
3639         unsafe { exact_div(diff, size) }
3644 // The shared definition of the `Iter` and `IterMut` iterators
// Expands to the full iterator API (Iterator, DoubleEndedIterator,
// ExactSizeIterator, FusedIterator, TrustedLen) for a slice-iterator struct.
// `$ptr`/`$raw_mut`/`$mut_` parameterize constness so one body serves both
// `Iter` (*const, shared refs) and `IterMut` (*mut, unique refs).
3645 macro_rules! iterator {
3647         struct $name:ident -> $ptr:ty,
3653         // Returns the first element and moves the start of the iterator forwards by 1.
3654         // Greatly improves performance compared to an inlined function. The iterator
3655         // must not be empty.
3656         macro_rules! next_unchecked {
3657             ($self: ident) => {& $( $mut_ )* *$self.post_inc_start(1)}
3660         // Returns the last element and moves the end of the iterator backwards by 1.
3661         // Greatly improves performance compared to an inlined function. The iterator
3662         // must not be empty.
3663         macro_rules! next_back_unchecked {
3664             ($self: ident) => {& $( $mut_ )* *$self.pre_dec_end(1)}
3667         // Shrinks the iterator when T is a ZST, by moving the end of the iterator
3668         // backwards by `n`. `n` must not exceed `self.len()`.
3669         macro_rules! zst_shrink {
3670             ($self: ident, $n: ident) => {
// For ZSTs, `end` encodes the remaining count, so "shrinking" is byte-wise
// pointer arithmetic on `end`; `wrapping_offset` tolerates address wraparound.
3671                 $self.end = ($self.end as * $raw_mut u8).wrapping_offset(-$n) as * $raw_mut T;
3675         impl<'a, T> $name<'a, T> {
3676             // Helper function for creating a slice from the iterator.
3678             fn make_slice(&self) -> &'a [T] {
3679                 // SAFETY: the iterator was created from a slice with pointer
3680                 // `self.ptr` and length `len!(self)`. This guarantees that all
3681                 // the prerequisites for `from_raw_parts` are fulfilled.
3682                 unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) }
3685             // Helper function for moving the start of the iterator forwards by `offset` elements,
3686             // returning the old start.
3687             // Unsafe because the offset must not exceed `self.len()`.
3689             unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
3690                 if mem::size_of::<T>() == 0 {
// ZST: advance by shrinking `end`; `ptr` itself never moves for ZSTs.
3691                     zst_shrink!(self, offset);
3694                     let old = self.ptr.as_ptr();
3695                     // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`,
3696                     // so this new pointer is inside `self` and thus guaranteed to be non-null.
3697                     self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) };
3702             // Helper function for moving the end of the iterator backwards by `offset` elements,
3703             // returning the new end.
3704             // Unsafe because the offset must not exceed `self.len()`.
3706             unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
3707                 if mem::size_of::<T>() == 0 {
3708                     zst_shrink!(self, offset);
3711                     // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`,
3712                     // which is guaranteed to not overflow an `isize`. Also, the resulting pointer
3713                     // is in bounds of `slice`, which fulfills the other requirements for `offset`.
3714                     self.end = unsafe { self.end.offset(-offset) };
3720         #[stable(feature = "rust1", since = "1.0.0")]
3721         impl<T> ExactSizeIterator for $name<'_, T> {
3723             fn len(&self) -> usize {
3728             fn is_empty(&self) -> bool {
3733         #[stable(feature = "rust1", since = "1.0.0")]
3734         impl<'a, T> Iterator for $name<'a, T> {
3738             fn next(&mut self) -> Option<$elem> {
3739                 // could be implemented with slices, but this avoids bounds checks
3741                 // SAFETY: `assume` calls are safe since a slice's start pointer
3742                 // must be non-null, and slices over non-ZSTs must also have a
3743                 // non-null end pointer. The call to `next_unchecked!` is safe
3744                 // since we check if the iterator is empty first.
3746                     assume(!self.ptr.as_ptr().is_null());
3747                     if mem::size_of::<T>() != 0 {
3748                         assume(!self.end.is_null());
3750                     if is_empty!(self) {
3753                         Some(next_unchecked!(self))
3759             fn size_hint(&self) -> (usize, Option<usize>) {
// Exact size is always known, so lower == upper bound.
3760                 let exact = len!(self);
3761                 (exact, Some(exact))
3765             fn count(self) -> usize {
3770             fn nth(&mut self, n: usize) -> Option<$elem> {
3771                 if n >= len!(self) {
3772                     // This iterator is now empty.
3773                     if mem::size_of::<T>() == 0 {
3774                         // We have to do it this way as `ptr` may never be 0, but `end`
3775                         // could be (due to wrapping).
3776                         self.end = self.ptr.as_ptr();
3778                         // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
3780                         self.ptr = NonNull::new_unchecked(self.end as *mut T);
3785                 // SAFETY: We are in bounds. `post_inc_start` does the right thing even for ZSTs.
3787                     self.post_inc_start(n as isize);
3788                     Some(next_unchecked!(self))
3793             fn last(mut self) -> Option<$elem> {
3797             // We override the default implementation, which uses `try_fold`,
3798             // because this simple implementation generates less LLVM IR and is
3799             // faster to compile.
3801             fn for_each<F>(mut self, mut f: F)
3804                 F: FnMut(Self::Item),
3806                 while let Some(x) = self.next() {
3811             // We override the default implementation, which uses `try_fold`,
3812             // because this simple implementation generates less LLVM IR and is
3813             // faster to compile.
3815             fn all<F>(&mut self, mut f: F) -> bool
3818                 F: FnMut(Self::Item) -> bool,
3820                 while let Some(x) = self.next() {
3828             // We override the default implementation, which uses `try_fold`,
3829             // because this simple implementation generates less LLVM IR and is
3830             // faster to compile.
3832             fn any<F>(&mut self, mut f: F) -> bool
3835                 F: FnMut(Self::Item) -> bool,
3837                 while let Some(x) = self.next() {
3845             // We override the default implementation, which uses `try_fold`,
3846             // because this simple implementation generates less LLVM IR and is
3847             // faster to compile.
3849             fn find<P>(&mut self, mut predicate: P) -> Option<Self::Item>
3852                 P: FnMut(&Self::Item) -> bool,
3854                 while let Some(x) = self.next() {
3862             // We override the default implementation, which uses `try_fold`,
3863             // because this simple implementation generates less LLVM IR and is
3864             // faster to compile.
3866             fn find_map<B, F>(&mut self, mut f: F) -> Option<B>
3869                 F: FnMut(Self::Item) -> Option<B>,
3871                 while let Some(x) = self.next() {
3872                     if let Some(y) = f(x) {
3879             // We override the default implementation, which uses `try_fold`,
3880             // because this simple implementation generates less LLVM IR and is
3881             // faster to compile. Also, the `assume` avoids a bounds check.
3883             #[rustc_inherit_overflow_checks]
3884             fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
3886                 P: FnMut(Self::Item) -> bool,
3890                 while let Some(x) = self.next() {
3892                     // SAFETY: we are guaranteed to be in bounds by the loop invariant:
3893                     // when `i >= n`, `self.next()` returns `None` and the loop breaks.
3894                     unsafe { assume(i < n) };
3902             // We override the default implementation, which uses `try_fold`,
3903             // because this simple implementation generates less LLVM IR and is
3904             // faster to compile. Also, the `assume` avoids a bounds check.
3906             fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
3907                 P: FnMut(Self::Item) -> bool,
3908                 Self: Sized + ExactSizeIterator + DoubleEndedIterator
3912                 while let Some(x) = self.next_back() {
3915                     // SAFETY: `i` must be lower than `n` since it starts at `n`
3916                     // and is only decreasing.
3917                     unsafe { assume(i < n) };
3927         #[stable(feature = "rust1", since = "1.0.0")]
3928         impl<'a, T> DoubleEndedIterator for $name<'a, T> {
3930             fn next_back(&mut self) -> Option<$elem> {
3931                 // could be implemented with slices, but this avoids bounds checks
3933                 // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null,
3934                 // and slices over non-ZSTs must also have a non-null end pointer.
3935                 // The call to `next_back_unchecked!` is safe since we check if the iterator is
3938                     assume(!self.ptr.as_ptr().is_null());
3939                     if mem::size_of::<T>() != 0 {
3940                         assume(!self.end.is_null());
3942                     if is_empty!(self) {
3945                         Some(next_back_unchecked!(self))
3951             fn nth_back(&mut self, n: usize) -> Option<$elem> {
3952                 if n >= len!(self) {
3953                     // This iterator is now empty.
// Unlike `nth`, collapsing toward `ptr` is safe for both ZST and non-ZST here.
3954                     self.end = self.ptr.as_ptr();
3957                 // SAFETY: We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
3959                     self.pre_dec_end(n as isize);
3960                     Some(next_back_unchecked!(self))
3965         #[stable(feature = "fused", since = "1.26.0")]
3966         impl<T> FusedIterator for $name<'_, T> {}
3968         #[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY (TrustedLen): size_hint is exact via `len!`, justifying the unsafe impl.
3969         unsafe impl<T> TrustedLen for $name<'_, T> {}
3973 /// Immutable slice iterator
3975 /// This struct is created by the [`iter`] method on [slices].
3982 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3983 /// let slice = &[1, 2, 3];
3985 /// // Then, we iterate over it:
3986 /// for element in slice.iter() {
3987 ///     println!("{}", element);
3991 /// [`iter`]: ../../std/primitive.slice.html#method.iter
3992 /// [slices]: ../../std/primitive.slice.html
3993 #[stable(feature = "rust1", since = "1.0.0")]
// Two-pointer representation; all iteration logic is supplied by the
// `iterator!` macro expansion below.
3994 pub struct Iter<'a, T: 'a> {
3996     end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3997     // ptr == end is a quick test for the Iterator being empty, that works
3998     // for both ZST and non-ZST.
// PhantomData ties the iterator to the borrowed slice's lifetime 'a.
3999     _marker: marker::PhantomData<&'a T>,
4002 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4003 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
4004     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Debug prints the *remaining* elements, not the original slice.
4005         f.debug_tuple("Iter").field(&self.as_slice()).finish()
4009 #[stable(feature = "rust1", since = "1.0.0")]
// SAFETY: Iter only hands out `&T`, so both bounds are `T: Sync` —
// sending the iterator across threads is no more than sharing `&[T]`.
4010 unsafe impl<T: Sync> Sync for Iter<'_, T> {}
4011 #[stable(feature = "rust1", since = "1.0.0")]
4012 unsafe impl<T: Sync> Send for Iter<'_, T> {}
4014 impl<'a, T> Iter<'a, T> {
4015     /// Views the underlying data as a subslice of the original data.
4017     /// This has the same lifetime as the original slice, and so the
4018     /// iterator can continue to be used while this exists.
4025     /// // First, we declare a type which has the `iter` method to get the `Iter`
4026     /// // struct (&[usize here]):
4027     /// let slice = &[1, 2, 3];
4029     /// // Then, we get the iterator:
4030     /// let mut iter = slice.iter();
4031     /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
4032     /// println!("{:?}", iter.as_slice());
4034     /// // Next, we move to the second element of the slice:
4036     /// // Now `as_slice` returns "[2, 3]":
4037     /// println!("{:?}", iter.as_slice());
4039     #[stable(feature = "iter_to_slice", since = "1.4.0")]
// Delegates to the macro-generated `make_slice` helper (body elided here).
4040     pub fn as_slice(&self) -> &'a [T] {
// Instantiate the shared iterator machinery for `Iter` (const pointers, `&'a T`
// items). The extra body provides `is_sorted_by`, which can use the remaining
// slice directly via `windows(2)` instead of stepping the iterator.
4045 iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, {
4046     fn is_sorted_by<F>(self, mut compare: F) -> bool
4049         F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
// Sorted iff every adjacent pair compares <= ; an incomparable pair
// (`None` from `compare`) counts as unsorted.
4051         self.as_slice().windows(2).all(|w| {
4052             compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
4057 #[stable(feature = "rust1", since = "1.0.0")]
// Manual Clone: a derived impl would require `T: Clone`, but cloning the
// iterator only copies pointers, so no bound is needed.
4058 impl<T> Clone for Iter<'_, T> {
4059     fn clone(&self) -> Self {
4060         Iter { ptr: self.ptr, end: self.end, _marker: self._marker }
4064 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
4065 impl<T> AsRef<[T]> for Iter<'_, T> {
4066     fn as_ref(&self) -> &[T] {
4071 /// Mutable slice iterator.
4073 /// This struct is created by the [`iter_mut`] method on [slices].
4080 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
4081 /// // struct (&[usize here]):
4082 /// let mut slice = &mut [1, 2, 3];
4084 /// // Then, we iterate over it and increment each element value:
4085 /// for element in slice.iter_mut() {
4089 /// // We now have "[2, 3, 4]":
4090 /// println!("{:?}", slice);
4093 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
4094 /// [slices]: ../../std/primitive.slice.html
4095 #[stable(feature = "rust1", since = "1.0.0")]
// Mirror of `Iter` with mutable pointers; same ptr/end (ZST = ptr+len) encoding.
4096 pub struct IterMut<'a, T: 'a> {
4098     end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
4099     // ptr == end is a quick test for the Iterator being empty, that works
4100     // for both ZST and non-ZST.
// PhantomData of `&'a mut T`: makes IterMut invariant in T and unique-borrowing.
4101     _marker: marker::PhantomData<&'a mut T>,
4104 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4105 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
4106     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Uses `make_slice` (a shared view) — Debug must not require `&mut` access.
4107         f.debug_tuple("IterMut").field(&self.make_slice()).finish()
4111 #[stable(feature = "rust1", since = "1.0.0")]
// SAFETY: IterMut hands out `&mut T`, so `Send` requires `T: Send`;
// `Sync` only requires `T: Sync` because a shared `&IterMut` exposes no `&mut T`.
4112 unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
4113 #[stable(feature = "rust1", since = "1.0.0")]
4114 unsafe impl<T: Send> Send for IterMut<'_, T> {}
4116 impl<'a, T> IterMut<'a, T> {
4117     /// Views the underlying data as a subslice of the original data.
4119     /// To avoid creating `&mut` references that alias, this is forced
4120     /// to consume the iterator.
4127     /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
4128     /// // struct (&[usize here]):
4129     /// let mut slice = &mut [1, 2, 3];
4132     /// // Then, we get the iterator:
4133     /// let mut iter = slice.iter_mut();
4134     /// // We move to next element:
4136     /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
4137     /// println!("{:?}", iter.into_slice());
4140     /// // Now let's modify a value of the slice:
4142     /// // First we get back the iterator:
4143     /// let mut iter = slice.iter_mut();
4144     /// // We change the value of the first element of the slice returned by the `next` method:
4145     /// *iter.next().unwrap() += 1;
4147     /// // Now slice is "[2, 2, 3]":
4148     /// println!("{:?}", slice);
4150     #[stable(feature = "iter_to_slice", since = "1.4.0")]
4151     pub fn into_slice(self) -> &'a mut [T] {
4152         // SAFETY: the iterator was created from a mutable slice with pointer
4153         // `self.ptr` and length `len!(self)`. This guarantees that all the prerequisites
4154         // for `from_raw_parts_mut` are fulfilled.
4155         unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) }
4158     /// Views the underlying data as a subslice of the original data.
4160     /// To avoid creating `&mut [T]` references that alias, the returned slice
4161     /// borrows its lifetime from the iterator the method is applied on.
4168     /// # #![feature(slice_iter_mut_as_slice)]
4169     /// let mut slice: &mut [usize] = &mut [1, 2, 3];
4171     /// // First, we get the iterator:
4172     /// let mut iter = slice.iter_mut();
4173     /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
4174     /// assert_eq!(iter.as_slice(), &[1, 2, 3]);
4176     /// // Next, we move to the second element of the slice:
4178     /// // Now `as_slice` returns "[2, 3]":
4179     /// assert_eq!(iter.as_slice(), &[2, 3]);
4181     #[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")]
// Shared (non-'a) view of the remainder; delegates to `make_slice` (body elided).
4182     pub fn as_slice(&self) -> &[T] {
// Instantiate the shared iterator machinery for `IterMut` (mut pointers,
// `&'a mut T` items, no extra methods).
4187 iterator! {struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
4189 /// An internal abstraction over the splitting iterators, so that
4190 /// splitn, splitn_mut etc can be implemented once.
4192 trait SplitIter: DoubleEndedIterator {
4193     /// Marks the underlying iterator as complete, extracting the remaining
4194     /// portion of the slice.
4195     fn finish(&mut self) -> Option<Self::Item>;
4198 /// An iterator over subslices separated by elements that match a predicate
4201 /// This struct is created by the [`split`] method on [slices].
4203 /// [`split`]: ../../std/primitive.slice.html#method.split
4204 /// [slices]: ../../std/primitive.slice.html
4205 #[stable(feature = "rust1", since = "1.0.0")]
// Fields (elided in this view): `v` — the not-yet-yielded remainder,
// `pred` — the separator predicate, `finished` — set once iteration is done.
4206 pub struct Split<'a, T: 'a, P>
4208     P: FnMut(&T) -> bool,
4215 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4216 impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P>
4218     P: FnMut(&T) -> bool,
4220     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// `pred` is a closure and not Debug, so only `v` and `finished` are shown.
4221         f.debug_struct("Split").field("v", &self.v).field("finished", &self.finished).finish()
4225 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4226 #[stable(feature = "rust1", since = "1.0.0")]
4227 impl<T, P> Clone for Split<'_, T, P>
4229     P: Clone + FnMut(&T) -> bool,
4231     fn clone(&self) -> Self {
4232         Split { v: self.v, pred: self.pred.clone(), finished: self.finished }
4236 #[stable(feature = "rust1", since = "1.0.0")]
4237 impl<'a, T, P> Iterator for Split<'a, T, P>
4239     P: FnMut(&T) -> bool,
4241     type Item = &'a [T];
4244     fn next(&mut self) -> Option<&'a [T]> {
// Find the next separator; yield everything before it and drop the
// separator itself (`idx + 1..`). No match → `finish()` yields the tail.
4249         match self.v.iter().position(|x| (self.pred)(x)) {
4250             None => self.finish(),
4252                 let ret = Some(&self.v[..idx]);
4253                 self.v = &self.v[idx + 1..];
4260     fn size_hint(&self) -> (usize, Option<usize>) {
// At most one subslice per element plus the trailing piece; at least one
// piece remains while unfinished.
4261         if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
4265 #[stable(feature = "rust1", since = "1.0.0")]
4266 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P>
4268     P: FnMut(&T) -> bool,
4271     fn next_back(&mut self) -> Option<&'a [T]> {
// Mirror of `next`: search from the back with `rposition`, yield the piece
// after the separator, keep the part before it.
4276         match self.v.iter().rposition(|x| (self.pred)(x)) {
4277             None => self.finish(),
4279                 let ret = Some(&self.v[idx + 1..]);
4280                 self.v = &self.v[..idx];
4287 impl<'a, T, P> SplitIter for Split<'a, T, P>
4289     P: FnMut(&T) -> bool,
4292     fn finish(&mut self) -> Option<&'a [T]> {
4296             self.finished = true;
4302 #[stable(feature = "fused", since = "1.26.0")]
4303 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
4305 /// An iterator over subslices separated by elements that match a predicate
4306 /// function. Unlike `Split`, it contains the matched part as a terminator
4307 /// of the subslice.
4309 /// This struct is created by the [`split_inclusive`] method on [slices].
4311 /// [`split_inclusive`]: ../../std/primitive.slice.html#method.split_inclusive
4312 /// [slices]: ../../std/primitive.slice.html
4313 #[unstable(feature = "split_inclusive", issue = "72360")]
// Same field layout as `Split` (remainder / predicate / finished flag).
4314 pub struct SplitInclusive<'a, T: 'a, P>
4316     P: FnMut(&T) -> bool,
4323 #[unstable(feature = "split_inclusive", issue = "72360")]
4324 impl<T: fmt::Debug, P> fmt::Debug for SplitInclusive<'_, T, P>
4326     P: FnMut(&T) -> bool,
4328     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4329         f.debug_struct("SplitInclusive")
4330             .field("v", &self.v)
4331             .field("finished", &self.finished)
4336 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4337 #[unstable(feature = "split_inclusive", issue = "72360")]
4338 impl<T, P> Clone for SplitInclusive<'_, T, P>
4340     P: Clone + FnMut(&T) -> bool,
4342     fn clone(&self) -> Self {
4343         SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished }
4347 #[unstable(feature = "split_inclusive", issue = "72360")]
4348 impl<'a, T, P> Iterator for SplitInclusive<'a, T, P>
4350     P: FnMut(&T) -> bool,
4352     type Item = &'a [T];
4355     fn next(&mut self) -> Option<&'a [T]> {
// Cut *after* the separator (`idx + 1`), so the matched element stays at the
// end of the yielded piece; no separator found → take the whole remainder.
4361             self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len());
4362         if idx == self.v.len() {
4363             self.finished = true;
4365         let ret = Some(&self.v[..idx]);
4366         self.v = &self.v[idx..];
4371     fn size_hint(&self) -> (usize, Option<usize>) {
4372         if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
4376 #[unstable(feature = "split_inclusive", issue = "72360")]
4377 impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P>
4379     P: FnMut(&T) -> bool,
4382     fn next_back(&mut self) -> Option<&'a [T]> {
4387         // The last index of self.v is already checked and found to match
4388         // by the last iteration, so we start searching a new match
4389         // one index to the left.
4390         let remainder = if self.v.is_empty() { &[] } else { &self.v[..(self.v.len() - 1)] };
// `idx` is the start of the last piece: one past the previous separator,
// or 0 when no earlier separator exists (then this yields the final piece).
4391         let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0);
4393             self.finished = true;
4395         let ret = Some(&self.v[idx..]);
4396         self.v = &self.v[..idx];
4401 #[unstable(feature = "split_inclusive", issue = "72360")]
4402 impl<T, P> FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {}
// `SplitMut`: the mutable counterpart of `Split`. Because two `&mut`
// subslices may not alias, the slice is moved out of `self.v` with
// `mem::replace(&mut self.v, &mut [])` before `split_at_mut`.
// NOTE(review): interior lines (match headers, `where` keywords, closing
// braces) are elided by extraction in this region.
4404 /// An iterator over the mutable subslices of the vector which are separated
4405 /// by elements that match `pred`.
4407 /// This struct is created by the [`split_mut`] method on [slices].
4409 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
4410 /// [slices]: ../../std/primitive.slice.html
4411 #[stable(feature = "rust1", since = "1.0.0")]
4412 pub struct SplitMut<'a, T: 'a, P>
4414 P: FnMut(&T) -> bool,
4421 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4422 impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P>
4424 P: FnMut(&T) -> bool,
4426 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4427 f.debug_struct("SplitMut").field("v", &self.v).field("finished", &self.finished).finish()
4431 impl<'a, T, P> SplitIter for SplitMut<'a, T, P>
4433 P: FnMut(&T) -> bool,
4436 fn finish(&mut self) -> Option<&'a mut [T]> {
4440 self.finished = true;
4441 Some(mem::replace(&mut self.v, &mut []))
4446 #[stable(feature = "rust1", since = "1.0.0")]
4447 impl<'a, T, P> Iterator for SplitMut<'a, T, P>
4449 P: FnMut(&T) -> bool,
4451 type Item = &'a mut [T];
// next(): find the first matching element, split there, yield the head,
// and keep everything after the separator (`tail[1..]`) for later calls.
4454 fn next(&mut self) -> Option<&'a mut [T]> {
4460 // work around borrowck limitations
4461 let pred = &mut self.pred;
4462 self.v.iter().position(|x| (*pred)(x))
4465 None => self.finish(),
4467 let tmp = mem::replace(&mut self.v, &mut []);
4468 let (head, tail) = tmp.split_at_mut(idx);
4469 self.v = &mut tail[1..];
4476 fn size_hint(&self) -> (usize, Option<usize>) {
4480 // if the predicate doesn't match anything, we yield one slice
4481 // if it matches every element, we yield len+1 empty slices.
4482 (1, Some(self.v.len() + 1))
4487 #[stable(feature = "rust1", since = "1.0.0")]
4488 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P>
4490 P: FnMut(&T) -> bool,
4493 fn next_back(&mut self) -> Option<&'a mut [T]> {
4499 // work around borrowck limitations
4500 let pred = &mut self.pred;
4501 self.v.iter().rposition(|x| (*pred)(x))
4504 None => self.finish(),
4506 let tmp = mem::replace(&mut self.v, &mut []);
4507 let (head, tail) = tmp.split_at_mut(idx);
4509 Some(&mut tail[1..])
4515 #[stable(feature = "fused", since = "1.26.0")]
4516 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
// `SplitInclusiveMut`: mutable variant of `SplitInclusive` — each yielded
// `&mut [T]` subslice ends with its matching separator. Uses the same
// `mem::replace` + `split_at_mut` dance as `SplitMut` to satisfy borrowck.
// NOTE(review): interior lines are elided by extraction in this region.
4518 /// An iterator over the mutable subslices of the vector which are separated
4519 /// by elements that match `pred`. Unlike `SplitMut`, it contains the matched
4520 /// parts in the ends of the subslices.
4522 /// This struct is created by the [`split_inclusive_mut`] method on [slices].
4524 /// [`split_inclusive_mut`]: ../../std/primitive.slice.html#method.split_inclusive_mut
4525 /// [slices]: ../../std/primitive.slice.html
4526 #[unstable(feature = "split_inclusive", issue = "72360")]
4527 pub struct SplitInclusiveMut<'a, T: 'a, P>
4529 P: FnMut(&T) -> bool,
4536 #[unstable(feature = "split_inclusive", issue = "72360")]
4537 impl<T: fmt::Debug, P> fmt::Debug for SplitInclusiveMut<'_, T, P>
4539 P: FnMut(&T) -> bool,
4541 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4542 f.debug_struct("SplitInclusiveMut")
4543 .field("v", &self.v)
4544 .field("finished", &self.finished)
4549 #[unstable(feature = "split_inclusive", issue = "72360")]
4550 impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P>
4552 P: FnMut(&T) -> bool,
4554 type Item = &'a mut [T];
// next(): split one-past the first match so the separator stays in the
// yielded head; no match means the whole rest is yielded and we finish.
4557 fn next(&mut self) -> Option<&'a mut [T]> {
4563 // work around borrowck limitations
4564 let pred = &mut self.pred;
4565 self.v.iter().position(|x| (*pred)(x))
4567 let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len());
4568 if idx == self.v.len() {
4569 self.finished = true;
4571 let tmp = mem::replace(&mut self.v, &mut []);
4572 let (head, tail) = tmp.split_at_mut(idx);
4578 fn size_hint(&self) -> (usize, Option<usize>) {
4582 // if the predicate doesn't match anything, we yield one slice
4583 // if it matches every element, we yield len+1 empty slices.
4584 (1, Some(self.v.len() + 1))
4589 #[unstable(feature = "split_inclusive", issue = "72360")]
4590 impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P>
4592 P: FnMut(&T) -> bool,
4595 fn next_back(&mut self) -> Option<&'a mut [T]> {
4600 let idx_opt = if self.v.is_empty() {
4603 // work around borrowck limitations
4604 let pred = &mut self.pred;
4606 // The last index of self.v is already checked and found to match
4607 // by the last iteration, so we start searching a new match
4608 // one index to the left.
4609 let remainder = &self.v[..(self.v.len() - 1)];
4610 remainder.iter().rposition(|x| (*pred)(x))
4612 let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0);
4614 self.finished = true;
4616 let tmp = mem::replace(&mut self.v, &mut []);
4617 let (head, tail) = tmp.split_at_mut(idx);
4623 #[unstable(feature = "split_inclusive", issue = "72360")]
4624 impl<T, P> FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {}
// `RSplit` is a thin adapter over `Split` that swaps the iteration
// directions: `next` delegates to `inner.next_back` (and, presumably,
// `next_back` to `inner.next` — that body is elided here; confirm upstream).
4626 /// An iterator over subslices separated by elements that match a predicate
4627 /// function, starting from the end of the slice.
4629 /// This struct is created by the [`rsplit`] method on [slices].
4631 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
4632 /// [slices]: ../../std/primitive.slice.html
4633 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4634 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
4635 pub struct RSplit<'a, T: 'a, P>
4637 P: FnMut(&T) -> bool,
4639 inner: Split<'a, T, P>,
4642 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4643 impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P>
4645 P: FnMut(&T) -> bool,
4647 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4648 f.debug_struct("RSplit")
4649 .field("v", &self.inner.v)
4650 .field("finished", &self.inner.finished)
4655 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4656 impl<'a, T, P> Iterator for RSplit<'a, T, P>
4658 P: FnMut(&T) -> bool,
4660 type Item = &'a [T];
4663 fn next(&mut self) -> Option<&'a [T]> {
4664 self.inner.next_back()
4668 fn size_hint(&self) -> (usize, Option<usize>) {
4669 self.inner.size_hint()
4673 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4674 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P>
4676 P: FnMut(&T) -> bool,
4679 fn next_back(&mut self) -> Option<&'a [T]> {
4684 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4685 impl<'a, T, P> SplitIter for RSplit<'a, T, P>
4687 P: FnMut(&T) -> bool,
4690 fn finish(&mut self) -> Option<&'a [T]> {
4695 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4696 impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
// `RSplitMut`: mutable counterpart of `RSplit`, wrapping a `SplitMut` and
// reversing its direction (`next` delegates to `inner.next_back`). The
// `next_back` and `finish` bodies are elided in this extraction.
4698 /// An iterator over the subslices of the vector which are separated
4699 /// by elements that match `pred`, starting from the end of the slice.
4701 /// This struct is created by the [`rsplit_mut`] method on [slices].
4703 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
4704 /// [slices]: ../../std/primitive.slice.html
4705 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4706 pub struct RSplitMut<'a, T: 'a, P>
4708 P: FnMut(&T) -> bool,
4710 inner: SplitMut<'a, T, P>,
4713 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4714 impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P>
4716 P: FnMut(&T) -> bool,
4718 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4719 f.debug_struct("RSplitMut")
4720 .field("v", &self.inner.v)
4721 .field("finished", &self.inner.finished)
4726 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4727 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P>
4729 P: FnMut(&T) -> bool,
4732 fn finish(&mut self) -> Option<&'a mut [T]> {
4737 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4738 impl<'a, T, P> Iterator for RSplitMut<'a, T, P>
4740 P: FnMut(&T) -> bool,
4742 type Item = &'a mut [T];
4745 fn next(&mut self) -> Option<&'a mut [T]> {
4746 self.inner.next_back()
4750 fn size_hint(&self) -> (usize, Option<usize>) {
4751 self.inner.size_hint()
4755 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4756 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P>
4758 P: FnMut(&T) -> bool,
4761 fn next_back(&mut self) -> Option<&'a mut [T]> {
4766 #[stable(feature = "slice_rsplit", since = "1.27.0")]
4767 impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
// Shared engine for the four `*SplitN*` public wrappers below: any
// `SplitIter` plus a remaining-split `count`. `size_hint` caps the upper
// bound at `count`, since no more than `count` items can ever be yielded.
4769 /// A private iterator over subslices separated by elements that
4770 /// match a predicate function, splitting at most a fixed number of
4773 struct GenericSplitN<I> {
4778 impl<T, I: SplitIter<Item = T>> Iterator for GenericSplitN<I> {
// next(): body elided by extraction — NOTE(review): confirm upstream.
4782 fn next(&mut self) -> Option<T> {
4797 fn size_hint(&self) -> (usize, Option<usize>) {
4798 let (lower, upper_opt) = self.iter.size_hint();
4799 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
// The four limited-split iterators. Each is a newtype over
// `GenericSplitN<...>` with the matching base splitter; their `Iterator`
// and `FusedIterator` impls are stamped out by `forward_iterator!` below,
// which simply forwards `next`/`size_hint` to `inner`.
4803 /// An iterator over subslices separated by elements that match a predicate
4804 /// function, limited to a given number of splits.
4806 /// This struct is created by the [`splitn`] method on [slices].
4808 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
4809 /// [slices]: ../../std/primitive.slice.html
4810 #[stable(feature = "rust1", since = "1.0.0")]
4811 pub struct SplitN<'a, T: 'a, P>
4813 P: FnMut(&T) -> bool,
4815 inner: GenericSplitN<Split<'a, T, P>>,
4818 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4819 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P>
4821 P: FnMut(&T) -> bool,
4823 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4824 f.debug_struct("SplitN").field("inner", &self.inner).finish()
4828 /// An iterator over subslices separated by elements that match a
4829 /// predicate function, limited to a given number of splits, starting
4830 /// from the end of the slice.
4832 /// This struct is created by the [`rsplitn`] method on [slices].
4834 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
4835 /// [slices]: ../../std/primitive.slice.html
4836 #[stable(feature = "rust1", since = "1.0.0")]
4837 pub struct RSplitN<'a, T: 'a, P>
4839 P: FnMut(&T) -> bool,
4841 inner: GenericSplitN<RSplit<'a, T, P>>,
4844 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4845 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P>
4847 P: FnMut(&T) -> bool,
4849 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4850 f.debug_struct("RSplitN").field("inner", &self.inner).finish()
4854 /// An iterator over subslices separated by elements that match a predicate
4855 /// function, limited to a given number of splits.
4857 /// This struct is created by the [`splitn_mut`] method on [slices].
4859 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
4860 /// [slices]: ../../std/primitive.slice.html
4861 #[stable(feature = "rust1", since = "1.0.0")]
4862 pub struct SplitNMut<'a, T: 'a, P>
4864 P: FnMut(&T) -> bool,
4866 inner: GenericSplitN<SplitMut<'a, T, P>>,
4869 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4870 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P>
4872 P: FnMut(&T) -> bool,
4874 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4875 f.debug_struct("SplitNMut").field("inner", &self.inner).finish()
4879 /// An iterator over subslices separated by elements that match a
4880 /// predicate function, limited to a given number of splits, starting
4881 /// from the end of the slice.
4883 /// This struct is created by the [`rsplitn_mut`] method on [slices].
4885 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
4886 /// [slices]: ../../std/primitive.slice.html
4887 #[stable(feature = "rust1", since = "1.0.0")]
4888 pub struct RSplitNMut<'a, T: 'a, P>
4890 P: FnMut(&T) -> bool,
4892 inner: GenericSplitN<RSplitMut<'a, T, P>>,
4895 #[stable(feature = "core_impl_debug", since = "1.9.0")]
4896 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P>
4898 P: FnMut(&T) -> bool,
4900 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
4901 f.debug_struct("RSplitNMut").field("inner", &self.inner).finish()
// Macro generating `Iterator`/`FusedIterator` for the four wrappers above.
// NOTE(review): `next`'s forwarding body is elided in this extraction.
4905 macro_rules! forward_iterator {
4906 ($name:ident: $elem:ident, $iter_of:ty) => {
4907 #[stable(feature = "rust1", since = "1.0.0")]
4908 impl<'a, $elem, P> Iterator for $name<'a, $elem, P>
4910 P: FnMut(&T) -> bool,
4912 type Item = $iter_of;
4915 fn next(&mut self) -> Option<$iter_of> {
4920 fn size_hint(&self) -> (usize, Option<usize>) {
4921 self.inner.size_hint()
4925 #[stable(feature = "fused", since = "1.26.0")]
4926 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool {}
4930 forward_iterator! { SplitN: T, &'a [T] }
4931 forward_iterator! { RSplitN: T, &'a [T] }
4932 forward_iterator! { SplitNMut: T, &'a mut [T] }
4933 forward_iterator! { RSplitNMut: T, &'a mut [T] }
// `Windows`: overlapping length-`size` views, advancing one element per
// step (`self.v = &self.v[1..]`). Stores only `v` and `size`, so `Clone`
// needs no `T: Clone`. `ExactSizeIterator`/`TrustedLen` rely on the exact
// `len - size + 1` count visible in `size_hint`.
4935 /// An iterator over overlapping subslices of length `size`.
4937 /// This struct is created by the [`windows`] method on [slices].
4939 /// [`windows`]: ../../std/primitive.slice.html#method.windows
4940 /// [slices]: ../../std/primitive.slice.html
4942 #[stable(feature = "rust1", since = "1.0.0")]
4943 pub struct Windows<'a, T: 'a> {
4948 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4949 #[stable(feature = "rust1", since = "1.0.0")]
4950 impl<T> Clone for Windows<'_, T> {
4951 fn clone(&self) -> Self {
4952 Windows { v: self.v, size: self.size }
4956 #[stable(feature = "rust1", since = "1.0.0")]
4957 impl<'a, T> Iterator for Windows<'a, T> {
4958 type Item = &'a [T];
4961 fn next(&mut self) -> Option<&'a [T]> {
4962 if self.size > self.v.len() {
4965 let ret = Some(&self.v[..self.size]);
4966 self.v = &self.v[1..];
4972 fn size_hint(&self) -> (usize, Option<usize>) {
4973 if self.size > self.v.len() {
4976 let size = self.v.len() - self.size + 1;
4982 fn count(self) -> usize {
// nth(): `overflowing_add` guards against `size + n` wrapping before the
// bounds comparison.
4987 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4988 let (end, overflow) = self.size.overflowing_add(n);
4989 if end > self.v.len() || overflow {
4993 let nth = &self.v[n..end];
4994 self.v = &self.v[n + 1..];
5000 fn last(self) -> Option<Self::Item> {
5001 if self.size > self.v.len() {
5004 let start = self.v.len() - self.size;
5005 Some(&self.v[start..])
5010 #[stable(feature = "rust1", since = "1.0.0")]
5011 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
5013 fn next_back(&mut self) -> Option<&'a [T]> {
5014 if self.size > self.v.len() {
5017 let ret = Some(&self.v[self.v.len() - self.size..]);
5018 self.v = &self.v[..self.v.len() - 1];
5024 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5025 let (end, overflow) = self.v.len().overflowing_sub(n);
5026 if end < self.size || overflow {
5030 let ret = &self.v[end - self.size..end];
5031 self.v = &self.v[..end - 1];
5037 #[stable(feature = "rust1", since = "1.0.0")]
5038 impl<T> ExactSizeIterator for Windows<'_, T> {}
5040 #[unstable(feature = "trusted_len", issue = "37572")]
5041 unsafe impl<T> TrustedLen for Windows<'_, T> {}
5043 #[stable(feature = "fused", since = "1.26.0")]
5044 impl<T> FusedIterator for Windows<'_, T> {}
5047 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
5048 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5049 // SAFETY: since the caller guarantees that `i` is in bounds,
5050 // which means that `i` cannot overflow an `isize`, and the
5051 // slice created by `from_raw_parts` is a subslice of `self.v`
5052 // thus is guaranteed to be valid for the lifetime `'a` of `self.v`.
5053 unsafe { from_raw_parts(self.v.as_ptr().add(i), self.size) }
5055 fn may_have_side_effect() -> bool {
// `Chunks`: non-overlapping `chunk_size` pieces from the front; the final
// chunk may be short (`cmp::min(len, chunk_size)` in `next`, and the `rem`
// handling in `size_hint`/`next_back`). Overflow-checked arithmetic in
// `nth` guards `n * chunk_size`.
5060 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
5061 /// time), starting at the beginning of the slice.
5063 /// When the slice len is not evenly divided by the chunk size, the last slice
5064 /// of the iteration will be the remainder.
5066 /// This struct is created by the [`chunks`] method on [slices].
5068 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
5069 /// [slices]: ../../std/primitive.slice.html
5071 #[stable(feature = "rust1", since = "1.0.0")]
5072 pub struct Chunks<'a, T: 'a> {
5077 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
5078 #[stable(feature = "rust1", since = "1.0.0")]
5079 impl<T> Clone for Chunks<'_, T> {
5080 fn clone(&self) -> Self {
5081 Chunks { v: self.v, chunk_size: self.chunk_size }
5085 #[stable(feature = "rust1", since = "1.0.0")]
5086 impl<'a, T> Iterator for Chunks<'a, T> {
5087 type Item = &'a [T];
5090 fn next(&mut self) -> Option<&'a [T]> {
5091 if self.v.is_empty() {
5094 let chunksz = cmp::min(self.v.len(), self.chunk_size);
5095 let (fst, snd) = self.v.split_at(chunksz);
5102 fn size_hint(&self) -> (usize, Option<usize>) {
5103 if self.v.is_empty() {
5106 let n = self.v.len() / self.chunk_size;
5107 let rem = self.v.len() % self.chunk_size;
// Round up: a non-empty remainder still yields one (short) chunk.
5108 let n = if rem > 0 { n + 1 } else { n };
5114 fn count(self) -> usize {
5119 fn nth(&mut self, n: usize) -> Option<Self::Item> {
5120 let (start, overflow) = n.overflowing_mul(self.chunk_size);
5121 if start >= self.v.len() || overflow {
5125 let end = match start.checked_add(self.chunk_size) {
5126 Some(sum) => cmp::min(self.v.len(), sum),
5127 None => self.v.len(),
5129 let nth = &self.v[start..end];
5130 self.v = &self.v[end..];
5136 fn last(self) -> Option<Self::Item> {
5137 if self.v.is_empty() {
5140 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
5141 Some(&self.v[start..])
5146 #[stable(feature = "rust1", since = "1.0.0")]
5147 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
// next_back(): the back chunk is the remainder (if any), so forward and
// backward iteration yield the same set of chunks.
5149 fn next_back(&mut self) -> Option<&'a [T]> {
5150 if self.v.is_empty() {
5153 let remainder = self.v.len() % self.chunk_size;
5154 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
5155 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
5162 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5163 let len = self.len();
5168 let start = (len - 1 - n) * self.chunk_size;
5169 let end = match start.checked_add(self.chunk_size) {
5170 Some(res) => cmp::min(res, self.v.len()),
5171 None => self.v.len(),
5173 let nth_back = &self.v[start..end];
5174 self.v = &self.v[..start];
5180 #[stable(feature = "rust1", since = "1.0.0")]
5181 impl<T> ExactSizeIterator for Chunks<'_, T> {}
5183 #[unstable(feature = "trusted_len", issue = "37572")]
5184 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
5186 #[stable(feature = "fused", since = "1.26.0")]
5187 impl<T> FusedIterator for Chunks<'_, T> {}
5190 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
5191 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5192 let start = i * self.chunk_size;
5193 let end = match start.checked_add(self.chunk_size) {
5194 None => self.v.len(),
5195 Some(end) => cmp::min(end, self.v.len()),
5197 // SAFETY: the caller guarantees that `i` is in bounds,
5198 // which means that `start` must be in bounds of the
5199 // underlying `self.v` slice, and we made sure that `end`
5200 // is also in bounds of `self.v`. Thus, `start` cannot overflow
5201 // an `isize`, and the slice constructed by `from_raw_parts`
5202 // is a subslice of `self.v` which is guaranteed to be valid
5203 // for the lifetime `'a` of `self.v`.
5204 unsafe { from_raw_parts(self.v.as_ptr().add(start), end - start) }
5206 fn may_have_side_effect() -> bool {
// `ChunksMut`: mutable `Chunks`. Each step moves the slice out of `self.v`
// via `mem::replace(&mut self.v, &mut [])` before `split_at_mut`, so the
// yielded `&mut [T]` chunks never alias the iterator's remaining state.
5211 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5212 /// elements at a time), starting at the beginning of the slice.
5214 /// When the slice len is not evenly divided by the chunk size, the last slice
5215 /// of the iteration will be the remainder.
5217 /// This struct is created by the [`chunks_mut`] method on [slices].
5219 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
5220 /// [slices]: ../../std/primitive.slice.html
5222 #[stable(feature = "rust1", since = "1.0.0")]
5223 pub struct ChunksMut<'a, T: 'a> {
5228 #[stable(feature = "rust1", since = "1.0.0")]
5229 impl<'a, T> Iterator for ChunksMut<'a, T> {
5230 type Item = &'a mut [T];
5233 fn next(&mut self) -> Option<&'a mut [T]> {
5234 if self.v.is_empty() {
5237 let sz = cmp::min(self.v.len(), self.chunk_size);
5238 let tmp = mem::replace(&mut self.v, &mut []);
5239 let (head, tail) = tmp.split_at_mut(sz);
5246 fn size_hint(&self) -> (usize, Option<usize>) {
5247 if self.v.is_empty() {
5250 let n = self.v.len() / self.chunk_size;
5251 let rem = self.v.len() % self.chunk_size;
5252 let n = if rem > 0 { n + 1 } else { n };
5258 fn count(self) -> usize {
5263 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5264 let (start, overflow) = n.overflowing_mul(self.chunk_size);
5265 if start >= self.v.len() || overflow {
5269 let end = match start.checked_add(self.chunk_size) {
5270 Some(sum) => cmp::min(self.v.len(), sum),
5271 None => self.v.len(),
5273 let tmp = mem::replace(&mut self.v, &mut []);
5274 let (head, tail) = tmp.split_at_mut(end);
5275 let (_, nth) = head.split_at_mut(start);
5282 fn last(self) -> Option<Self::Item> {
5283 if self.v.is_empty() {
5286 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
5287 Some(&mut self.v[start..])
5292 #[stable(feature = "rust1", since = "1.0.0")]
5293 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
// next_back(): yields the remainder chunk first (if len % chunk_size != 0),
// mirroring `Chunks::next_back`.
5295 fn next_back(&mut self) -> Option<&'a mut [T]> {
5296 if self.v.is_empty() {
5299 let remainder = self.v.len() % self.chunk_size;
5300 let sz = if remainder != 0 { remainder } else { self.chunk_size };
5301 let tmp = mem::replace(&mut self.v, &mut []);
5302 let tmp_len = tmp.len();
5303 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
5310 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5311 let len = self.len();
5316 let start = (len - 1 - n) * self.chunk_size;
5317 let end = match start.checked_add(self.chunk_size) {
5318 Some(res) => cmp::min(res, self.v.len()),
5319 None => self.v.len(),
5321 let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5322 let (head, nth_back) = temp.split_at_mut(start);
5329 #[stable(feature = "rust1", since = "1.0.0")]
5330 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
5332 #[unstable(feature = "trusted_len", issue = "37572")]
5333 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
5335 #[stable(feature = "fused", since = "1.26.0")]
5336 impl<T> FusedIterator for ChunksMut<'_, T> {}
5339 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
5340 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5341 let start = i * self.chunk_size;
5342 let end = match start.checked_add(self.chunk_size) {
5343 None => self.v.len(),
5344 Some(end) => cmp::min(end, self.v.len()),
5346 // SAFETY: see comments for `Chunks::get_unchecked`.
5347 unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start) }
5349 fn may_have_side_effect() -> bool {
// `ChunksExact`: like `Chunks` but every yielded chunk is exactly
// `chunk_size` long; the short tail is split off up front and exposed via
// `remainder()`. The exact-length guarantee is what lets `get_unchecked`
// use a constant `self.chunk_size` and helps downstream vectorization.
5354 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
5355 /// time), starting at the beginning of the slice.
5357 /// When the slice len is not evenly divided by the chunk size, the last
5358 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
5359 /// the [`remainder`] function from the iterator.
5361 /// This struct is created by the [`chunks_exact`] method on [slices].
5363 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
5364 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
5365 /// [slices]: ../../std/primitive.slice.html
5367 #[stable(feature = "chunks_exact", since = "1.31.0")]
5368 pub struct ChunksExact<'a, T: 'a> {
5374 impl<'a, T> ChunksExact<'a, T> {
5375 /// Returns the remainder of the original slice that is not going to be
5376 /// returned by the iterator. The returned slice has at most `chunk_size-1`
5378 #[stable(feature = "chunks_exact", since = "1.31.0")]
5379 pub fn remainder(&self) -> &'a [T] {
5384 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
5385 #[stable(feature = "chunks_exact", since = "1.31.0")]
5386 impl<T> Clone for ChunksExact<'_, T> {
5387 fn clone(&self) -> Self {
5388 ChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
5392 #[stable(feature = "chunks_exact", since = "1.31.0")]
5393 impl<'a, T> Iterator for ChunksExact<'a, T> {
5394 type Item = &'a [T];
5397 fn next(&mut self) -> Option<&'a [T]> {
5398 if self.v.len() < self.chunk_size {
5401 let (fst, snd) = self.v.split_at(self.chunk_size);
5408 fn size_hint(&self) -> (usize, Option<usize>) {
// Exact: `v` was pre-trimmed to a multiple of `chunk_size`, so no rounding.
5409 let n = self.v.len() / self.chunk_size;
5414 fn count(self) -> usize {
5419 fn nth(&mut self, n: usize) -> Option<Self::Item> {
5420 let (start, overflow) = n.overflowing_mul(self.chunk_size);
5421 if start >= self.v.len() || overflow {
5425 let (_, snd) = self.v.split_at(start);
5432 fn last(mut self) -> Option<Self::Item> {
5437 #[stable(feature = "chunks_exact", since = "1.31.0")]
5438 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
5440 fn next_back(&mut self) -> Option<&'a [T]> {
5441 if self.v.len() < self.chunk_size {
5444 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
5451 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5452 let len = self.len();
5457 let start = (len - 1 - n) * self.chunk_size;
5458 let end = start + self.chunk_size;
5459 let nth_back = &self.v[start..end];
5460 self.v = &self.v[..start];
5466 #[stable(feature = "chunks_exact", since = "1.31.0")]
5467 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
5468 fn is_empty(&self) -> bool {
5473 #[unstable(feature = "trusted_len", issue = "37572")]
5474 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
5476 #[stable(feature = "chunks_exact", since = "1.31.0")]
5477 impl<T> FusedIterator for ChunksExact<'_, T> {}
5480 #[stable(feature = "chunks_exact", since = "1.31.0")]
5481 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
5482 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5483 let start = i * self.chunk_size;
5484 // SAFETY: mostly identical to `Chunks::get_unchecked`.
5485 unsafe { from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) }
5487 fn may_have_side_effect() -> bool {
// `ChunksExactMut`: mutable `ChunksExact`. The pre-split short tail is
// consumed via `into_remainder()` (by value, since handing out a `&'a mut`
// borrow requires giving up the iterator). Chunk extraction uses the usual
// `mem::replace` + `split_at_mut` pattern for non-aliasing `&mut` chunks.
5492 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5493 /// elements at a time), starting at the beginning of the slice.
5495 /// When the slice len is not evenly divided by the chunk size, the last up to
5496 /// `chunk_size-1` elements will be omitted but can be retrieved from the
5497 /// [`into_remainder`] function from the iterator.
5499 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
5501 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
5502 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
5503 /// [slices]: ../../std/primitive.slice.html
5505 #[stable(feature = "chunks_exact", since = "1.31.0")]
5506 pub struct ChunksExactMut<'a, T: 'a> {
5512 impl<'a, T> ChunksExactMut<'a, T> {
5513 /// Returns the remainder of the original slice that is not going to be
5514 /// returned by the iterator. The returned slice has at most `chunk_size-1`
5516 #[stable(feature = "chunks_exact", since = "1.31.0")]
5517 pub fn into_remainder(self) -> &'a mut [T] {
5522 #[stable(feature = "chunks_exact", since = "1.31.0")]
5523 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
5524 type Item = &'a mut [T];
5527 fn next(&mut self) -> Option<&'a mut [T]> {
5528 if self.v.len() < self.chunk_size {
5531 let tmp = mem::replace(&mut self.v, &mut []);
5532 let (head, tail) = tmp.split_at_mut(self.chunk_size);
5539 fn size_hint(&self) -> (usize, Option<usize>) {
5540 let n = self.v.len() / self.chunk_size;
5545 fn count(self) -> usize {
5550 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5551 let (start, overflow) = n.overflowing_mul(self.chunk_size);
5552 if start >= self.v.len() || overflow {
5556 let tmp = mem::replace(&mut self.v, &mut []);
5557 let (_, snd) = tmp.split_at_mut(start);
5564 fn last(mut self) -> Option<Self::Item> {
5569 #[stable(feature = "chunks_exact", since = "1.31.0")]
5570 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
5572 fn next_back(&mut self) -> Option<&'a mut [T]> {
5573 if self.v.len() < self.chunk_size {
5576 let tmp = mem::replace(&mut self.v, &mut []);
5577 let tmp_len = tmp.len();
5578 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
5585 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5586 let len = self.len();
5591 let start = (len - 1 - n) * self.chunk_size;
5592 let end = start + self.chunk_size;
5593 let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5594 let (head, nth_back) = temp.split_at_mut(start);
5601 #[stable(feature = "chunks_exact", since = "1.31.0")]
5602 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
5603 fn is_empty(&self) -> bool {
5608 #[unstable(feature = "trusted_len", issue = "37572")]
5609 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
5611 #[stable(feature = "chunks_exact", since = "1.31.0")]
5612 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
5615 #[stable(feature = "chunks_exact", since = "1.31.0")]
5616 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
5617 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5618 let start = i * self.chunk_size;
5619 // SAFETY: see comments for `ChunksExactMut::get_unchecked`.
5620 unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) }
5622 fn may_have_side_effect() -> bool {
// `ArrayChunks<T, N>`: const-generic sibling of `ChunksExact` that yields
// `&[T; N]` references. It stores an `Iter<'a, [T; N]>` over a reinterpreted
// slice-of-arrays plus the `rem` tail, so every adapter method (`next`,
// `nth`, `next_back`, size/len/unsafe access) simply forwards to `iter`.
5627 /// An iterator over a slice in (non-overlapping) chunks (`N` elements at a
5628 /// time), starting at the beginning of the slice.
5630 /// When the slice len is not evenly divided by the chunk size, the last
5631 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
5632 /// the [`remainder`] function from the iterator.
5634 /// This struct is created by the [`array_chunks`] method on [slices].
5636 /// [`array_chunks`]: ../../std/primitive.slice.html#method.array_chunks
5637 /// [`remainder`]: ../../std/slice/struct.ArrayChunks.html#method.remainder
5638 /// [slices]: ../../std/primitive.slice.html
5640 #[unstable(feature = "array_chunks", issue = "74985")]
5641 pub struct ArrayChunks<'a, T: 'a, const N: usize> {
5642 iter: Iter<'a, [T; N]>,
5646 impl<'a, T, const N: usize> ArrayChunks<'a, T, N> {
5647 /// Returns the remainder of the original slice that is not going to be
5648 /// returned by the iterator. The returned slice has at most `chunk_size-1`
5650 #[unstable(feature = "array_chunks", issue = "74985")]
5651 pub fn remainder(&self) -> &'a [T] {
5656 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
5657 #[unstable(feature = "array_chunks", issue = "74985")]
5658 impl<T, const N: usize> Clone for ArrayChunks<'_, T, N> {
5659 fn clone(&self) -> Self {
5660 ArrayChunks { iter: self.iter.clone(), rem: self.rem }
5664 #[unstable(feature = "array_chunks", issue = "74985")]
5665 impl<'a, T, const N: usize> Iterator for ArrayChunks<'a, T, N> {
5666 type Item = &'a [T; N];
5669 fn next(&mut self) -> Option<&'a [T; N]> {
5674 fn size_hint(&self) -> (usize, Option<usize>) {
5675 self.iter.size_hint()
5679 fn count(self) -> usize {
5684 fn nth(&mut self, n: usize) -> Option<Self::Item> {
5689 fn last(self) -> Option<Self::Item> {
5694 #[unstable(feature = "array_chunks", issue = "74985")]
5695 impl<'a, T, const N: usize> DoubleEndedIterator for ArrayChunks<'a, T, N> {
5697 fn next_back(&mut self) -> Option<&'a [T; N]> {
5698 self.iter.next_back()
5702 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5703 self.iter.nth_back(n)
5707 #[unstable(feature = "array_chunks", issue = "74985")]
5708 impl<T, const N: usize> ExactSizeIterator for ArrayChunks<'_, T, N> {
5709 fn is_empty(&self) -> bool {
5710 self.iter.is_empty()
5714 #[unstable(feature = "trusted_len", issue = "37572")]
5715 unsafe impl<T, const N: usize> TrustedLen for ArrayChunks<'_, T, N> {}
5717 #[unstable(feature = "array_chunks", issue = "74985")]
5718 impl<T, const N: usize> FusedIterator for ArrayChunks<'_, T, N> {}
5721 #[unstable(feature = "array_chunks", issue = "74985")]
5722 unsafe impl<'a, T, const N: usize> TrustedRandomAccess for ArrayChunks<'a, T, N> {
5723 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T; N] {
5724 // SAFETY: The safety guarantees of `get_unchecked` are transferred to
5726 unsafe { self.iter.get_unchecked(i) }
5728 fn may_have_side_effect() -> bool {
5733 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
5734 /// time), starting at the end of the slice.
5736 /// When the slice len is not evenly divided by the chunk size, the last slice
5737 /// of the iteration will be the remainder.
5739 /// This struct is created by the [`rchunks`] method on [slices].
5741 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
5742 /// [slices]: ../../std/primitive.slice.html
5744 #[stable(feature = "rchunks", since = "1.31.0")]
5745 pub struct RChunks<'a, T: 'a> {
5750 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
5751 #[stable(feature = "rchunks", since = "1.31.0")]
5752 impl<T> Clone for RChunks<'_, T> {
5753 fn clone(&self) -> Self {
5754 RChunks { v: self.v, chunk_size: self.chunk_size }
5758 #[stable(feature = "rchunks", since = "1.31.0")]
5759 impl<'a, T> Iterator for RChunks<'a, T> {
5760 type Item = &'a [T];
5763 fn next(&mut self) -> Option<&'a [T]> {
5764 if self.v.is_empty() {
5767 let chunksz = cmp::min(self.v.len(), self.chunk_size);
5768 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
5775 fn size_hint(&self) -> (usize, Option<usize>) {
5776 if self.v.is_empty() {
5779 let n = self.v.len() / self.chunk_size;
5780 let rem = self.v.len() % self.chunk_size;
5781 let n = if rem > 0 { n + 1 } else { n };
5787 fn count(self) -> usize {
5792 fn nth(&mut self, n: usize) -> Option<Self::Item> {
5793 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5794 if end >= self.v.len() || overflow {
5798 // Can't underflow because of the check above
5799 let end = self.v.len() - end;
5800 let start = match end.checked_sub(self.chunk_size) {
5804 let nth = &self.v[start..end];
5805 self.v = &self.v[0..start];
5811 fn last(self) -> Option<Self::Item> {
5812 if self.v.is_empty() {
5815 let rem = self.v.len() % self.chunk_size;
5816 let end = if rem == 0 { self.chunk_size } else { rem };
5817 Some(&self.v[0..end])
5822 #[stable(feature = "rchunks", since = "1.31.0")]
5823 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
5825 fn next_back(&mut self) -> Option<&'a [T]> {
5826 if self.v.is_empty() {
5829 let remainder = self.v.len() % self.chunk_size;
5830 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
5831 let (fst, snd) = self.v.split_at(chunksz);
5838 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5839 let len = self.len();
5844 // can't underflow because `n < len`
5845 let offset_from_end = (len - 1 - n) * self.chunk_size;
5846 let end = self.v.len() - offset_from_end;
5847 let start = end.saturating_sub(self.chunk_size);
5848 let nth_back = &self.v[start..end];
5849 self.v = &self.v[end..];
5855 #[stable(feature = "rchunks", since = "1.31.0")]
5856 impl<T> ExactSizeIterator for RChunks<'_, T> {}
5858 #[unstable(feature = "trusted_len", issue = "37572")]
5859 unsafe impl<T> TrustedLen for RChunks<'_, T> {}
5861 #[stable(feature = "rchunks", since = "1.31.0")]
5862 impl<T> FusedIterator for RChunks<'_, T> {}
5865 #[stable(feature = "rchunks", since = "1.31.0")]
5866 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
5867 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5868 let end = self.v.len() - i * self.chunk_size;
5869 let start = match end.checked_sub(self.chunk_size) {
5871 Some(start) => start,
5873 // SAFETY: mostly identical to `Chunks::get_unchecked`.
5874 unsafe { from_raw_parts(self.v.as_ptr().add(start), end - start) }
5876 fn may_have_side_effect() -> bool {
5881 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5882 /// elements at a time), starting at the end of the slice.
5884 /// When the slice len is not evenly divided by the chunk size, the last slice
5885 /// of the iteration will be the remainder.
5887 /// This struct is created by the [`rchunks_mut`] method on [slices].
5889 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
5890 /// [slices]: ../../std/primitive.slice.html
5892 #[stable(feature = "rchunks", since = "1.31.0")]
5893 pub struct RChunksMut<'a, T: 'a> {
5898 #[stable(feature = "rchunks", since = "1.31.0")]
5899 impl<'a, T> Iterator for RChunksMut<'a, T> {
5900 type Item = &'a mut [T];
5903 fn next(&mut self) -> Option<&'a mut [T]> {
5904 if self.v.is_empty() {
5907 let sz = cmp::min(self.v.len(), self.chunk_size);
5908 let tmp = mem::replace(&mut self.v, &mut []);
5909 let tmp_len = tmp.len();
5910 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
5917 fn size_hint(&self) -> (usize, Option<usize>) {
5918 if self.v.is_empty() {
5921 let n = self.v.len() / self.chunk_size;
5922 let rem = self.v.len() % self.chunk_size;
5923 let n = if rem > 0 { n + 1 } else { n };
5929 fn count(self) -> usize {
5934 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5935 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5936 if end >= self.v.len() || overflow {
5940 // Can't underflow because of the check above
5941 let end = self.v.len() - end;
5942 let start = match end.checked_sub(self.chunk_size) {
5946 let tmp = mem::replace(&mut self.v, &mut []);
5947 let (head, tail) = tmp.split_at_mut(start);
5948 let (nth, _) = tail.split_at_mut(end - start);
5955 fn last(self) -> Option<Self::Item> {
5956 if self.v.is_empty() {
5959 let rem = self.v.len() % self.chunk_size;
5960 let end = if rem == 0 { self.chunk_size } else { rem };
5961 Some(&mut self.v[0..end])
5966 #[stable(feature = "rchunks", since = "1.31.0")]
5967 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
5969 fn next_back(&mut self) -> Option<&'a mut [T]> {
5970 if self.v.is_empty() {
5973 let remainder = self.v.len() % self.chunk_size;
5974 let sz = if remainder != 0 { remainder } else { self.chunk_size };
5975 let tmp = mem::replace(&mut self.v, &mut []);
5976 let (head, tail) = tmp.split_at_mut(sz);
5983 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5984 let len = self.len();
5989 // can't underflow because `n < len`
5990 let offset_from_end = (len - 1 - n) * self.chunk_size;
5991 let end = self.v.len() - offset_from_end;
5992 let start = end.saturating_sub(self.chunk_size);
5993 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5994 let (_, nth_back) = tmp.split_at_mut(start);
6001 #[stable(feature = "rchunks", since = "1.31.0")]
6002 impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
6004 #[unstable(feature = "trusted_len", issue = "37572")]
6005 unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
6007 #[stable(feature = "rchunks", since = "1.31.0")]
6008 impl<T> FusedIterator for RChunksMut<'_, T> {}
6011 #[stable(feature = "rchunks", since = "1.31.0")]
6012 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
6013 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
6014 let end = self.v.len() - i * self.chunk_size;
6015 let start = match end.checked_sub(self.chunk_size) {
6017 Some(start) => start,
6019 // SAFETY: see comments for `RChunks::get_unchecked`.
6020 unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start) }
6022 fn may_have_side_effect() -> bool {
6027 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
6028 /// time), starting at the end of the slice.
6030 /// When the slice len is not evenly divided by the chunk size, the last
6031 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
6032 /// the [`remainder`] function from the iterator.
6034 /// This struct is created by the [`rchunks_exact`] method on [slices].
6036 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
6037 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
6038 /// [slices]: ../../std/primitive.slice.html
6040 #[stable(feature = "rchunks", since = "1.31.0")]
6041 pub struct RChunksExact<'a, T: 'a> {
6047 impl<'a, T> RChunksExact<'a, T> {
6048 /// Returns the remainder of the original slice that is not going to be
6049 /// returned by the iterator. The returned slice has at most `chunk_size-1`
6051 #[stable(feature = "rchunks", since = "1.31.0")]
6052 pub fn remainder(&self) -> &'a [T] {
6057 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
6058 #[stable(feature = "rchunks", since = "1.31.0")]
6059 impl<'a, T> Clone for RChunksExact<'a, T> {
6060 fn clone(&self) -> RChunksExact<'a, T> {
6061 RChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
6065 #[stable(feature = "rchunks", since = "1.31.0")]
6066 impl<'a, T> Iterator for RChunksExact<'a, T> {
6067 type Item = &'a [T];
6070 fn next(&mut self) -> Option<&'a [T]> {
6071 if self.v.len() < self.chunk_size {
6074 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
6081 fn size_hint(&self) -> (usize, Option<usize>) {
6082 let n = self.v.len() / self.chunk_size;
6087 fn count(self) -> usize {
6092 fn nth(&mut self, n: usize) -> Option<Self::Item> {
6093 let (end, overflow) = n.overflowing_mul(self.chunk_size);
6094 if end >= self.v.len() || overflow {
6098 let (fst, _) = self.v.split_at(self.v.len() - end);
6105 fn last(mut self) -> Option<Self::Item> {
6110 #[stable(feature = "rchunks", since = "1.31.0")]
6111 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
6113 fn next_back(&mut self) -> Option<&'a [T]> {
6114 if self.v.len() < self.chunk_size {
6117 let (fst, snd) = self.v.split_at(self.chunk_size);
6124 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
6125 let len = self.len();
6130 // now that we know that `n` corresponds to a chunk,
6131 // none of these operations can underflow/overflow
6132 let offset = (len - n) * self.chunk_size;
6133 let start = self.v.len() - offset;
6134 let end = start + self.chunk_size;
6135 let nth_back = &self.v[start..end];
6136 self.v = &self.v[end..];
6142 #[stable(feature = "rchunks", since = "1.31.0")]
6143 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
6144 fn is_empty(&self) -> bool {
6149 #[unstable(feature = "trusted_len", issue = "37572")]
6150 unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
6152 #[stable(feature = "rchunks", since = "1.31.0")]
6153 impl<T> FusedIterator for RChunksExact<'_, T> {}
6156 #[stable(feature = "rchunks", since = "1.31.0")]
6157 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
6158 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
6159 let end = self.v.len() - i * self.chunk_size;
6160 let start = end - self.chunk_size;
6161 // SAFETY: mostmy identical to `Chunks::get_unchecked`.
6162 unsafe { from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) }
6164 fn may_have_side_effect() -> bool {
6169 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
6170 /// elements at a time), starting at the end of the slice.
6172 /// When the slice len is not evenly divided by the chunk size, the last up to
6173 /// `chunk_size-1` elements will be omitted but can be retrieved from the
6174 /// [`into_remainder`] function from the iterator.
6176 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
6178 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
6179 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
6180 /// [slices]: ../../std/primitive.slice.html
6182 #[stable(feature = "rchunks", since = "1.31.0")]
6183 pub struct RChunksExactMut<'a, T: 'a> {
6189 impl<'a, T> RChunksExactMut<'a, T> {
6190 /// Returns the remainder of the original slice that is not going to be
6191 /// returned by the iterator. The returned slice has at most `chunk_size-1`
6193 #[stable(feature = "rchunks", since = "1.31.0")]
6194 pub fn into_remainder(self) -> &'a mut [T] {
6199 #[stable(feature = "rchunks", since = "1.31.0")]
6200 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
6201 type Item = &'a mut [T];
6204 fn next(&mut self) -> Option<&'a mut [T]> {
6205 if self.v.len() < self.chunk_size {
6208 let tmp = mem::replace(&mut self.v, &mut []);
6209 let tmp_len = tmp.len();
6210 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
6217 fn size_hint(&self) -> (usize, Option<usize>) {
6218 let n = self.v.len() / self.chunk_size;
6223 fn count(self) -> usize {
6228 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
6229 let (end, overflow) = n.overflowing_mul(self.chunk_size);
6230 if end >= self.v.len() || overflow {
6234 let tmp = mem::replace(&mut self.v, &mut []);
6235 let tmp_len = tmp.len();
6236 let (fst, _) = tmp.split_at_mut(tmp_len - end);
6243 fn last(mut self) -> Option<Self::Item> {
6248 #[stable(feature = "rchunks", since = "1.31.0")]
6249 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
6251 fn next_back(&mut self) -> Option<&'a mut [T]> {
6252 if self.v.len() < self.chunk_size {
6255 let tmp = mem::replace(&mut self.v, &mut []);
6256 let (head, tail) = tmp.split_at_mut(self.chunk_size);
6263 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
6264 let len = self.len();
6269 // now that we know that `n` corresponds to a chunk,
6270 // none of these operations can underflow/overflow
6271 let offset = (len - n) * self.chunk_size;
6272 let start = self.v.len() - offset;
6273 let end = start + self.chunk_size;
6274 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
6275 let (_, nth_back) = tmp.split_at_mut(start);
6282 #[stable(feature = "rchunks", since = "1.31.0")]
6283 impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
6284 fn is_empty(&self) -> bool {
6289 #[unstable(feature = "trusted_len", issue = "37572")]
6290 unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
6292 #[stable(feature = "rchunks", since = "1.31.0")]
6293 impl<T> FusedIterator for RChunksExactMut<'_, T> {}
6296 #[stable(feature = "rchunks", since = "1.31.0")]
6297 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
6298 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
6299 let end = self.v.len() - i * self.chunk_size;
6300 let start = end - self.chunk_size;
6301 // SAFETY: see comments for `RChunksExact::get_unchecked`.
6302 unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) }
6304 fn may_have_side_effect() -> bool {
6313 /// Forms a slice from a pointer and a length.
6315 /// The `len` argument is the number of **elements**, not the number of bytes.
6319 /// Behavior is undefined if any of the following conditions are violated:
6321 /// * `data` must be [valid] for reads for `len * mem::size_of::<T>()` many bytes,
6322 /// and it must be properly aligned. This means in particular:
6324 /// * The entire memory range of this slice must be contained within a single allocated object!
6325 /// Slices can never span across multiple allocated objects. See [below](#incorrect-usage)
6326 /// for an example incorrectly not taking this into account.
6327 /// * `data` must be non-null and aligned even for zero-length slices. One
6328 /// reason for this is that enum layout optimizations may rely on references
6329 /// (including slices of any length) being aligned and non-null to distinguish
6330 /// them from other data. You can obtain a pointer that is usable as `data`
6331 /// for zero-length slices using [`NonNull::dangling()`].
6333 /// * The memory referenced by the returned slice must not be mutated for the duration
6334 /// of lifetime `'a`, except inside an `UnsafeCell`.
6336 /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
6337 /// See the safety documentation of [`pointer::offset`].
6341 /// The lifetime for the returned slice is inferred from its usage. To
6342 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
6343 /// source lifetime is safe in the context, such as by providing a helper
6344 /// function taking the lifetime of a host value for the slice, or by explicit
6352 /// // manifest a slice for a single element
6354 /// let ptr = &x as *const _;
6355 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
6356 /// assert_eq!(slice[0], 42);
6359 /// ### Incorrect usage
6361 /// The following `join_slices` function is **unsound** ⚠️
6366 /// fn join_slices<'a, T>(fst: &'a [T], snd: &'a [T]) -> &'a [T] {
6367 /// let fst_end = fst.as_ptr().wrapping_add(fst.len());
6368 /// let snd_start = snd.as_ptr();
6369 /// assert_eq!(fst_end, snd_start, "Slices must be contiguous!");
6371 /// // The assertion above ensures `fst` and `snd` are contiguous, but they might
6372 /// // still be contained within _different allocated objects_, in which case
6373 /// // creating this slice is undefined behavior.
6374 /// slice::from_raw_parts(fst.as_ptr(), fst.len() + snd.len())
6379 /// // `a` and `b` are different allocated objects...
6382 /// // ... which may nevertheless be laid out contiguously in memory: | a | b |
6383 /// let _ = join_slices(slice::from_ref(&a), slice::from_ref(&b)); // UB
6387 /// [valid]: ../../std/ptr/index.html#safety
6388 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
6389 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
6391 #[stable(feature = "rust1", since = "1.0.0")]
6392 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
6393 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
6395 mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
6396 "attempt to create slice covering at least half the address space"
6398 // SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
6399 unsafe { &*ptr::slice_from_raw_parts(data, len) }
6402 /// Performs the same functionality as [`from_raw_parts`], except that a
6403 /// mutable slice is returned.
6407 /// Behavior is undefined if any of the following conditions are violated:
6409 /// * `data` must be [valid] for boths reads and writes for `len * mem::size_of::<T>()` many bytes,
6410 /// and it must be properly aligned. This means in particular:
6412 /// * The entire memory range of this slice must be contained within a single allocated object!
6413 /// Slices can never span across multiple allocated objects.
6414 /// * `data` must be non-null and aligned even for zero-length slices. One
6415 /// reason for this is that enum layout optimizations may rely on references
6416 /// (including slices of any length) being aligned and non-null to distinguish
6417 /// them from other data. You can obtain a pointer that is usable as `data`
6418 /// for zero-length slices using [`NonNull::dangling()`].
6420 /// * The memory referenced by the returned slice must not be accessed through any other pointer
6421 /// (not derived from the return value) for the duration of lifetime `'a`.
6422 /// Both read and write accesses are forbidden.
6424 /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
6425 /// See the safety documentation of [`pointer::offset`].
6427 /// [valid]: ../../std/ptr/index.html#safety
6428 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
6429 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
6430 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
6432 #[stable(feature = "rust1", since = "1.0.0")]
6433 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
6434 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
6436 mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
6437 "attempt to create slice covering at least half the address space"
6439 // SAFETY: the caller must uphold the safety contract for `from_raw_parts_mut`.
6440 unsafe { &mut *ptr::slice_from_raw_parts_mut(data, len) }
6443 /// Converts a reference to T into a slice of length 1 (without copying).
6444 #[stable(feature = "from_ref", since = "1.28.0")]
6445 pub fn from_ref<T>(s: &T) -> &[T] {
6446 // SAFETY: a reference is guaranteed to be valid for reads. The returned
6447 // reference cannot be mutated as it is an immutable reference.
6448 // `mem::size_of::<T>()` cannot be larger than `isize::MAX`.
6449 // Thus the call to `from_raw_parts` is safe.
6450 unsafe { from_raw_parts(s, 1) }
6453 /// Converts a reference to T into a slice of length 1 (without copying).
6454 #[stable(feature = "from_ref", since = "1.28.0")]
6455 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
6456 // SAFETY: a mutable reference is guaranteed to be valid for writes.
6457 // The reference cannot be accessed by another pointer as it is an mutable reference.
6458 // `mem::size_of::<T>()` cannot be larger than `isize::MAX`.
6459 // Thus the call to `from_raw_parts_mut` is safe.
6460 unsafe { from_raw_parts_mut(s, 1) }
6463 // This function is public only because there is no other way to unit test heapsort.
6464 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "none")]
6466 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
6468 F: FnMut(&T, &T) -> bool,
6470 sort::heapsort(v, &mut is_less);
6474 // Comparison traits
6478 /// Calls implementation provided memcmp.
6480 /// Interprets the data as u8.
6482 /// Returns 0 for equal, < 0 for less than and > 0 for greater
6484 // FIXME(#32610): Return type should be c_int
6485 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
6488 #[stable(feature = "rust1", since = "1.0.0")]
6489 impl<A, B> PartialEq<[B]> for [A]
6493 fn eq(&self, other: &[B]) -> bool {
6494 SlicePartialEq::equal(self, other)
6497 fn ne(&self, other: &[B]) -> bool {
6498 SlicePartialEq::not_equal(self, other)
6502 #[stable(feature = "rust1", since = "1.0.0")]
6503 impl<T: Eq> Eq for [T] {}
6505 /// Implements comparison of vectors lexicographically.
6506 #[stable(feature = "rust1", since = "1.0.0")]
6507 impl<T: Ord> Ord for [T] {
6508 fn cmp(&self, other: &[T]) -> Ordering {
6509 SliceOrd::compare(self, other)
6513 /// Implements comparison of vectors lexicographically.
6514 #[stable(feature = "rust1", since = "1.0.0")]
6515 impl<T: PartialOrd> PartialOrd for [T] {
6516 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
6517 SlicePartialOrd::partial_compare(self, other)
6522 // intermediate trait for specialization of slice's PartialEq
6523 trait SlicePartialEq<B> {
6524 fn equal(&self, other: &[B]) -> bool;
6526 fn not_equal(&self, other: &[B]) -> bool {
6531 // Generic slice equality
6532 impl<A, B> SlicePartialEq<B> for [A]
6536 default fn equal(&self, other: &[B]) -> bool {
6537 if self.len() != other.len() {
6541 self.iter().zip(other.iter()).all(|(x, y)| x == y)
6545 // Use an equal-pointer optimization when types are `Eq`
6546 impl<A> SlicePartialEq<A> for [A]
6548 A: PartialEq<A> + Eq,
6550 default fn equal(&self, other: &[A]) -> bool {
6551 if self.len() != other.len() {
6555 // While performance would suffer if `guaranteed_eq` just returned `false`
6556 // for all arguments, correctness and return value of this function are not affected.
6557 if self.as_ptr().guaranteed_eq(other.as_ptr()) {
6561 self.iter().zip(other.iter()).all(|(x, y)| x == y)
6565 // Use memcmp for bytewise equality when the types allow
6566 impl<A> SlicePartialEq<A> for [A]
6568 A: PartialEq<A> + BytewiseEquality,
6570 fn equal(&self, other: &[A]) -> bool {
6571 if self.len() != other.len() {
6575 // While performance would suffer if `guaranteed_eq` just returned `false`
6576 // for all arguments, correctness and return value of this function are not affected.
6577 if self.as_ptr().guaranteed_eq(other.as_ptr()) {
6580 // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
6581 // The two slices have been checked to have the same size above.
6583 let size = mem::size_of_val(self);
6584 memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
6590 // intermediate trait for specialization of slice's PartialOrd
6591 trait SlicePartialOrd: Sized {
6592 fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
6595 impl<A: PartialOrd> SlicePartialOrd for A {
6596 default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
6597 let l = cmp::min(left.len(), right.len());
6599 // Slice to the loop iteration range to enable bound check
6600 // elimination in the compiler
6601 let lhs = &left[..l];
6602 let rhs = &right[..l];
6605 match lhs[i].partial_cmp(&rhs[i]) {
6606 Some(Ordering::Equal) => (),
6607 non_eq => return non_eq,
6611 left.len().partial_cmp(&right.len())
6615 // This is the impl that we would like to have. Unfortunately it's not sound.
6616 // See `partial_ord_slice.rs`.
6618 impl<A> SlicePartialOrd for A
6622 default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
6623 Some(SliceOrd::compare(left, right))
6628 impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
6629 fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
6630 Some(SliceOrd::compare(left, right))
6634 trait AlwaysApplicableOrd: SliceOrd + Ord {}
6636 macro_rules! always_applicable_ord {
6637 ($([$($p:tt)*] $t:ty,)*) => {
6638 $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
6642 always_applicable_ord! {
6643 [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
6644 [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
6646 [T: ?Sized] *const T, [T: ?Sized] *mut T,
6647 [T: AlwaysApplicableOrd] &T,
6648 [T: AlwaysApplicableOrd] &mut T,
6649 [T: AlwaysApplicableOrd] Option<T>,
6653 // intermediate trait for specialization of slice's Ord
6654 trait SliceOrd: Sized {
6655 fn compare(left: &[Self], right: &[Self]) -> Ordering;
6658 impl<A: Ord> SliceOrd for A {
6659 default fn compare(left: &[Self], right: &[Self]) -> Ordering {
6660 let l = cmp::min(left.len(), right.len());
6662 // Slice to the loop iteration range to enable bound check
6663 // elimination in the compiler
6664 let lhs = &left[..l];
6665 let rhs = &right[..l];
6668 match lhs[i].cmp(&rhs[i]) {
6669 Ordering::Equal => (),
6670 non_eq => return non_eq,
6674 left.len().cmp(&right.len())
6678 // memcmp compares a sequence of unsigned bytes lexicographically.
6679 // this matches the order we want for [u8], but no others (not even [i8]).
6680 impl SliceOrd for u8 {
6682 fn compare(left: &[Self], right: &[Self]) -> Ordering {
6684 // SAFETY: `left` and `right` are references and are thus guaranteed to be valid.
6685 // We use the minimum of both lengths which guarantees that both regions are
6686 // valid for reads in that interval.
6687 unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
6689 left.len().cmp(&right.len())
6690 } else if order < 0 {
6699 /// Trait implemented for types that can be compared for equality using
6700 /// their bytewise representation
6701 trait BytewiseEquality: Eq + Copy {}
6703 macro_rules! impl_marker_for {
6704 ($traitname:ident, $($ty:ty)*) => {
6706 impl $traitname for $ty { }
6711 impl_marker_for!(BytewiseEquality,
6712 u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
6715 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
6716 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
6717 // SAFETY: the caller must guarantee that `i` is in bounds
6718 // of the underlying slice, so `i` cannot overflow an `isize`,
6719 // and the returned references is guaranteed to refer to an element
6720 // of the slice and thus guaranteed to be valid.
6721 unsafe { &*self.ptr.as_ptr().add(i) }
6723 fn may_have_side_effect() -> bool {
6729 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
6730 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
6731 // SAFETY: see comments for `Iter::get_unchecked`.
6732 unsafe { &mut *self.ptr.as_ptr().add(i) }
6734 fn may_have_side_effect() -> bool {
6739 trait SliceContains: Sized {
6740 fn slice_contains(&self, x: &[Self]) -> bool;
6743 impl<T> SliceContains for T
6747 default fn slice_contains(&self, x: &[Self]) -> bool {
6748 x.iter().any(|y| *y == *self)
6752 impl SliceContains for u8 {
6753 fn slice_contains(&self, x: &[Self]) -> bool {
6754 memchr::memchr(*self, x).is_some()
6758 impl SliceContains for i8 {
6759 fn slice_contains(&self, x: &[Self]) -> bool {
6760 let byte = *self as u8;
6761 // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
6762 // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
6763 // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
6764 // than `isize::MAX`. The returned slice is never mutated.
6765 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
6766 memchr::memchr(byte, bytes).is_some()