1 // ignore-tidy-filelength
3 //! Slice management and manipulation.
5 //! For more details see [`std::slice`].
7 //! [`std::slice`]: ../../std/slice/index.html
9 #![stable(feature = "rust1", since = "1.0.0")]
11 // How this module is organized.
13 // The library infrastructure for slices is fairly messy. There's
14 // a lot of stuff defined here. Let's keep it clean.
16 // The layout of this file is thus:
18 // * Inherent methods. This is where most of the slice API resides.
19 // * Implementations of a few common traits with important slice ops.
20 // * Definitions of a bunch of iterators.
22 // * The `raw` and `bytes` submodules.
23 // * Boilerplate trait implementations.
25 use crate::cmp::Ordering::{self, Less, Equal, Greater};
28 use crate::intrinsics::assume;
31 use crate::ops::{FnMut, Try, self};
32 use crate::option::Option;
33 use crate::option::Option::{None, Some};
34 use crate::result::Result;
35 use crate::result::Result::{Ok, Err};
38 use crate::marker::{Copy, Send, Sync, Sized, self};
40 #[unstable(feature = "slice_internals", issue = "0",
41 reason = "exposed from core to be reused in std; use the memchr crate")]
42 /// Pure rust memchr implementation, taken from rust-memchr
49 union Repr<'a, T: 'a> {
51 rust_mut: &'a mut [T],
68 /// Returns the number of elements in the slice.
73 /// let a = [1, 2, 3];
74 /// assert_eq!(a.len(), 3);
76 #[stable(feature = "rust1", since = "1.0.0")]
78 #[rustc_const_unstable(feature = "const_slice_len")]
79 pub const fn len(&self) -> usize {
81 Repr { rust: self }.raw.len
85 /// Returns `true` if the slice has a length of 0.
90 /// let a = [1, 2, 3];
91 /// assert!(!a.is_empty());
93 #[stable(feature = "rust1", since = "1.0.0")]
95 #[rustc_const_unstable(feature = "const_slice_len")]
96 pub const fn is_empty(&self) -> bool {
100 /// Returns the first element of the slice, or `None` if it is empty.
105 /// let v = [10, 40, 30];
106 /// assert_eq!(Some(&10), v.first());
108 /// let w: &[i32] = &[];
109 /// assert_eq!(None, w.first());
111 #[stable(feature = "rust1", since = "1.0.0")]
113 pub fn first(&self) -> Option<&T> {
117 /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty.
122 /// let x = &mut [0, 1, 2];
124 /// if let Some(first) = x.first_mut() {
127 /// assert_eq!(x, &[5, 1, 2]);
129 #[stable(feature = "rust1", since = "1.0.0")]
131 pub fn first_mut(&mut self) -> Option<&mut T> {
135 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
140 /// let x = &[0, 1, 2];
142 /// if let Some((first, elements)) = x.split_first() {
143 /// assert_eq!(first, &0);
144 /// assert_eq!(elements, &[1, 2]);
147 #[stable(feature = "slice_splits", since = "1.5.0")]
149 pub fn split_first(&self) -> Option<(&T, &[T])> {
150 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
153 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
158 /// let x = &mut [0, 1, 2];
160 /// if let Some((first, elements)) = x.split_first_mut() {
165 /// assert_eq!(x, &[3, 4, 5]);
167 #[stable(feature = "slice_splits", since = "1.5.0")]
169 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
170 if self.is_empty() { None } else {
171 let split = self.split_at_mut(1);
172 Some((&mut split.0[0], split.1))
176 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
181 /// let x = &[0, 1, 2];
183 /// if let Some((last, elements)) = x.split_last() {
184 /// assert_eq!(last, &2);
185 /// assert_eq!(elements, &[0, 1]);
188 #[stable(feature = "slice_splits", since = "1.5.0")]
190 pub fn split_last(&self) -> Option<(&T, &[T])> {
191 let len = self.len();
192 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
195 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
200 /// let x = &mut [0, 1, 2];
202 /// if let Some((last, elements)) = x.split_last_mut() {
207 /// assert_eq!(x, &[4, 5, 3]);
209 #[stable(feature = "slice_splits", since = "1.5.0")]
211 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
212 let len = self.len();
213 if len == 0 { None } else {
214 let split = self.split_at_mut(len - 1);
215 Some((&mut split.1[0], split.0))
220 /// Returns the last element of the slice, or `None` if it is empty.
225 /// let v = [10, 40, 30];
226 /// assert_eq!(Some(&30), v.last());
228 /// let w: &[i32] = &[];
229 /// assert_eq!(None, w.last());
231 #[stable(feature = "rust1", since = "1.0.0")]
233 pub fn last(&self) -> Option<&T> {
234 let last_idx = self.len().checked_sub(1)?;
238 /// Returns a mutable pointer to the last item in the slice.
243 /// let x = &mut [0, 1, 2];
245 /// if let Some(last) = x.last_mut() {
248 /// assert_eq!(x, &[0, 1, 10]);
250 #[stable(feature = "rust1", since = "1.0.0")]
252 pub fn last_mut(&mut self) -> Option<&mut T> {
253 let last_idx = self.len().checked_sub(1)?;
254 self.get_mut(last_idx)
257 /// Returns a reference to an element or subslice depending on the type of
260 /// - If given a position, returns a reference to the element at that
261 /// position or `None` if out of bounds.
262 /// - If given a range, returns the subslice corresponding to that range,
263 /// or `None` if out of bounds.
268 /// let v = [10, 40, 30];
269 /// assert_eq!(Some(&40), v.get(1));
270 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
271 /// assert_eq!(None, v.get(3));
272 /// assert_eq!(None, v.get(0..4));
274 #[stable(feature = "rust1", since = "1.0.0")]
276 pub fn get<I>(&self, index: I) -> Option<&I::Output>
277 where I: SliceIndex<Self>
282 /// Returns a mutable reference to an element or subslice depending on the
283 /// type of index (see [`get`]) or `None` if the index is out of bounds.
285 /// [`get`]: #method.get
290 /// let x = &mut [0, 1, 2];
292 /// if let Some(elem) = x.get_mut(1) {
295 /// assert_eq!(x, &[0, 42, 2]);
297 #[stable(feature = "rust1", since = "1.0.0")]
299 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
300 where I: SliceIndex<Self>
305 /// Returns a reference to an element or subslice, without doing bounds
308 /// This is generally not recommended, use with caution! For a safe
309 /// alternative see [`get`].
311 /// [`get`]: #method.get
316 /// let x = &[1, 2, 4];
319 /// assert_eq!(x.get_unchecked(1), &2);
322 #[stable(feature = "rust1", since = "1.0.0")]
324 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
325 where I: SliceIndex<Self>
327 index.get_unchecked(self)
330 /// Returns a mutable reference to an element or subslice, without doing
333 /// This is generally not recommended, use with caution! For a safe
334 /// alternative see [`get_mut`].
336 /// [`get_mut`]: #method.get_mut
341 /// let x = &mut [1, 2, 4];
344 /// let elem = x.get_unchecked_mut(1);
347 /// assert_eq!(x, &[1, 13, 4]);
349 #[stable(feature = "rust1", since = "1.0.0")]
351 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
352 where I: SliceIndex<Self>
354 index.get_unchecked_mut(self)
357 /// Returns a raw pointer to the slice's buffer.
359 /// The caller must ensure that the slice outlives the pointer this
360 /// function returns, or else it will end up pointing to garbage.
362 /// The caller must also ensure that the memory the pointer (non-transitively) points to
363 /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
364 /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
366 /// Modifying the container referenced by this slice may cause its buffer
367 /// to be reallocated, which would also make any pointers to it invalid.
372 /// let x = &[1, 2, 4];
373 /// let x_ptr = x.as_ptr();
376 /// for i in 0..x.len() {
377 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
382 /// [`as_mut_ptr`]: #method.as_mut_ptr
383 #[stable(feature = "rust1", since = "1.0.0")]
385 pub const fn as_ptr(&self) -> *const T {
386 self as *const [T] as *const T
389 /// Returns an unsafe mutable pointer to the slice's buffer.
391 /// The caller must ensure that the slice outlives the pointer this
392 /// function returns, or else it will end up pointing to garbage.
394 /// Modifying the container referenced by this slice may cause its buffer
395 /// to be reallocated, which would also make any pointers to it invalid.
400 /// let x = &mut [1, 2, 4];
401 /// let x_ptr = x.as_mut_ptr();
404 /// for i in 0..x.len() {
405 /// *x_ptr.add(i) += 2;
408 /// assert_eq!(x, &[3, 4, 6]);
410 #[stable(feature = "rust1", since = "1.0.0")]
412 pub fn as_mut_ptr(&mut self) -> *mut T {
413 self as *mut [T] as *mut T
416 /// Swaps two elements in the slice.
420 /// * a - The index of the first element
421 /// * b - The index of the second element
425 /// Panics if `a` or `b` are out of bounds.
430 /// let mut v = ["a", "b", "c", "d"];
432 /// assert!(v == ["a", "d", "c", "b"]);
434 #[stable(feature = "rust1", since = "1.0.0")]
436 pub fn swap(&mut self, a: usize, b: usize) {
438 // Can't take two mutable loans from one vector, so instead just cast
439 // them to their raw pointers to do the swap
440 let pa: *mut T = &mut self[a];
441 let pb: *mut T = &mut self[b];
446 /// Reverses the order of elements in the slice, in place.
451 /// let mut v = [1, 2, 3];
453 /// assert!(v == [3, 2, 1]);
455 #[stable(feature = "rust1", since = "1.0.0")]
457 pub fn reverse(&mut self) {
458 let mut i: usize = 0;
461 // For very small types, all the individual reads in the normal
462 // path perform poorly. We can do better, given efficient unaligned
463 // load/store, by loading a larger chunk and reversing a register.
465 // Ideally LLVM would do this for us, as it knows better than we do
466 // whether unaligned reads are efficient (since that changes between
467 // different ARM versions, for example) and what the best chunk size
468 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
469 // the loop, so we need to do this ourselves. (Hypothesis: reverse
470 // is troublesome because the sides can be aligned differently --
471 // will be, when the length is odd -- so there's no way of emitting
472 // pre- and postludes to use fully-aligned SIMD in the middle.)
475 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
477 if fast_unaligned && mem::size_of::<T>() == 1 {
478 // Use the llvm.bswap intrinsic to reverse u8s in a usize
479 let chunk = mem::size_of::<usize>();
480 while i + chunk - 1 < ln / 2 {
482 let pa: *mut T = self.get_unchecked_mut(i);
483 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
484 let va = ptr::read_unaligned(pa as *mut usize);
485 let vb = ptr::read_unaligned(pb as *mut usize);
486 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
487 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
493 if fast_unaligned && mem::size_of::<T>() == 2 {
494 // Use rotate-by-16 to reverse u16s in a u32
495 let chunk = mem::size_of::<u32>() / 2;
496 while i + chunk - 1 < ln / 2 {
498 let pa: *mut T = self.get_unchecked_mut(i);
499 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
500 let va = ptr::read_unaligned(pa as *mut u32);
501 let vb = ptr::read_unaligned(pb as *mut u32);
502 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
503 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
510 // Unsafe swap to avoid the bounds check in safe swap.
512 let pa: *mut T = self.get_unchecked_mut(i);
513 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
520 /// Returns an iterator over the slice.
525 /// let x = &[1, 2, 4];
526 /// let mut iterator = x.iter();
528 /// assert_eq!(iterator.next(), Some(&1));
529 /// assert_eq!(iterator.next(), Some(&2));
530 /// assert_eq!(iterator.next(), Some(&4));
531 /// assert_eq!(iterator.next(), None);
533 #[stable(feature = "rust1", since = "1.0.0")]
535 pub fn iter(&self) -> Iter<'_, T> {
537 let ptr = self.as_ptr();
538 assume(!ptr.is_null());
540 let end = if mem::size_of::<T>() == 0 {
541 (ptr as *const u8).wrapping_add(self.len()) as *const T
549 _marker: marker::PhantomData
554 /// Returns an iterator that allows modifying each value.
559 /// let x = &mut [1, 2, 4];
560 /// for elem in x.iter_mut() {
563 /// assert_eq!(x, &[3, 4, 6]);
565 #[stable(feature = "rust1", since = "1.0.0")]
567 pub fn iter_mut(&mut self) -> IterMut<'_, T> {
569 let ptr = self.as_mut_ptr();
570 assume(!ptr.is_null());
572 let end = if mem::size_of::<T>() == 0 {
573 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
581 _marker: marker::PhantomData
586 /// Returns an iterator over all contiguous windows of length
587 /// `size`. The windows overlap. If the slice is shorter than
588 /// `size`, the iterator returns no values.
592 /// Panics if `size` is 0.
597 /// let slice = ['r', 'u', 's', 't'];
598 /// let mut iter = slice.windows(2);
599 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
600 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
601 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
602 /// assert!(iter.next().is_none());
605 /// If the slice is shorter than `size`:
608 /// let slice = ['f', 'o', 'o'];
609 /// let mut iter = slice.windows(4);
610 /// assert!(iter.next().is_none());
612 #[stable(feature = "rust1", since = "1.0.0")]
614 pub fn windows(&self, size: usize) -> Windows<'_, T> {
616 Windows { v: self, size }
619 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
620 /// beginning of the slice.
622 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
623 /// slice, then the last chunk will not have length `chunk_size`.
625 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
626 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
627 /// slice of the slice.
631 /// Panics if `chunk_size` is 0.
636 /// let slice = ['l', 'o', 'r', 'e', 'm'];
637 /// let mut iter = slice.chunks(2);
638 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
639 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
640 /// assert_eq!(iter.next().unwrap(), &['m']);
641 /// assert!(iter.next().is_none());
644 /// [`chunks_exact`]: #method.chunks_exact
645 /// [`rchunks`]: #method.rchunks
646 #[stable(feature = "rust1", since = "1.0.0")]
648 pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
649 assert!(chunk_size != 0);
650 Chunks { v: self, chunk_size }
653 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
654 /// beginning of the slice.
656 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
657 /// length of the slice, then the last chunk will not have length `chunk_size`.
659 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
660 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
661 /// the end of the slice of the slice.
665 /// Panics if `chunk_size` is 0.
670 /// let v = &mut [0, 0, 0, 0, 0];
671 /// let mut count = 1;
673 /// for chunk in v.chunks_mut(2) {
674 /// for elem in chunk.iter_mut() {
679 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
682 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
683 /// [`rchunks_mut`]: #method.rchunks_mut
684 #[stable(feature = "rust1", since = "1.0.0")]
686 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> {
687 assert!(chunk_size != 0);
688 ChunksMut { v: self, chunk_size }
691 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
692 /// beginning of the slice.
694 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
695 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
696 /// from the `remainder` function of the iterator.
698 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
699 /// resulting code better than in the case of [`chunks`].
701 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
702 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
706 /// Panics if `chunk_size` is 0.
711 /// let slice = ['l', 'o', 'r', 'e', 'm'];
712 /// let mut iter = slice.chunks_exact(2);
713 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
714 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
715 /// assert!(iter.next().is_none());
716 /// assert_eq!(iter.remainder(), &['m']);
719 /// [`chunks`]: #method.chunks
720 /// [`rchunks_exact`]: #method.rchunks_exact
721 #[stable(feature = "chunks_exact", since = "1.31.0")]
723 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
724 assert!(chunk_size != 0);
725 let rem = self.len() % chunk_size;
726 let len = self.len() - rem;
727 let (fst, snd) = self.split_at(len);
728 ChunksExact { v: fst, rem: snd, chunk_size }
731 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
732 /// beginning of the slice.
734 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
735 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
736 /// retrieved from the `into_remainder` function of the iterator.
738 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
739 /// resulting code better than in the case of [`chunks_mut`].
741 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
742 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
743 /// the slice of the slice.
747 /// Panics if `chunk_size` is 0.
752 /// let v = &mut [0, 0, 0, 0, 0];
753 /// let mut count = 1;
755 /// for chunk in v.chunks_exact_mut(2) {
756 /// for elem in chunk.iter_mut() {
761 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
764 /// [`chunks_mut`]: #method.chunks_mut
765 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
766 #[stable(feature = "chunks_exact", since = "1.31.0")]
768 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> {
769 assert!(chunk_size != 0);
770 let rem = self.len() % chunk_size;
771 let len = self.len() - rem;
772 let (fst, snd) = self.split_at_mut(len);
773 ChunksExactMut { v: fst, rem: snd, chunk_size }
776 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
779 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
780 /// slice, then the last chunk will not have length `chunk_size`.
782 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
783 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
788 /// Panics if `chunk_size` is 0.
793 /// let slice = ['l', 'o', 'r', 'e', 'm'];
794 /// let mut iter = slice.rchunks(2);
795 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
796 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
797 /// assert_eq!(iter.next().unwrap(), &['l']);
798 /// assert!(iter.next().is_none());
801 /// [`rchunks_exact`]: #method.rchunks_exact
802 /// [`chunks`]: #method.chunks
803 #[stable(feature = "rchunks", since = "1.31.0")]
805 pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
806 assert!(chunk_size != 0);
807 RChunks { v: self, chunk_size }
810 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
813 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
814 /// length of the slice, then the last chunk will not have length `chunk_size`.
816 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
817 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
818 /// beginning of the slice.
822 /// Panics if `chunk_size` is 0.
827 /// let v = &mut [0, 0, 0, 0, 0];
828 /// let mut count = 1;
830 /// for chunk in v.rchunks_mut(2) {
831 /// for elem in chunk.iter_mut() {
836 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
839 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
840 /// [`chunks_mut`]: #method.chunks_mut
841 #[stable(feature = "rchunks", since = "1.31.0")]
843 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> {
844 assert!(chunk_size != 0);
845 RChunksMut { v: self, chunk_size }
848 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
849 /// end of the slice.
851 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
852 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
853 /// from the `remainder` function of the iterator.
855 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
856 /// resulting code better than in the case of [`chunks`].
858 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
859 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
864 /// Panics if `chunk_size` is 0.
869 /// let slice = ['l', 'o', 'r', 'e', 'm'];
870 /// let mut iter = slice.rchunks_exact(2);
871 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
872 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
873 /// assert!(iter.next().is_none());
874 /// assert_eq!(iter.remainder(), &['l']);
877 /// [`chunks`]: #method.chunks
878 /// [`rchunks`]: #method.rchunks
879 /// [`chunks_exact`]: #method.chunks_exact
880 #[stable(feature = "rchunks", since = "1.31.0")]
882 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> {
883 assert!(chunk_size != 0);
884 let rem = self.len() % chunk_size;
885 let (fst, snd) = self.split_at(rem);
886 RChunksExact { v: snd, rem: fst, chunk_size }
889 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
892 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
893 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
894 /// retrieved from the `into_remainder` function of the iterator.
896 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
897 /// resulting code better than in the case of [`chunks_mut`].
899 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
900 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
905 /// Panics if `chunk_size` is 0.
910 /// let v = &mut [0, 0, 0, 0, 0];
911 /// let mut count = 1;
913 /// for chunk in v.rchunks_exact_mut(2) {
914 /// for elem in chunk.iter_mut() {
919 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
922 /// [`chunks_mut`]: #method.chunks_mut
923 /// [`rchunks_mut`]: #method.rchunks_mut
924 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
925 #[stable(feature = "rchunks", since = "1.31.0")]
927 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> {
928 assert!(chunk_size != 0);
929 let rem = self.len() % chunk_size;
930 let (fst, snd) = self.split_at_mut(rem);
931 RChunksExactMut { v: snd, rem: fst, chunk_size }
934 /// Divides one slice into two at an index.
936 /// The first will contain all indices from `[0, mid)` (excluding
937 /// the index `mid` itself) and the second will contain all
938 /// indices from `[mid, len)` (excluding the index `len` itself).
942 /// Panics if `mid > len`.
947 /// let v = [1, 2, 3, 4, 5, 6];
950 /// let (left, right) = v.split_at(0);
951 /// assert!(left == []);
952 /// assert!(right == [1, 2, 3, 4, 5, 6]);
956 /// let (left, right) = v.split_at(2);
957 /// assert!(left == [1, 2]);
958 /// assert!(right == [3, 4, 5, 6]);
962 /// let (left, right) = v.split_at(6);
963 /// assert!(left == [1, 2, 3, 4, 5, 6]);
964 /// assert!(right == []);
967 #[stable(feature = "rust1", since = "1.0.0")]
969 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
970 (&self[..mid], &self[mid..])
973 /// Divides one mutable slice into two at an index.
975 /// The first will contain all indices from `[0, mid)` (excluding
976 /// the index `mid` itself) and the second will contain all
977 /// indices from `[mid, len)` (excluding the index `len` itself).
981 /// Panics if `mid > len`.
986 /// let mut v = [1, 0, 3, 0, 5, 6];
987 /// // scoped to restrict the lifetime of the borrows
989 /// let (left, right) = v.split_at_mut(2);
990 /// assert!(left == [1, 0]);
991 /// assert!(right == [3, 0, 5, 6]);
995 /// assert!(v == [1, 2, 3, 4, 5, 6]);
997 #[stable(feature = "rust1", since = "1.0.0")]
999 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
1000 let len = self.len();
1001 let ptr = self.as_mut_ptr();
1004 assert!(mid <= len);
1006 (from_raw_parts_mut(ptr, mid),
1007 from_raw_parts_mut(ptr.add(mid), len - mid))
1011 /// Returns an iterator over subslices separated by elements that match
1012 /// `pred`. The matched element is not contained in the subslices.
1017 /// let slice = [10, 40, 33, 20];
1018 /// let mut iter = slice.split(|num| num % 3 == 0);
1020 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1021 /// assert_eq!(iter.next().unwrap(), &[20]);
1022 /// assert!(iter.next().is_none());
1025 /// If the first element is matched, an empty slice will be the first item
1026 /// returned by the iterator. Similarly, if the last element in the slice
1027 /// is matched, an empty slice will be the last item returned by the
1031 /// let slice = [10, 40, 33];
1032 /// let mut iter = slice.split(|num| num % 3 == 0);
1034 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1035 /// assert_eq!(iter.next().unwrap(), &[]);
1036 /// assert!(iter.next().is_none());
1039 /// If two matched elements are directly adjacent, an empty slice will be
1040 /// present between them:
1043 /// let slice = [10, 6, 33, 20];
1044 /// let mut iter = slice.split(|num| num % 3 == 0);
1046 /// assert_eq!(iter.next().unwrap(), &[10]);
1047 /// assert_eq!(iter.next().unwrap(), &[]);
1048 /// assert_eq!(iter.next().unwrap(), &[20]);
1049 /// assert!(iter.next().is_none());
1051 #[stable(feature = "rust1", since = "1.0.0")]
1053 pub fn split<F>(&self, pred: F) -> Split<'_, T, F>
1054 where F: FnMut(&T) -> bool
1063 /// Returns an iterator over mutable subslices separated by elements that
1064 /// match `pred`. The matched element is not contained in the subslices.
1069 /// let mut v = [10, 40, 30, 20, 60, 50];
1071 /// for group in v.split_mut(|num| *num % 3 == 0) {
1074 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1076 #[stable(feature = "rust1", since = "1.0.0")]
1078 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<'_, T, F>
1079 where F: FnMut(&T) -> bool
1081 SplitMut { v: self, pred, finished: false }
1084 /// Returns an iterator over subslices separated by elements that match
1085 /// `pred`, starting at the end of the slice and working backwards.
1086 /// The matched element is not contained in the subslices.
1091 /// let slice = [11, 22, 33, 0, 44, 55];
1092 /// let mut iter = slice.rsplit(|num| *num == 0);
1094 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1095 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1096 /// assert_eq!(iter.next(), None);
1099 /// As with `split()`, if the first or last element is matched, an empty
1100 /// slice will be the first (or last) item returned by the iterator.
1103 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1104 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1105 /// assert_eq!(it.next().unwrap(), &[]);
1106 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1107 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1108 /// assert_eq!(it.next().unwrap(), &[]);
1109 /// assert_eq!(it.next(), None);
1111 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1113 pub fn rsplit<F>(&self, pred: F) -> RSplit<'_, T, F>
1114 where F: FnMut(&T) -> bool
1116 RSplit { inner: self.split(pred) }
1119 /// Returns an iterator over mutable subslices separated by elements that
1120 /// match `pred`, starting at the end of the slice and working
1121 /// backwards. The matched element is not contained in the subslices.
1126 /// let mut v = [100, 400, 300, 200, 600, 500];
1128 /// let mut count = 0;
1129 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1131 /// group[0] = count;
1133 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1136 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1138 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<'_, T, F>
1139 where F: FnMut(&T) -> bool
1141 RSplitMut { inner: self.split_mut(pred) }
1144 /// Returns an iterator over subslices separated by elements that match
1145 /// `pred`, limited to returning at most `n` items. The matched element is
1146 /// not contained in the subslices.
1148 /// The last element returned, if any, will contain the remainder of the
1153 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1154 /// `[20, 60, 50]`):
1157 /// let v = [10, 40, 30, 20, 60, 50];
1159 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1160 /// println!("{:?}", group);
1163 #[stable(feature = "rust1", since = "1.0.0")]
1165 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<'_, T, F>
1166 where F: FnMut(&T) -> bool
1169 inner: GenericSplitN {
1170 iter: self.split(pred),
1176 /// Returns an iterator over subslices separated by elements that match
1177 /// `pred`, limited to returning at most `n` items. The matched element is
1178 /// not contained in the subslices.
1180 /// The last element returned, if any, will contain the remainder of the
1186 /// let mut v = [10, 40, 30, 20, 60, 50];
1188 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1191 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1193 #[stable(feature = "rust1", since = "1.0.0")]
1195 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<'_, T, F>
1196 where F: FnMut(&T) -> bool
1199 inner: GenericSplitN {
1200 iter: self.split_mut(pred),
1206 /// Returns an iterator over subslices separated by elements that match
1207 /// `pred` limited to returning at most `n` items. This starts at the end of
1208 /// the slice and works backwards. The matched element is not contained in
1211 /// The last element returned, if any, will contain the remainder of the
1216 /// Print the slice split once, starting from the end, by numbers divisible
1217 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1220 /// let v = [10, 40, 30, 20, 60, 50];
1222 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1223 /// println!("{:?}", group);
1226 #[stable(feature = "rust1", since = "1.0.0")]
1228 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<'_, T, F>
1229 where F: FnMut(&T) -> bool
1232 inner: GenericSplitN {
1233 iter: self.rsplit(pred),
1239 /// Returns an iterator over subslices separated by elements that match
1240 /// `pred` limited to returning at most `n` items. This starts at the end of
1241 /// the slice and works backwards. The matched element is not contained in
1244 /// The last element returned, if any, will contain the remainder of the
1250 /// let mut s = [10, 40, 30, 20, 60, 50];
1252 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1255 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1257 #[stable(feature = "rust1", since = "1.0.0")]
1259 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<'_, T, F>
1260 where F: FnMut(&T) -> bool
1263 inner: GenericSplitN {
1264 iter: self.rsplit_mut(pred),
1270 /// Returns `true` if the slice contains an element with the given value.
1275 /// let v = [10, 40, 30];
1276 /// assert!(v.contains(&30));
1277 /// assert!(!v.contains(&50));
1279 #[stable(feature = "rust1", since = "1.0.0")]
1280 pub fn contains(&self, x: &T) -> bool
1283 x.slice_contains(self)
1286 /// Returns `true` if `needle` is a prefix of the slice.
1291 /// let v = [10, 40, 30];
1292 /// assert!(v.starts_with(&[10]));
1293 /// assert!(v.starts_with(&[10, 40]));
1294 /// assert!(!v.starts_with(&[50]));
1295 /// assert!(!v.starts_with(&[10, 50]));
1298 /// Always returns `true` if `needle` is an empty slice:
1301 /// let v = &[10, 40, 30];
1302 /// assert!(v.starts_with(&[]));
1303 /// let v: &[u8] = &[];
1304 /// assert!(v.starts_with(&[]));
1306 #[stable(feature = "rust1", since = "1.0.0")]
1307 pub fn starts_with(&self, needle: &[T]) -> bool
1310 let n = needle.len();
// `self.len() >= n` short-circuits, so the `..n` index below can never be
// out of bounds.
1311 self.len() >= n && needle == &self[..n]
1314 /// Returns `true` if `needle` is a suffix of the slice.
1319 /// let v = [10, 40, 30];
1320 /// assert!(v.ends_with(&[30]));
1321 /// assert!(v.ends_with(&[40, 30]));
1322 /// assert!(!v.ends_with(&[50]));
1323 /// assert!(!v.ends_with(&[50, 30]));
1326 /// Always returns `true` if `needle` is an empty slice:
1329 /// let v = &[10, 40, 30];
1330 /// assert!(v.ends_with(&[]));
1331 /// let v: &[u8] = &[];
1332 /// assert!(v.ends_with(&[]));
1334 #[stable(feature = "rust1", since = "1.0.0")]
1335 pub fn ends_with(&self, needle: &[T]) -> bool
1338 let (m, n) = (self.len(), needle.len());
// `m >= n` short-circuits, so `m - n` cannot underflow and the suffix slice
// is always in bounds.
1339 m >= n && needle == &self[m-n..]
1342 /// Binary searches this sorted slice for a given element.
1344 /// If the value is found then [`Result::Ok`] is returned, containing the
1345 /// index of the matching element. If there are multiple matches, then any
1346 /// one of the matches could be returned. If the value is not found then
1347 /// [`Result::Err`] is returned, containing the index where a matching
1348 /// element could be inserted while maintaining sorted order.
1352 /// Looks up a series of four elements. The first is found, with a
1353 /// uniquely determined position; the second and third are not
1354 /// found; the fourth could match any position in `[1, 4]`.
1357 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1359 /// assert_eq!(s.binary_search(&13), Ok(9));
1360 /// assert_eq!(s.binary_search(&4), Err(7));
1361 /// assert_eq!(s.binary_search(&100), Err(13));
1362 /// let r = s.binary_search(&1);
1363 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1365 #[stable(feature = "rust1", since = "1.0.0")]
1366 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
// Thin wrapper: reuse the comparator-based search with `Ord::cmp` against `x`.
1369 self.binary_search_by(|p| p.cmp(x))
1372 /// Binary searches this sorted slice with a comparator function.
1374 /// The comparator function should implement an order consistent
1375 /// with the sort order of the underlying slice, returning an
1376 /// order code that indicates whether its argument is `Less`,
1377 /// `Equal` or `Greater` than the desired target.
1379 /// If the value is found then [`Result::Ok`] is returned, containing the
1380 /// index of the matching element. If there are multiple matches, then any
1381 /// one of the matches could be returned. If the value is not found then
1382 /// [`Result::Err`] is returned, containing the index where a matching
1383 /// element could be inserted while maintaining sorted order.
1387 /// Looks up a series of four elements. The first is found, with a
1388 /// uniquely determined position; the second and third are not
1389 /// found; the fourth could match any position in `[1, 4]`.
1392 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1395 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1397 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1399 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1401 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1402 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1404 #[stable(feature = "rust1", since = "1.0.0")]
1406 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1407 where F: FnMut(&'a T) -> Ordering
1410 let mut size = s.len();
1414 let mut base = 0usize;
1416 let half = size / 2;
1417 let mid = base + half;
1418 // mid is always in [0, size), that means mid is >= 0 and < size.
1419 // mid >= 0: by definition
1420 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
// SAFETY: per the invariant above, `mid < size <= s.len()`, so
// `get_unchecked(mid)` is in bounds.
1421 let cmp = f(unsafe { s.get_unchecked(mid) });
// Keep the left half when the probe is `Greater` than the target side;
// otherwise move `base` up to `mid`.
1422 base = if cmp == Greater { base } else { mid };
1425 // base is always in [0, size) because base <= mid.
1426 let cmp = f(unsafe { s.get_unchecked(base) });
// Final probe: on `Less` the insertion point is just after `base`.
1427 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1431 /// Binary searches this sorted slice with a key extraction function.
1433 /// Assumes that the slice is sorted by the key, for instance with
1434 /// [`sort_by_key`] using the same key extraction function.
1436 /// If the value is found then [`Result::Ok`] is returned, containing the
1437 /// index of the matching element. If there are multiple matches, then any
1438 /// one of the matches could be returned. If the value is not found then
1439 /// [`Result::Err`] is returned, containing the index where a matching
1440 /// element could be inserted while maintaining sorted order.
1442 /// [`sort_by_key`]: #method.sort_by_key
1446 /// Looks up a series of four elements in a slice of pairs sorted by
1447 /// their second elements. The first is found, with a uniquely
1448 /// determined position; the second and third are not found; the
1449 /// fourth could match any position in `[1, 4]`.
1452 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1453 ///          (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1454 ///          (1, 21), (2, 34), (4, 55)];
1456 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1457 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1458 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1459 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1460 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1462 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1464 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1465 where F: FnMut(&'a T) -> B,
// Adapter: extract the key of each probed element and order it against `b`.
1468 self.binary_search_by(|k| f(k).cmp(b))
1471 /// Sorts the slice, but may not preserve the order of equal elements.
1473 /// This sort is unstable (i.e., may reorder equal elements), in-place
1474 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1476 /// # Current implementation
1478 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1479 /// which combines the fast average case of randomized quicksort with the fast worst case of
1480 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1481 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1482 /// deterministic behavior.
1484 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1485 /// slice consists of several concatenated sorted sequences.
1490 /// let mut v = [-5, 4, 1, -3, 2];
1492 /// v.sort_unstable();
1493 /// assert!(v == [-5, -3, 1, 2, 4]);
1496 /// [pdqsort]: https://github.com/orlp/pdqsort
1497 #[stable(feature = "sort_unstable", since = "1.20.0")]
1499 pub fn sort_unstable(&mut self)
// Delegate to the shared quicksort driver; the "is-less" relation is
// `Ord::lt`, matching the natural order of `T`.
1502 sort::quicksort(self, |a, b| a.lt(b));
1505 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1508 /// This sort is unstable (i.e., may reorder equal elements), in-place
1509 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1511 /// The comparator function must define a total ordering for the elements in the slice. If
1512 /// the ordering is not total, the order of the elements is unspecified. An order is a
1513 /// total order if it is (for all a, b and c):
1515 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1516 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1518 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1519 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1522 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1523 /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
1524 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1527 /// # Current implementation
1529 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1530 /// which combines the fast average case of randomized quicksort with the fast worst case of
1531 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1532 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1533 /// deterministic behavior.
1535 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1536 /// slice consists of several concatenated sorted sequences.
1541 /// let mut v = [5, 4, 1, 3, 2];
1542 /// v.sort_unstable_by(|a, b| a.cmp(b));
1543 /// assert!(v == [1, 2, 3, 4, 5]);
1545 /// // reverse sorting
1546 /// v.sort_unstable_by(|a, b| b.cmp(a));
1547 /// assert!(v == [5, 4, 3, 2, 1]);
1550 /// [pdqsort]: https://github.com/orlp/pdqsort
1551 #[stable(feature = "sort_unstable", since = "1.20.0")]
1553 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1554 where F: FnMut(&T, &T) -> Ordering
// The quicksort driver only needs an "is-less" predicate, so the three-way
// `Ordering` comparator is reduced to `== Less` here.
1556 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1559 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1562 /// This sort is unstable (i.e., may reorder equal elements), in-place
1563 /// (i.e., does not allocate), and `O(m n log(m n))` worst-case, where the key function is
1566 /// # Current implementation
1568 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1569 /// which combines the fast average case of randomized quicksort with the fast worst case of
1570 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1571 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1572 /// deterministic behavior.
1574 /// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key)
1575 /// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in
1576 /// cases where the key function is expensive.
1581 /// let mut v = [-5i32, 4, 1, -3, 2];
1583 /// v.sort_unstable_by_key(|k| k.abs());
1584 /// assert!(v == [1, 2, -3, 4, -5]);
1587 /// [pdqsort]: https://github.com/orlp/pdqsort
1588 #[stable(feature = "sort_unstable", since = "1.20.0")]
1590 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1591 where F: FnMut(&T) -> K, K: Ord
// Note: `f` is invoked twice per comparison (once per operand), which is
// why the docs steer expensive key functions toward `sort_by_cached_key`.
1593 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1596 /// Reorder the slice such that the element at `index` is at its final sorted position.
1598 /// This reordering has the additional property that any value at position `i < index` will be
1599 /// less than or equal to any value at a position `j > index`. Additionally, this reordering is
1600 /// unstable (i.e. any number of equal elements may end up at position `index`), in-place
1601 /// (i.e. does not allocate), and `O(n)` worst-case. This function is also known as "kth
1602 /// element" in other libraries. It returns a triplet of the following values: all elements less
1603 /// than the one at the given index, the value at the given index, and all elements greater than
1604 /// the one at the given index.
1606 /// # Current implementation
1608 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1609 /// used for [`sort_unstable`].
1611 /// [`sort_unstable`]: #method.sort_unstable
1615 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1620 /// #![feature(slice_partition_at_index)]
1622 /// let mut v = [-5i32, 4, 1, -3, 2];
1624 /// // Find the median
1625 /// v.partition_at_index(2);
1627 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1628 /// // about the specified index.
1629 /// assert!(v == [-3, -5, 1, 2, 4] ||
1630 ///         v == [-5, -3, 1, 2, 4] ||
1631 ///         v == [-3, -5, 1, 4, 2] ||
1632 ///         v == [-5, -3, 1, 4, 2]);
1634 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1636 pub fn partition_at_index(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T])
// Natural-order selection: the "is-less" predicate is `Ord::lt`.
1639 let mut f = |a: &T, b: &T| a.lt(b);
1640 sort::partition_at_index(self, index, &mut f)
1643 /// Reorder the slice with a comparator function such that the element at `index` is at its
1644 /// final sorted position.
1646 /// This reordering has the additional property that any value at position `i < index` will be
1647 /// less than or equal to any value at a position `j > index` using the comparator function.
1648 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1649 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1650 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1651 /// values: all elements less than the one at the given index, the value at the given index,
1652 /// and all elements greater than the one at the given index, using the provided comparator
1655 /// # Current implementation
1657 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1658 /// used for [`sort_unstable`].
1660 /// [`sort_unstable`]: #method.sort_unstable
1664 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1669 /// #![feature(slice_partition_at_index)]
1671 /// let mut v = [-5i32, 4, 1, -3, 2];
1673 /// // Find the median as if the slice were sorted in descending order.
1674 /// v.partition_at_index_by(2, |a, b| b.cmp(a));
1676 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1677 /// // about the specified index.
1678 /// assert!(v == [2, 4, 1, -5, -3] ||
1679 ///         v == [2, 4, 1, -3, -5] ||
1680 ///         v == [4, 2, 1, -5, -3] ||
1681 ///         v == [4, 2, 1, -3, -5]);
1683 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1685 pub fn partition_at_index_by<F>(&mut self, index: usize, mut compare: F)
1686 -> (&mut [T], &mut T, &mut [T])
1687 where F: FnMut(&T, &T) -> Ordering
// Reduce the three-way comparator to the boolean "is-less" form the
// quickselect driver expects.
1689 let mut f = |a: &T, b: &T| compare(a, b) == Less;
1690 sort::partition_at_index(self, index, &mut f)
1693 /// Reorder the slice with a key extraction function such that the element at `index` is at its
1694 /// final sorted position.
1696 /// This reordering has the additional property that any value at position `i < index` will be
1697 /// less than or equal to any value at a position `j > index` using the key extraction function.
1698 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1699 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1700 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1701 /// values: all elements less than the one at the given index, the value at the given index, and
1702 /// all elements greater than the one at the given index, using the provided key extraction
1705 /// # Current implementation
1707 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1708 /// used for [`sort_unstable`].
1710 /// [`sort_unstable`]: #method.sort_unstable
1714 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1719 /// #![feature(slice_partition_at_index)]
1721 /// let mut v = [-5i32, 4, 1, -3, 2];
1723 /// // Return the median as if the array were sorted according to absolute value.
1724 /// v.partition_at_index_by_key(2, |a| a.abs());
1726 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1727 /// // about the specified index.
1728 /// assert!(v == [1, 2, -3, 4, -5] ||
1729 ///         v == [1, 2, -3, -5, 4] ||
1730 ///         v == [2, 1, -3, 4, -5] ||
1731 ///         v == [2, 1, -3, -5, 4]);
1733 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1735 pub fn partition_at_index_by_key<K, F>(&mut self, index: usize, mut f: F)
1736 -> (&mut [T], &mut T, &mut [T])
1737 where F: FnMut(&T) -> K, K: Ord
// As with `sort_unstable_by_key`, the key function runs twice per comparison.
1739 let mut g = |a: &T, b: &T| f(a).lt(&f(b));
1740 sort::partition_at_index(self, index, &mut g)
1743 /// Moves all consecutive repeated elements to the end of the slice according to the
1744 /// [`PartialEq`] trait implementation.
1746 /// Returns two slices. The first contains no consecutive repeated elements.
1747 /// The second contains all the duplicates in no specified order.
1749 /// If the slice is sorted, the first returned slice contains no duplicates.
1754 /// #![feature(slice_partition_dedup)]
1756 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1758 /// let (dedup, duplicates) = slice.partition_dedup();
1760 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1761 /// assert_eq!(duplicates, [2, 3, 1]);
1763 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1765 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
// Equality-based convenience wrapper around the general `_by` form.
1768 self.partition_dedup_by(|a, b| a == b)
1771 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1772 /// a given equality relation.
1774 /// Returns two slices. The first contains no consecutive repeated elements.
1775 /// The second contains all the duplicates in no specified order.
1777 /// The `same_bucket` function is passed references to two elements from the slice and
1778 /// must determine if the elements compare equal. The elements are passed in opposite order
1779 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1780 /// at the end of the slice.
1782 /// If the slice is sorted, the first returned slice contains no duplicates.
1787 /// #![feature(slice_partition_dedup)]
1789 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1791 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1793 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1794 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1796 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1798 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1799 where F: FnMut(&mut T, &mut T) -> bool
1801 // Although we have a mutable reference to `self`, we cannot make
1802 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1803 // must ensure that the slice is in a valid state at all times.
1805 // The way that we handle this is by using swaps; we iterate
1806 // over all the elements, swapping as we go so that at the end
1807 // the elements we wish to keep are in the front, and those we
1808 // wish to reject are at the back. We can then split the slice.
1809 // This operation is still O(n).
1811 // Example: We start in this state, where `r` represents "next
1812 // read" and `w` represents "next_write".
1815 // +---+---+---+---+---+---+
1816 // | 0 | 1 | 1 | 2 | 3 | 3 |
1817 // +---+---+---+---+---+---+
1820 // Comparing self[r] against self[w-1], this is not a duplicate, so
1821 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1822 // r and w, leaving us with:
1825 // +---+---+---+---+---+---+
1826 // | 0 | 1 | 1 | 2 | 3 | 3 |
1827 // +---+---+---+---+---+---+
1830 // Comparing self[r] against self[w-1], this value is a duplicate,
1831 // so we increment `r` but leave everything else unchanged:
1834 // +---+---+---+---+---+---+
1835 // | 0 | 1 | 1 | 2 | 3 | 3 |
1836 // +---+---+---+---+---+---+
1839 // Comparing self[r] against self[w-1], this is not a duplicate,
1840 // so swap self[r] and self[w] and advance r and w:
1843 // +---+---+---+---+---+---+
1844 // | 0 | 1 | 2 | 1 | 3 | 3 |
1845 // +---+---+---+---+---+---+
1848 // Not a duplicate, repeat:
1851 // +---+---+---+---+---+---+
1852 // | 0 | 1 | 2 | 3 | 1 | 3 |
1853 // +---+---+---+---+---+---+
1856 // Duplicate, advance r. End of slice. Split at w.
1858 let len = self.len();
// Trivially-deduped case: everything stays in the "kept" half.
1860 return (self, &mut [])
1863 let ptr = self.as_mut_ptr();
1864 let mut next_read: usize = 1;
1865 let mut next_write: usize = 1;
1868 // Avoid bounds checks by using raw pointers.
1869 while next_read < len {
1870 let ptr_read = ptr.add(next_read);
1871 let prev_ptr_write = ptr.add(next_write - 1);
1872 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
1873 if next_read != next_write {
1874 let ptr_write = prev_ptr_write.offset(1);
1875 mem::swap(&mut *ptr_read, &mut *ptr_write);
// Everything before `next_write` is kept; everything from there on is the
// duplicates bucket described in the diagram above.
1883 self.split_at_mut(next_write)
1886 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1887 /// to the same key.
1889 /// Returns two slices. The first contains no consecutive repeated elements.
1890 /// The second contains all the duplicates in no specified order.
1892 /// If the slice is sorted, the first returned slice contains no duplicates.
1897 /// #![feature(slice_partition_dedup)]
1899 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
1901 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
1903 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
1904 /// assert_eq!(duplicates, [21, 30, 13]);
1906 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1908 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
1909 where F: FnMut(&mut T) -> K,
// Key-based convenience wrapper: two elements are "the same" when their
// extracted keys compare equal.
1912 self.partition_dedup_by(|a, b| key(a) == key(b))
1915 /// Rotates the slice in-place such that the first `mid` elements of the
1916 /// slice move to the end while the last `self.len() - mid` elements move to
1917 /// the front. After calling `rotate_left`, the element previously at index
1918 /// `mid` will become the first element in the slice.
1922 /// This function will panic if `mid` is greater than the length of the
1923 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
1928 /// Takes linear (in `self.len()`) time.
1933 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1934 /// a.rotate_left(2);
1935 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
1938 /// Rotating a subslice:
1941 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1942 /// a[1..5].rotate_left(1);
1943 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
1945 #[stable(feature = "slice_rotate", since = "1.26.0")]
1946 pub fn rotate_left(&mut self, mid: usize) {
1947 assert!(mid <= self.len());
// `k` is the number of trailing elements that move to the front.
1948 let k = self.len() - mid;
// The assert above guarantees `mid + k == len`, so `p.add(mid)` together
// with the `mid`/`k` element counts stays inside the slice.
1951 let p = self.as_mut_ptr();
1952 rotate::ptr_rotate(mid, p.add(mid), k);
1956 /// Rotates the slice in-place such that the first `self.len() - k`
1957 /// elements of the slice move to the end while the last `k` elements move
1958 /// to the front. After calling `rotate_right`, the element previously at
1959 /// index `self.len() - k` will become the first element in the slice.
1963 /// This function will panic if `k` is greater than the length of the
1964 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
1969 /// Takes linear (in `self.len()`) time.
1974 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1975 /// a.rotate_right(2);
1976 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
1979 /// Rotate a subslice:
1982 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1983 /// a[1..5].rotate_right(1);
1984 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
1986 #[stable(feature = "slice_rotate", since = "1.26.0")]
1987 pub fn rotate_right(&mut self, k: usize) {
1988 assert!(k <= self.len());
// Mirror image of `rotate_left`: rotating right by `k` is rotating left
// by `len - k`, so reuse the same pointer-rotate primitive.
1989 let mid = self.len() - k;
1992 let p = self.as_mut_ptr();
1993 rotate::ptr_rotate(mid, p.add(mid), k);
1997 /// Copies the elements from `src` into `self`.
1999 /// The length of `src` must be the same as `self`.
2001 /// If `src` implements `Copy`, it can be more performant to use
2002 /// [`copy_from_slice`].
2006 /// This function will panic if the two slices have different lengths.
2010 /// Cloning two elements from a slice into another:
2013 /// let src = [1, 2, 3, 4];
2014 /// let mut dst = [0, 0];
2016 /// // Because the slices have to be the same length,
2017 /// // we slice the source slice from four elements
2018 /// // to two. It will panic if we don't do this.
2019 /// dst.clone_from_slice(&src[2..]);
2021 /// assert_eq!(src, [1, 2, 3, 4]);
2022 /// assert_eq!(dst, [3, 4]);
2025 /// Rust enforces that there can only be one mutable reference with no
2026 /// immutable references to a particular piece of data in a particular
2027 /// scope. Because of this, attempting to use `clone_from_slice` on a
2028 /// single slice will result in a compile failure:
2031 /// let mut slice = [1, 2, 3, 4, 5];
2033 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
2036 /// To work around this, we can use [`split_at_mut`] to create two distinct
2037 /// sub-slices from a slice:
2040 /// let mut slice = [1, 2, 3, 4, 5];
2043 /// let (left, right) = slice.split_at_mut(2);
2044 /// left.clone_from_slice(&right[1..]);
2047 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2050 /// [`copy_from_slice`]: #method.copy_from_slice
2051 /// [`split_at_mut`]: #method.split_at_mut
2052 #[stable(feature = "clone_from_slice", since = "1.7.0")]
2053 pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
2054 assert!(self.len() == src.len(),
2055 "destination and source slices have different lengths");
2056 // NOTE: We need to explicitly slice them to the same length
2057 // for bounds checking to be elided, and the optimizer will
2058 // generate memcpy for simple cases (for example T = u8).
2059 let len = self.len();
2060 let src = &src[..len];
// `clone_from` (rather than `clone` + assign) lets `T` reuse the existing
// destination value's resources where its impl supports that.
2062 self[i].clone_from(&src[i]);
2067 /// Copies all elements from `src` into `self`, using a memcpy.
2069 /// The length of `src` must be the same as `self`.
2071 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
2075 /// This function will panic if the two slices have different lengths.
2079 /// Copying two elements from a slice into another:
2082 /// let src = [1, 2, 3, 4];
2083 /// let mut dst = [0, 0];
2085 /// // Because the slices have to be the same length,
2086 /// // we slice the source slice from four elements
2087 /// // to two. It will panic if we don't do this.
2088 /// dst.copy_from_slice(&src[2..]);
2090 /// assert_eq!(src, [1, 2, 3, 4]);
2091 /// assert_eq!(dst, [3, 4]);
2094 /// Rust enforces that there can only be one mutable reference with no
2095 /// immutable references to a particular piece of data in a particular
2096 /// scope. Because of this, attempting to use `copy_from_slice` on a
2097 /// single slice will result in a compile failure:
2100 /// let mut slice = [1, 2, 3, 4, 5];
2102 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
2105 /// To work around this, we can use [`split_at_mut`] to create two distinct
2106 /// sub-slices from a slice:
2109 /// let mut slice = [1, 2, 3, 4, 5];
2112 /// let (left, right) = slice.split_at_mut(2);
2113 /// left.copy_from_slice(&right[1..]);
2116 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2119 /// [`clone_from_slice`]: #method.clone_from_slice
2120 /// [`split_at_mut`]: #method.split_at_mut
2121 #[stable(feature = "copy_from_slice", since = "1.9.0")]
2122 pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
2123 assert_eq!(self.len(), src.len(),
2124 "destination and source slices have different lengths");
// The two slices are distinct borrows (`&mut self` vs `&src`), so they
// cannot overlap and `copy_nonoverlapping` is the right primitive; the
// length assert above makes the element count valid for both.
2126 ptr::copy_nonoverlapping(
2127 src.as_ptr(), self.as_mut_ptr(), self.len());
2131 /// Copies elements from one part of the slice to another part of itself,
2132 /// using a memmove.
2134 /// `src` is the range within `self` to copy from. `dest` is the starting
2135 /// index of the range within `self` to copy to, which will have the same
2136 /// length as `src`. The two ranges may overlap. The ends of the two ranges
2137 /// must be less than or equal to `self.len()`.
2141 /// This function will panic if either range exceeds the end of the slice,
2142 /// or if the end of `src` is before the start.
2146 /// Copying four bytes within a slice:
2149 /// # #![feature(copy_within)]
2150 /// let mut bytes = *b"Hello, World!";
2152 /// bytes.copy_within(1..5, 8);
2154 /// assert_eq!(&bytes, b"Hello, Wello!");
2156 #[unstable(feature = "copy_within", issue = "54236")]
2157 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
// Resolve the generic range into concrete start/end indices.
2161 let src_start = match src.start_bound() {
2162 ops::Bound::Included(&n) => n,
// NOTE(review): the `.unwrap_or_else` arms below presumably guard a
// `checked_add(1)` on the exclusive/inclusive bound (overflow → index
// failure) — confirm against the full source.
2163 ops::Bound::Excluded(&n) => n
2165 .unwrap_or_else(|| slice_index_overflow_fail()),
2166 ops::Bound::Unbounded => 0,
2168 let src_end = match src.end_bound() {
2169 ops::Bound::Included(&n) => n
2171 .unwrap_or_else(|| slice_index_overflow_fail()),
2172 ops::Bound::Excluded(&n) => n,
2173 ops::Bound::Unbounded => self.len(),
2175 assert!(src_start <= src_end, "src end is before src start");
2176 assert!(src_end <= self.len(), "src is out of bounds");
2177 let count = src_end - src_start;
// `dest <= len - count` ensures the whole destination range fits, so the
// unchecked pointer accesses below are in bounds; overlap is fine since
// the copy is a memmove.
2178 assert!(dest <= self.len() - count, "dest is out of bounds");
2181 self.get_unchecked(src_start),
2182 self.get_unchecked_mut(dest),
2188 /// Swaps all elements in `self` with those in `other`.
2190 /// The length of `other` must be the same as `self`.
2194 /// This function will panic if the two slices have different lengths.
2198 /// Swapping two elements across slices:
2201 /// let mut slice1 = [0, 0];
2202 /// let mut slice2 = [1, 2, 3, 4];
2204 /// slice1.swap_with_slice(&mut slice2[2..]);
2206 /// assert_eq!(slice1, [3, 4]);
2207 /// assert_eq!(slice2, [1, 2, 0, 0]);
2210 /// Rust enforces that there can only be one mutable reference to a
2211 /// particular piece of data in a particular scope. Because of this,
2212 /// attempting to use `swap_with_slice` on a single slice will result in
2213 /// a compile failure:
2216 /// let mut slice = [1, 2, 3, 4, 5];
2217 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2220 /// To work around this, we can use [`split_at_mut`] to create two distinct
2221 /// mutable sub-slices from a slice:
2224 /// let mut slice = [1, 2, 3, 4, 5];
2227 /// let (left, right) = slice.split_at_mut(2);
2228 /// left.swap_with_slice(&mut right[1..]);
2231 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2234 /// [`split_at_mut`]: #method.split_at_mut
2235 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2236 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2237 assert!(self.len() == other.len(),
2238 "destination and source slices have different lengths");
// Both sides are exclusive `&mut` borrows, so the regions cannot overlap
// and `swap_nonoverlapping` is sound; the assert fixes the element count.
2240 ptr::swap_nonoverlapping(
2241 self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2245 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2246 fn align_to_offsets<U>(&self) -> (usize, usize) {
2247 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2248 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2250 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2251 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2252 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2254 // Formula to calculate this is:
2256 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2257 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2259 // Expanded and simplified:
2261 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2262 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2264 // Luckily since all this is constant-evaluated... performance here matters not!
2266 fn gcd(a: usize, b: usize) -> usize {
2267 use crate::intrinsics;
2268 // iterative stein’s algorithm
2269 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2270 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
// SAFETY of `cttz_nonzero`: the two early returns below guarantee both
// operands are non-zero before the intrinsic is reached.
2271 let (ctz_a, mut ctz_b) = unsafe {
2272 if a == 0 { return b; }
2273 if b == 0 { return a; }
2274 (intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b))
2276 let k = ctz_a.min(ctz_b);
2277 let mut a = a >> ctz_a;
2280 // remove all factors of 2 from b
2283 mem::swap(&mut a, &mut b);
2290 ctz_b = intrinsics::cttz_nonzero(b);
2295 let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
2296 let ts: usize = mem::size_of::<U>() / gcd;
2297 let us: usize = mem::size_of::<T>() / gcd;
2299 // Armed with this knowledge, we can find how many `U`s we can fit!
2300 let us_len = self.len() / ts * us;
2301 // And how many `T`s will be in the trailing slice!
2302 let ts_len = self.len() % ts;
2306 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2309 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2310 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2311 /// middle slice the greatest length possible for a given type and input slice, but only
2312 /// your algorithm's performance should depend on that, not its correctness.
2314 /// This method has no purpose when either input element `T` or output element `U` are
2315 /// zero-sized and will return the original slice without splitting anything.
2319 /// This method is essentially a `transmute` with respect to the elements in the returned
2320 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2328 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2329 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2330 /// // less_efficient_algorithm_for_bytes(prefix);
2331 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2332 /// // less_efficient_algorithm_for_bytes(suffix);
2335 #[stable(feature = "slice_align_to", since = "1.30.0")]
2336 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2337 // Note that most of this function will be constant-evaluated,
2338 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2339 // handle ZSTs specially, which is – don't handle them at all.
2340 return (self, &[], &[])
2343 // First, find at what point do we split between the first and 2nd slice. Easy with
2344 // ptr.align_offset.
2345 let ptr = self.as_ptr();
2346 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
// If alignment is unattainable within the slice, everything goes in the prefix.
// NOTE(review): the early-return body for this branch (original lines 2348-2349)
// is missing from this extract.
2347 if offset > self.len() {
2350 let (left, rest) = self.split_at(offset);
2351 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2352 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// Middle slice reinterprets the aligned region as `U`s; suffix is the last
// `ts_len` `T`s that did not fill a whole `U` multiple. (The tuple's first
// element, `left`, is on a missing line.)
2354 from_raw_parts(rest.as_ptr() as *const U, us_len),
2355 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len))
2359 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2362 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2363 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2364 /// middle slice the greatest length possible for a given type and input slice, but only
2365 /// your algorithm's performance should depend on that, not its correctness.
2367 /// This method has no purpose when either input element `T` or output element `U` are
2368 /// zero-sized and will return the original slice without splitting anything.
2372 /// This method is essentially a `transmute` with respect to the elements in the returned
2373 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2381 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2382 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2383 /// // less_efficient_algorithm_for_bytes(prefix);
2384 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2385 /// // less_efficient_algorithm_for_bytes(suffix);
2388 #[stable(feature = "slice_align_to", since = "1.30.0")]
2389 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2390 // Note that most of this function will be constant-evaluated,
2391 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2392 // handle ZSTs specially, which is – don't handle them at all.
2393 return (self, &mut [], &mut [])
2396 // First, find at what point do we split between the first and 2nd slice. Easy with
2397 // ptr.align_offset.
2398 let ptr = self.as_ptr();
2399 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
// Mirror of `align_to`, but yielding mutable slices via split_at_mut /
// from_raw_parts_mut so the three parts borrow disjoint regions.
2400 if offset > self.len() {
2401 (self, &mut [], &mut [])
2403 let (left, rest) = self.split_at_mut(offset);
2404 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2405 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2406 let mut_ptr = rest.as_mut_ptr();
// NOTE(review): the `(left,` element of the returned tuple (original line 2407)
// is missing from this extract.
2408 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2409 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len))
2413 /// Checks if the elements of this slice are sorted.
2415 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2416 /// slice yields exactly zero or one element, `true` is returned.
2418 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2419 /// implies that this function returns `false` if any two consecutive items are not
2425 /// #![feature(is_sorted)]
2426 /// let empty: [i32; 0] = [];
2428 /// assert!([1, 2, 2, 9].is_sorted());
2429 /// assert!(![1, 3, 2, 4].is_sorted());
2430 /// assert!([0].is_sorted());
2431 /// assert!(empty.is_sorted());
2432 /// assert!(![0.0, 1.0, std::f32::NAN].is_sorted());
2435 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
// NOTE(review): the `where T: PartialOrd` bound (original lines 2437-2439) is
// missing from this extract. Delegates to `is_sorted_by` with `partial_cmp`,
// so incomparable neighbours (e.g. NaN) make the result `false`.
2436 pub fn is_sorted(&self) -> bool
2440 self.is_sorted_by(|a, b| a.partial_cmp(b))
2443 /// Checks if the elements of this slice are sorted using the given comparator function.
2445 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2446 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2447 /// [`is_sorted`]; see its documentation for more information.
2449 /// [`is_sorted`]: #method.is_sorted
2450 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2451 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2453 F: FnMut(&T, &T) -> Option<Ordering>
// Forwards to the iterator's `is_sorted_by`; the closure re-derefs because the
// iterator yields `&T` while `compare` takes `&T` directly.
2455 self.iter().is_sorted_by(|a, b| compare(*a, *b))
2458 /// Checks if the elements of this slice are sorted using the given key extraction function.
2460 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2461 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2462 /// documentation for more information.
2464 /// [`is_sorted`]: #method.is_sorted
2469 /// #![feature(is_sorted)]
2471 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2472 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2475 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
// NOTE(review): the where-clause (original lines 2477-2480, presumably
// `F: FnMut(&T) -> K, K: PartialOrd`) is missing from this extract.
2476 pub fn is_sorted_by_key<F, K>(&self, mut f: F) -> bool
2481 self.is_sorted_by(|a, b| f(a).partial_cmp(&f(b)))
// Inherent ASCII helpers on `[u8]`; the `slice_u8` lang item attaches this impl
// block to the primitive byte-slice type.
2485 #[lang = "slice_u8"]
2488 /// Checks if all bytes in this slice are within the ASCII range.
2489 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2491 pub fn is_ascii(&self) -> bool {
2492 self.iter().all(|b| b.is_ascii())
2495 /// Checks that two slices are an ASCII case-insensitive match.
2497 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2498 /// but without allocating and copying temporaries.
2499 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
// Length check first makes the zip comparison exhaustive (zip alone would stop
// at the shorter slice and miss a length mismatch).
2501 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
2502 self.len() == other.len() &&
2503 self.iter().zip(other).all(|(a, b)| {
2504 a.eq_ignore_ascii_case(b)
2508 /// Converts this slice to its ASCII upper case equivalent in-place.
2510 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2511 /// but non-ASCII letters are unchanged.
2513 /// To return a new uppercased value without modifying the existing one, use
2514 /// [`to_ascii_uppercase`].
2516 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2517 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
// NOTE(review): the `for byte in self { … }` loop header around line 2521 is
// missing from this extract; only the per-byte call is visible.
2519 pub fn make_ascii_uppercase(&mut self) {
2521 byte.make_ascii_uppercase();
2525 /// Converts this slice to its ASCII lower case equivalent in-place.
2527 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2528 /// but non-ASCII letters are unchanged.
2530 /// To return a new lowercased value without modifying the existing one, use
2531 /// [`to_ascii_lowercase`].
2533 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2534 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2536 pub fn make_ascii_lowercase(&mut self) {
2538 byte.make_ascii_lowercase();
// `a[i]` / `&mut a[i]` on slices: both operators delegate to the `SliceIndex`
// trait, so one pair of impls covers usize and every range type.
2544 #[stable(feature = "rust1", since = "1.0.0")]
2545 impl<T, I> ops::Index<I> for [T]
2546 where I: SliceIndex<[T]>
2548 type Output = I::Output;
// NOTE(review): the `index.index(self)` body (around original line 2552) is
// missing from this extract.
2551 fn index(&self, index: I) -> &I::Output {
2556 #[stable(feature = "rust1", since = "1.0.0")]
2557 impl<T, I> ops::IndexMut<I> for [T]
2558 where I: SliceIndex<[T]>
2561 fn index_mut(&mut self, index: I) -> &mut I::Output {
2562 index.index_mut(self)
// Cold, never-inlined panic helpers keep the panic formatting machinery out of
// the hot indexing paths (their attributes are on lines missing from this extract).
2568 fn slice_index_len_fail(index: usize, len: usize) -> ! {
2569 panic!("index {} out of range for slice of length {}", index, len);
2574 fn slice_index_order_fail(index: usize, end: usize) -> ! {
2575 panic!("slice index starts at {} but ends at {}", index, end);
2580 fn slice_index_overflow_fail() -> ! {
2581 panic!("attempted to index slice up to maximum usize");
// Sealed-trait pattern: `SliceIndex` requires `private_slice_index::Sealed`,
// and only the types listed here implement it, so downstream crates cannot add
// their own slice-index types.
2584 mod private_slice_index {
2586 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2589 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2590 impl Sealed for usize {}
2591 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2592 impl Sealed for ops::Range<usize> {}
2593 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2594 impl Sealed for ops::RangeTo<usize> {}
2595 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2596 impl Sealed for ops::RangeFrom<usize> {}
2597 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2598 impl Sealed for ops::RangeFull {}
2599 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2600 impl Sealed for ops::RangeInclusive<usize> {}
2601 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2602 impl Sealed for ops::RangeToInclusive<usize> {}
2605 /// A helper trait used for indexing operations.
// Sealed (see `private_slice_index`). The six methods are the full matrix of
// {shared, mutable} × {checked Option, unchecked, panicking} access; all the
// per-type impls below provide them.
2606 #[stable(feature = "slice_get_slice", since = "1.28.0")]
// Custom diagnostics: tailored messages when `str`/`String` is indexed with an
// integer (not allowed — strings are indexed by ranges only).
2607 #[rustc_on_unimplemented(
2610 label = "string indices are ranges of `usize`",
2613 all(any(T = "str", T = "&str", T = "std::string::String"), _Self="{integer}"),
2614 note="you can use `.chars().nth()` or `.bytes().nth()`
2615 see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
2617 message = "the type `{T}` cannot be indexed by `{Self}`",
2618 label = "slice indices are of type `usize` or ranges of `usize`",
2620 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2621 /// The output type returned by methods.
2622 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2623 type Output: ?Sized;
2625 /// Returns a shared reference to the output at this location, if in
2627 #[unstable(feature = "slice_index_methods", issue = "0")]
2628 fn get(self, slice: &T) -> Option<&Self::Output>;
2630 /// Returns a mutable reference to the output at this location, if in
2632 #[unstable(feature = "slice_index_methods", issue = "0")]
2633 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2635 /// Returns a shared reference to the output at this location, without
2636 /// performing any bounds checking.
2637 #[unstable(feature = "slice_index_methods", issue = "0")]
2638 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2640 /// Returns a mutable reference to the output at this location, without
2641 /// performing any bounds checking.
2642 #[unstable(feature = "slice_index_methods", issue = "0")]
2643 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2645 /// Returns a shared reference to the output at this location, panicking
2646 /// if out of bounds.
2647 #[unstable(feature = "slice_index_methods", issue = "0")]
2648 fn index(self, slice: &T) -> &Self::Output;
2650 /// Returns a mutable reference to the output at this location, panicking
2651 /// if out of bounds.
2652 #[unstable(feature = "slice_index_methods", issue = "0")]
2653 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
// ---------------------------------------------------------------------------
// SliceIndex impls. Strategy: `usize` and `Range<usize>` contain the real
// logic; every other range type normalizes itself into a `Range<usize>` and
// delegates. NOTE(review): throughout this extract the embedded numbering has
// gaps, so `else`/`None` arms and closing braces are frequently missing.
// ---------------------------------------------------------------------------
2656 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2657 impl<T> SliceIndex<[T]> for usize {
// `usize` yields a single element `&T` / `&mut T`.
2661 fn get(self, slice: &[T]) -> Option<&T> {
2662 if self < slice.len() {
// In-bounds check just performed, so the unchecked access is sound.
2664 Some(self.get_unchecked(slice))
2672 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2673 if self < slice.len() {
2675 Some(self.get_unchecked_mut(slice))
2683 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
2684 &*slice.as_ptr().add(self)
2688 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2689 &mut *slice.as_mut_ptr().add(self)
2693 fn index(self, slice: &[T]) -> &T {
2694 // N.B., use intrinsic indexing
2699 fn index_mut(self, slice: &mut [T]) -> &mut T {
2700 // N.B., use intrinsic indexing
// `Range<usize>` — the canonical range impl all other range types reduce to.
2705 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2706 impl<T> SliceIndex<[T]> for ops::Range<usize> {
2710 fn get(self, slice: &[T]) -> Option<&[T]> {
2711 if self.start > self.end || self.end > slice.len() {
2715 Some(self.get_unchecked(slice))
2721 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2722 if self.start > self.end || self.end > slice.len() {
2726 Some(self.get_unchecked_mut(slice))
2732 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2733 from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2737 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2738 from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
// Panicking variants distinguish the two failure modes so the message names
// the actual problem: inverted range vs. end past the slice length.
2742 fn index(self, slice: &[T]) -> &[T] {
2743 if self.start > self.end {
2744 slice_index_order_fail(self.start, self.end);
2745 } else if self.end > slice.len() {
2746 slice_index_len_fail(self.end, slice.len());
2749 self.get_unchecked(slice)
2754 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2755 if self.start > self.end {
2756 slice_index_order_fail(self.start, self.end);
2757 } else if self.end > slice.len() {
2758 slice_index_len_fail(self.end, slice.len());
2761 self.get_unchecked_mut(slice)
// `..end` == `0..end`.
2766 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2767 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2771 fn get(self, slice: &[T]) -> Option<&[T]> {
2772 (0..self.end).get(slice)
2776 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2777 (0..self.end).get_mut(slice)
2781 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2782 (0..self.end).get_unchecked(slice)
2786 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2787 (0..self.end).get_unchecked_mut(slice)
2791 fn index(self, slice: &[T]) -> &[T] {
2792 (0..self.end).index(slice)
2796 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2797 (0..self.end).index_mut(slice)
// `start..` == `start..len`.
2801 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2802 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
2806 fn get(self, slice: &[T]) -> Option<&[T]> {
2807 (self.start..slice.len()).get(slice)
2811 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2812 (self.start..slice.len()).get_mut(slice)
2816 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2817 (self.start..slice.len()).get_unchecked(slice)
2821 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2822 (self.start..slice.len()).get_unchecked_mut(slice)
2826 fn index(self, slice: &[T]) -> &[T] {
2827 (self.start..slice.len()).index(slice)
2831 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2832 (self.start..slice.len()).index_mut(slice)
// `..` returns the whole slice; every method body (on missing lines) is
// presumably just `slice` — always in bounds, nothing to check.
2836 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2837 impl<T> SliceIndex<[T]> for ops::RangeFull {
2841 fn get(self, slice: &[T]) -> Option<&[T]> {
2846 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2851 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2856 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2861 fn index(self, slice: &[T]) -> &[T] {
2866 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
// `start..=end` == `start..end + 1`, except `end == usize::MAX` would overflow
// `end + 1`; the checked paths return None / panic via
// slice_index_overflow_fail for that case before computing the half-open range.
2872 #[stable(feature = "inclusive_range", since = "1.26.0")]
2873 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
2877 fn get(self, slice: &[T]) -> Option<&[T]> {
2878 if *self.end() == usize::max_value() { None }
2879 else { (*self.start()..self.end() + 1).get(slice) }
2883 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2884 if *self.end() == usize::max_value() { None }
2885 else { (*self.start()..self.end() + 1).get_mut(slice) }
// The unchecked variants skip the MAX check entirely — the caller's in-bounds
// contract already rules out end == usize::MAX.
2889 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2890 (*self.start()..self.end() + 1).get_unchecked(slice)
2894 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2895 (*self.start()..self.end() + 1).get_unchecked_mut(slice)
2899 fn index(self, slice: &[T]) -> &[T] {
2900 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2901 (*self.start()..self.end() + 1).index(slice)
2905 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2906 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2907 (*self.start()..self.end() + 1).index_mut(slice)
// `..=end` == `0..=end`; inherits the overflow handling above.
2911 #[stable(feature = "inclusive_range", since = "1.26.0")]
2912 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
2916 fn get(self, slice: &[T]) -> Option<&[T]> {
2917 (0..=self.end).get(slice)
2921 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2922 (0..=self.end).get_mut(slice)
2926 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2927 (0..=self.end).get_unchecked(slice)
2931 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2932 (0..=self.end).get_unchecked_mut(slice)
2936 fn index(self, slice: &[T]) -> &[T] {
2937 (0..=self.end).index(slice)
2941 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2942 (0..=self.end).index_mut(slice)
2946 ////////////////////////////////////////////////////////////////////////////////
2948 ////////////////////////////////////////////////////////////////////////////////
// Default for slice references: the empty slice. Distinct impls are needed for
// the shared and mutable reference types.
2950 #[stable(feature = "rust1", since = "1.0.0")]
2951 impl<T> Default for &[T] {
2952 /// Creates an empty slice.
2953 fn default() -> Self { &[] }
2956 #[stable(feature = "mut_slice_default", since = "1.5.0")]
2957 impl<T> Default for &mut [T] {
2958 /// Creates a mutable empty slice.
2959 fn default() -> Self { &mut [] }
// `for x in &slice` / `for x in &mut slice` support: delegate to iter()/
// iter_mut(). NOTE(review): the method bodies (self.iter() / self.iter_mut())
// and the `type Item` line of the first impl are on lines missing from this
// extract.
2966 #[stable(feature = "rust1", since = "1.0.0")]
2967 impl<'a, T> IntoIterator for &'a [T] {
2969 type IntoIter = Iter<'a, T>;
2971 fn into_iter(self) -> Iter<'a, T> {
2976 #[stable(feature = "rust1", since = "1.0.0")]
2977 impl<'a, T> IntoIterator for &'a mut [T] {
2978 type Item = &'a mut T;
2979 type IntoIter = IterMut<'a, T>;
2981 fn into_iter(self) -> IterMut<'a, T> {
2986 // Macro helper functions
// Returns size_of::<T>() inferred from a pointer argument (body on a missing
// line), used by `len!` below to divide the byte distance by the element size.
2988 fn size_from_ptr<T>(_: *const T) -> usize {
2992 // Inlining is_empty and len makes a huge performance difference
2993 macro_rules! is_empty {
2994 // The way we encode the length of a ZST iterator, this works both for ZST
2996 ($self: ident) => {$self.ptr == $self.end}
// `len!`: length as (end - ptr) / size_of::<T>, computed via wrapping usize
// subtraction + division rather than `offset_from`.
// NOTE(review): the macro_rules! header for `len!` and the tail of this arm
// (the division and ZST branch) are on lines missing from this extract.
2998 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
2999 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
3001 ($self: ident) => {{
3002 let start = $self.ptr;
3003 let diff = ($self.end as usize).wrapping_sub(start as usize);
3004 let size = size_from_ptr(start);
3008 // Using division instead of `offset_from` helps LLVM remove bounds checks
// ===========================================================================
// `iterator!` generates both `Iter` (shared refs) and `IterMut` (mutable refs)
// from one template; `$raw_mut` selects const/mut raw pointers and `$( $mut_ )*`
// optionally inserts `mut` into the yielded reference. Key invariant: for ZSTs,
// `end` encodes `ptr + len` by raw address (see the Iter struct comment), so
// advancing the iterator moves `end`, never `ptr`.
// NOTE(review): this extract is missing many interior lines (closing braces,
// `if`/`else` arms, attribute lines); code reproduced exactly as extracted.
// ===========================================================================
3014 // The shared definition of the `Iter` and `IterMut` iterators
3015 macro_rules! iterator {
3017 struct $name:ident -> $ptr:ty,
3023 impl<'a, T> $name<'a, T> {
3024 // Helper function for creating a slice from the iterator.
3026 fn make_slice(&self) -> &'a [T] {
3027 unsafe { from_raw_parts(self.ptr, len!(self)) }
3030 // Helper function for moving the start of the iterator forwards by `offset` elements,
3031 // returning the old start.
3032 // Unsafe because the offset must be in-bounds or one-past-the-end.
3034 unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
3035 if mem::size_of::<T>() == 0 {
3036 // This is *reducing* the length. `ptr` never changes with ZST.
3037 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
3041 self.ptr = self.ptr.offset(offset);
3046 // Helper function for moving the end of the iterator backwards by `offset` elements,
3047 // returning the new end.
3048 // Unsafe because the offset must be in-bounds or one-past-the-end.
3050 unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
3051 if mem::size_of::<T>() == 0 {
3052 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
3055 self.end = self.end.offset(-offset);
3061 #[stable(feature = "rust1", since = "1.0.0")]
3062 impl<T> ExactSizeIterator for $name<'_, T> {
3064 fn len(&self) -> usize {
3069 fn is_empty(&self) -> bool {
3074 #[stable(feature = "rust1", since = "1.0.0")]
3075 impl<'a, T> Iterator for $name<'a, T> {
3079 fn next(&mut self) -> Option<$elem> {
3080 // could be implemented with slices, but this avoids bounds checks
// The `assume` intrinsic calls feed LLVM the non-null invariants of a valid
// slice iterator so it can drop redundant null checks.
3082 assume(!self.ptr.is_null());
3083 if mem::size_of::<T>() != 0 {
3084 assume(!self.end.is_null());
3086 if is_empty!(self) {
3089 Some(& $( $mut_ )* *self.post_inc_start(1))
3095 fn size_hint(&self) -> (usize, Option<usize>) {
3096 let exact = len!(self);
3097 (exact, Some(exact))
3101 fn count(self) -> usize {
3106 fn nth(&mut self, n: usize) -> Option<$elem> {
3107 if n >= len!(self) {
3108 // This iterator is now empty.
3109 if mem::size_of::<T>() == 0 {
3110 // We have to do it this way as `ptr` may never be 0, but `end`
3111 // could be (due to wrapping).
3112 self.end = self.ptr;
3114 self.ptr = self.end;
3118 // We are in bounds. `offset` does the right thing even for ZSTs.
3120 let elem = Some(& $( $mut_ )* *self.ptr.add(n));
3121 self.post_inc_start((n as isize).wrapping_add(1));
3127 fn last(mut self) -> Option<$elem> {
// try_fold: manually 4x-unrolled because the `?` early exit defeats automatic
// unrolling; the same pattern recurs in try_rfold below.
3132 fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
3133 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
3135 // manual unrolling is needed when there are conditional exits from the loop
3136 let mut accum = init;
3138 while len!(self) >= 4 {
3139 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
3140 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
3141 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
3142 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
3144 while !is_empty!(self) {
3145 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
3152 fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
3153 where Fold: FnMut(Acc, Self::Item) -> Acc,
3155 // Let LLVM unroll this, rather than using the default
3156 // impl that would force the manual unrolling above
3157 let mut accum = init;
3158 while let Some(x) = self.next() {
3159 accum = f(accum, x);
// position/rposition are built on try_fold/try_rfold with `Err(index)` used as
// the early-exit channel; the trailing `assume(i < n)` teaches LLVM the found
// index is in bounds, eliminating a later bounds check in callers.
3165 #[rustc_inherit_overflow_checks]
3166 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
3168 P: FnMut(Self::Item) -> bool,
3170 // The addition might panic on overflow.
3172 self.try_fold(0, move |i, x| {
3173 if predicate(x) { Err(i) }
3177 unsafe { assume(i < n) };
3183 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
3184 P: FnMut(Self::Item) -> bool,
3185 Self: Sized + ExactSizeIterator + DoubleEndedIterator
3187 // No need for an overflow check here, because `ExactSizeIterator`
3189 self.try_rfold(n, move |i, x| {
3191 if predicate(x) { Err(i) }
3195 unsafe { assume(i < n) };
3203 #[stable(feature = "rust1", since = "1.0.0")]
3204 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
3206 fn next_back(&mut self) -> Option<$elem> {
3207 // could be implemented with slices, but this avoids bounds checks
3209 assume(!self.ptr.is_null());
3210 if mem::size_of::<T>() != 0 {
3211 assume(!self.end.is_null());
3213 if is_empty!(self) {
3216 Some(& $( $mut_ )* *self.pre_dec_end(1))
3222 fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
3223 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
3225 // manual unrolling is needed when there are conditional exits from the loop
3226 let mut accum = init;
3228 while len!(self) >= 4 {
3229 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3230 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3231 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3232 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3234 // inlining is_empty everywhere makes a huge performance difference
3235 while !is_empty!(self) {
3236 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3243 fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
3244 where Fold: FnMut(Acc, Self::Item) -> Acc,
3246 // Let LLVM unroll this, rather than using the default
3247 // impl that would force the manual unrolling above
3248 let mut accum = init;
3249 while let Some(x) = self.next_back() {
3250 accum = f(accum, x);
// Never yields Some after None (FusedIterator) and reports an exact length
// (TrustedLen) — both follow from the ptr/end representation above.
3256 #[stable(feature = "fused", since = "1.26.0")]
3257 impl<T> FusedIterator for $name<'_, T> {}
3259 #[unstable(feature = "trusted_len", issue = "37572")]
3260 unsafe impl<T> TrustedLen for $name<'_, T> {}
3264 /// Immutable slice iterator
3266 /// This struct is created by the [`iter`] method on [slices].
3273 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3274 /// let slice = &[1, 2, 3];
3276 /// // Then, we iterate over it:
3277 /// for element in slice.iter() {
3278 /// println!("{}", element);
3282 /// [`iter`]: ../../std/primitive.slice.html#method.iter
3283 /// [slices]: ../../std/primitive.slice.html
3284 #[stable(feature = "rust1", since = "1.0.0")]
// Fields: `ptr` (current position — its declaration is on a missing line),
// `end` (one-past-the-end, or ptr+len for ZSTs), and a PhantomData tying the
// raw pointers to lifetime 'a.
3285 pub struct Iter<'a, T: 'a> {
3287 end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3288 // ptr == end is a quick test for the Iterator being empty, that works
3289 // for both ZST and non-ZST.
3290 _marker: marker::PhantomData<&'a T>,
3293 #[stable(feature = "core_impl_debug", since = "1.9.0")]
// Debug shows the not-yet-consumed elements, not the raw pointers.
3294 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
3295 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3296 f.debug_tuple("Iter")
3297 .field(&self.as_slice())
// SAFETY of these impls: Iter only hands out `&T`, so both Sync and Send
// require exactly `T: Sync` (same bounds as `&T` itself).
3302 #[stable(feature = "rust1", since = "1.0.0")]
3303 unsafe impl<T: Sync> Sync for Iter<'_, T> {}
3304 #[stable(feature = "rust1", since = "1.0.0")]
3305 unsafe impl<T: Sync> Send for Iter<'_, T> {}
3307 impl<'a, T> Iter<'a, T> {
3308 /// Views the underlying data as a subslice of the original data.
3310 /// This has the same lifetime as the original slice, and so the
3311 /// iterator can continue to be used while this exists.
3318 /// // First, we declare a type which has the `iter` method to get the `Iter`
3319 /// // struct (&[usize here]):
3320 /// let slice = &[1, 2, 3];
3322 /// // Then, we get the iterator:
3323 /// let mut iter = slice.iter();
3324 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
3325 /// println!("{:?}", iter.as_slice());
3327 /// // Next, we move to the second element of the slice:
3329 /// // Now `as_slice` returns "[2, 3]":
3330 /// println!("{:?}", iter.as_slice());
3332 #[stable(feature = "iter_to_slice", since = "1.4.0")]
// Body (make_slice call) is on a missing line.
3333 pub fn as_slice(&self) -> &'a [T] {
// Instantiate the shared template for the immutable iterator. The extra block
// adds Iter-only specializations (here: is_sorted_by via windows(2), which a
// forward-only pointer walk could not express).
3338 iterator!{struct Iter -> *const T, &'a T, const, {/* no mut */}, {
3339 fn is_sorted_by<F>(self, mut compare: F) -> bool
3342 F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
3344 self.as_slice().windows(2).all(|w| {
// Incomparable neighbours (partial_cmp == None) count as unsorted.
3345 compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
// Clone is manual (not derived) so it works for any T — cloning an Iter just
// copies the two pointers; the underlying elements are untouched.
3350 #[stable(feature = "rust1", since = "1.0.0")]
3351 impl<T> Clone for Iter<'_, T> {
3352 fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
3355 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
// AsRef body (on a missing line) presumably forwards to as_slice/make_slice.
3356 impl<T> AsRef<[T]> for Iter<'_, T> {
3357 fn as_ref(&self) -> &[T] {
3362 /// Mutable slice iterator.
3364 /// This struct is created by the [`iter_mut`] method on [slices].
3371 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3372 /// // struct (&[usize here]):
3373 /// let mut slice = &mut [1, 2, 3];
3375 /// // Then, we iterate over it and increment each element value:
3376 /// for element in slice.iter_mut() {
3380 /// // We now have "[2, 3, 4]":
3381 /// println!("{:?}", slice);
3384 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
3385 /// [slices]: ../../std/primitive.slice.html
3386 #[stable(feature = "rust1", since = "1.0.0")]
// Same layout as Iter but with *mut pointers and a `&'a mut T` marker, which
// makes IterMut invariant and non-Clone, as required for exclusive access.
3387 pub struct IterMut<'a, T: 'a> {
3389 end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3390 // ptr == end is a quick test for the Iterator being empty, that works
3391 // for both ZST and non-ZST.
3392 _marker: marker::PhantomData<&'a mut T>,
3395 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3396 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
3397 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3398 f.debug_tuple("IterMut")
3399 .field(&self.make_slice())
// SAFETY of these impls: IterMut behaves like `&mut [T]`, so Sync needs
// `T: Sync` and Send needs `T: Send` — note the deliberate asymmetry with Iter.
3404 #[stable(feature = "rust1", since = "1.0.0")]
3405 unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
3406 #[stable(feature = "rust1", since = "1.0.0")]
3407 unsafe impl<T: Send> Send for IterMut<'_, T> {}
3409 impl<'a, T> IterMut<'a, T> {
3410 /// Views the underlying data as a subslice of the original data.
3412 /// To avoid creating `&mut` references that alias, this is forced
3413 /// to consume the iterator.
3420 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3421 /// // struct (&[usize here]):
3422 /// let mut slice = &mut [1, 2, 3];
3425 /// // Then, we get the iterator:
3426 /// let mut iter = slice.iter_mut();
3427 /// // We move to next element:
3429 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
3430 /// println!("{:?}", iter.into_slice());
3433 /// // Now let's modify a value of the slice:
3435 /// // First we get back the iterator:
3436 /// let mut iter = slice.iter_mut();
3437 /// // We change the value of the first element of the slice returned by the `next` method:
3438 /// *iter.next().unwrap() += 1;
3440 /// // Now slice is "[2, 2, 3]":
3441 /// println!("{:?}", slice);
3443 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3444 pub fn into_slice(self) -> &'a mut [T] {
3445 unsafe { from_raw_parts_mut(self.ptr, len!(self)) }
3448 /// Views the underlying data as a subslice of the original data.
3450 /// To avoid creating `&mut [T]` references that alias, the returned slice
3451 /// borrows its lifetime from the iterator the method is applied on.
3458 /// # #![feature(slice_iter_mut_as_slice)]
3459 /// let mut slice: &mut [usize] = &mut [1, 2, 3];
3461 /// // First, we get the iterator:
3462 /// let mut iter = slice.iter_mut();
3463 /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
3464 /// assert_eq!(iter.as_slice(), &[1, 2, 3]);
3466 /// // Next, we move to the second element of the slice:
3468 /// // Now `as_slice` returns "[2, 3]":
3469 /// assert_eq!(iter.as_slice(), &[2, 3]);
3471 #[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")]
// Shared (non-aliasing) view; body is on a missing line.
3472 pub fn as_slice(&self) -> &[T] {
// Instantiate the shared template for the mutable iterator; no extra
// specializations (empty trailing block).
3477 iterator!{struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
3479 /// An internal abstraction over the splitting iterators, so that
3480 /// splitn, splitn_mut etc can be implemented once.
3482 trait SplitIter: DoubleEndedIterator {
3483 /// Marks the underlying iterator as complete, extracting the remaining
3484 /// portion of the slice.
3485 fn finish(&mut self) -> Option<Self::Item>;
3488 /// An iterator over subslices separated by elements that match a predicate
3491 /// This struct is created by the [`split`] method on [slices].
3493 /// [`split`]: ../../std/primitive.slice.html#method.split
3494 /// [slices]: ../../std/primitive.slice.html
3495 #[stable(feature = "rust1", since = "1.0.0")]
3496 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
3502 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3503 impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool {
3504 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3505 f.debug_struct("Split")
3506 .field("v", &self.v)
3507 .field("finished", &self.finished)
3512 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3513 #[stable(feature = "rust1", since = "1.0.0")]
3514 impl<T, P> Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool {
3515 fn clone(&self) -> Self {
3518 pred: self.pred.clone(),
3519 finished: self.finished,
3524 #[stable(feature = "rust1", since = "1.0.0")]
3525 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3526 type Item = &'a [T];
// Yields the subslice before the next element matching the predicate, then
// skips that element; when no match remains, `finish` emits the remainder.
3529 fn next(&mut self) -> Option<&'a [T]> {
3530 if self.finished { return None; }
3532 match self.v.iter().position(|x| (self.pred)(x)) {
3533 None => self.finish(),
3535 let ret = Some(&self.v[..idx]);
3536 self.v = &self.v[idx + 1..];
3543 fn size_hint(&self) -> (usize, Option<usize>) {
// Lower bound: at least the remaining slice itself is yielded once.
// Upper bound: if every remaining element matches, we get len + 1
// (possibly empty) subslices.
3547 (1, Some(self.v.len() + 1))
3552 fn last(mut self) -> Option<Self::Item> {
3557 #[stable(feature = "rust1", since = "1.0.0")]
3558 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
// Mirror of `next`: searches for the *last* matching element (`rposition`)
// and yields the subslice after it.
3560 fn next_back(&mut self) -> Option<&'a [T]> {
3561 if self.finished { return None; }
3563 match self.v.iter().rposition(|x| (self.pred)(x)) {
3564 None => self.finish(),
3566 let ret = Some(&self.v[idx + 1..]);
3567 self.v = &self.v[..idx];
3574 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
// Emits the remaining (unsplit) slice exactly once; idempotent afterwards.
3576 fn finish(&mut self) -> Option<&'a [T]> {
3577 if self.finished { None } else { self.finished = true; Some(self.v) }
3581 #[stable(feature = "fused", since = "1.26.0")]
3582 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
3584 /// An iterator over the subslices of the slice which are separated
3585 /// by elements that match `pred`.
3587 /// This struct is created by the [`split_mut`] method on [slices].
3589 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
3590 /// [slices]: ../../std/primitive.slice.html
3591 #[stable(feature = "rust1", since = "1.0.0")]
3592 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3598 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3599 impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3600 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3601 f.debug_struct("SplitMut")
3602 .field("v", &self.v)
3603 .field("finished", &self.finished)
3608 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3610 fn finish(&mut self) -> Option<&'a mut [T]> {
3614 self.finished = true;
3615 Some(mem::replace(&mut self.v, &mut []))
3620 #[stable(feature = "rust1", since = "1.0.0")]
3621 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3622 type Item = &'a mut [T];
3625 fn next(&mut self) -> Option<&'a mut [T]> {
3626 if self.finished { return None; }
3628 let idx_opt = { // work around borrowck limitations
3629 let pred = &mut self.pred;
3630 self.v.iter().position(|x| (*pred)(x))
3633 None => self.finish(),
3635 let tmp = mem::replace(&mut self.v, &mut []);
3636 let (head, tail) = tmp.split_at_mut(idx);
3637 self.v = &mut tail[1..];
3644 fn size_hint(&self) -> (usize, Option<usize>) {
3648 // if the predicate doesn't match anything, we yield one slice
3649 // if it matches every element, we yield len+1 empty slices.
3650 (1, Some(self.v.len() + 1))
3655 fn last(mut self) -> Option<Self::Item> {
3660 #[stable(feature = "rust1", since = "1.0.0")]
3661 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
3662 P: FnMut(&T) -> bool,
3665 fn next_back(&mut self) -> Option<&'a mut [T]> {
3666 if self.finished { return None; }
3668 let idx_opt = { // work around borrowck limitations
3669 let pred = &mut self.pred;
3670 self.v.iter().rposition(|x| (*pred)(x))
3673 None => self.finish(),
3675 let tmp = mem::replace(&mut self.v, &mut []);
3676 let (head, tail) = tmp.split_at_mut(idx);
3678 Some(&mut tail[1..])
3684 #[stable(feature = "fused", since = "1.26.0")]
3685 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
3687 /// An iterator over subslices separated by elements that match a predicate
3688 /// function, starting from the end of the slice.
3690 /// This struct is created by the [`rsplit`] method on [slices].
3692 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
3693 /// [slices]: ../../std/primitive.slice.html
3694 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3695 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
3696 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
3697 inner: Split<'a, T, P>
3700 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3701 impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool {
3702 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3703 f.debug_struct("RSplit")
3704 .field("v", &self.inner.v)
3705 .field("finished", &self.inner.finished)
3710 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3711 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3712 type Item = &'a [T];
3715 fn next(&mut self) -> Option<&'a [T]> {
3716 self.inner.next_back()
3720 fn size_hint(&self) -> (usize, Option<usize>) {
3721 self.inner.size_hint()
3725 fn last(mut self) -> Option<Self::Item> {
3730 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3731 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3733 fn next_back(&mut self) -> Option<&'a [T]> {
3738 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3739 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3741 fn finish(&mut self) -> Option<&'a [T]> {
3746 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3747 impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
3749 /// An iterator over the subslices of the slice which are separated
3750 /// by elements that match `pred`, starting from the end of the slice.
3752 /// This struct is created by the [`rsplit_mut`] method on [slices].
3754 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
3755 /// [slices]: ../../std/primitive.slice.html
3756 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3757 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3758 inner: SplitMut<'a, T, P>
3761 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3762 impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3763 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3764 f.debug_struct("RSplitMut")
3765 .field("v", &self.inner.v)
3766 .field("finished", &self.inner.finished)
3771 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3772 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3774 fn finish(&mut self) -> Option<&'a mut [T]> {
3779 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3780 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3781 type Item = &'a mut [T];
3784 fn next(&mut self) -> Option<&'a mut [T]> {
3785 self.inner.next_back()
3789 fn size_hint(&self) -> (usize, Option<usize>) {
3790 self.inner.size_hint()
3794 fn last(mut self) -> Option<Self::Item> {
3799 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3800 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
3801 P: FnMut(&T) -> bool,
3804 fn next_back(&mut self) -> Option<&'a mut [T]> {
3809 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3810 impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
3812 /// A private iterator over subslices separated by elements that
3813 /// match a predicate function, splitting at most a fixed number of
// Shared driver behind splitn/rsplitn (and their `_mut` forms): wraps any
// SplitIter and limits it to a fixed number of yielded pieces (`count`).
3816 struct GenericSplitN<I> {
3821 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
3825 fn next(&mut self) -> Option<T> {
// With exactly one piece left in the budget, yield everything that
// remains unsplit via `finish`; otherwise perform one normal split step.
3828 1 => { self.count -= 1; self.iter.finish() }
3829 _ => { self.count -= 1; self.iter.next() }
3834 fn size_hint(&self) -> (usize, Option<usize>) {
// Cap the inner upper bound by the remaining split budget.
// NOTE(review): the lower bound is forwarded unclamped; once `count`
// reaches 0 this can still report a nonzero lower bound — consider
// `cmp::min(self.count, lower)`. Verify against the iterator contract.
3835 let (lower, upper_opt) = self.iter.size_hint();
3836 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
3840 /// An iterator over subslices separated by elements that match a predicate
3841 /// function, limited to a given number of splits.
3843 /// This struct is created by the [`splitn`] method on [slices].
3845 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
3846 /// [slices]: ../../std/primitive.slice.html
3847 #[stable(feature = "rust1", since = "1.0.0")]
3848 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3849 inner: GenericSplitN<Split<'a, T, P>>
3852 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3853 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool {
3854 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3855 f.debug_struct("SplitN")
3856 .field("inner", &self.inner)
3861 /// An iterator over subslices separated by elements that match a
3862 /// predicate function, limited to a given number of splits, starting
3863 /// from the end of the slice.
3865 /// This struct is created by the [`rsplitn`] method on [slices].
3867 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
3868 /// [slices]: ../../std/primitive.slice.html
3869 #[stable(feature = "rust1", since = "1.0.0")]
3870 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3871 inner: GenericSplitN<RSplit<'a, T, P>>
3874 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3875 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool {
3876 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3877 f.debug_struct("RSplitN")
3878 .field("inner", &self.inner)
3883 /// An iterator over subslices separated by elements that match a predicate
3884 /// function, limited to a given number of splits.
3886 /// This struct is created by the [`splitn_mut`] method on [slices].
3888 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
3889 /// [slices]: ../../std/primitive.slice.html
3890 #[stable(feature = "rust1", since = "1.0.0")]
3891 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3892 inner: GenericSplitN<SplitMut<'a, T, P>>
3895 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3896 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3897 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3898 f.debug_struct("SplitNMut")
3899 .field("inner", &self.inner)
3904 /// An iterator over subslices separated by elements that match a
3905 /// predicate function, limited to a given number of splits, starting
3906 /// from the end of the slice.
3908 /// This struct is created by the [`rsplitn_mut`] method on [slices].
3910 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
3911 /// [slices]: ../../std/primitive.slice.html
3912 #[stable(feature = "rust1", since = "1.0.0")]
3913 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3914 inner: GenericSplitN<RSplitMut<'a, T, P>>
3917 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3918 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3919 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3920 f.debug_struct("RSplitNMut")
3921 .field("inner", &self.inner)
// Generates the `Iterator` and `FusedIterator` impls for the four public
// `*SplitN*` wrappers by forwarding every call to the inner `GenericSplitN`.
3926 macro_rules! forward_iterator {
3927 ($name:ident: $elem:ident, $iter_of:ty) => {
3928 #[stable(feature = "rust1", since = "1.0.0")]
3929 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
3930 P: FnMut(&T) -> bool
3932 type Item = $iter_of;
3935 fn next(&mut self) -> Option<$iter_of> {
3940 fn size_hint(&self) -> (usize, Option<usize>) {
3941 self.inner.size_hint()
3945 #[stable(feature = "fused", since = "1.26.0")]
3946 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
3947 where P: FnMut(&T) -> bool {}
3951 forward_iterator! { SplitN: T, &'a [T] }
3952 forward_iterator! { RSplitN: T, &'a [T] }
3953 forward_iterator! { SplitNMut: T, &'a mut [T] }
3954 forward_iterator! { RSplitNMut: T, &'a mut [T] }
3956 /// An iterator over overlapping subslices of length `size`.
3958 /// This struct is created by the [`windows`] method on [slices].
3960 /// [`windows`]: ../../std/primitive.slice.html#method.windows
3961 /// [slices]: ../../std/primitive.slice.html
3963 #[stable(feature = "rust1", since = "1.0.0")]
3964 pub struct Windows<'a, T:'a> {
3969 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3970 #[stable(feature = "rust1", since = "1.0.0")]
3971 impl<T> Clone for Windows<'_, T> {
3972 fn clone(&self) -> Self {
3980 #[stable(feature = "rust1", since = "1.0.0")]
3981 impl<'a, T> Iterator for Windows<'a, T> {
3982 type Item = &'a [T];
// Invariant: iteration is done exactly when fewer than `size` elements
// remain. Each step yields the front window and advances by one element.
3985 fn next(&mut self) -> Option<&'a [T]> {
3986 if self.size > self.v.len() {
3989 let ret = Some(&self.v[..self.size]);
3990 self.v = &self.v[1..];
3996 fn size_hint(&self) -> (usize, Option<usize>) {
3997 if self.size > self.v.len() {
// len - size + 1 windows remain; exact, so lower == upper.
4000 let size = self.v.len() - self.size + 1;
4006 fn count(self) -> usize {
// Window n (from the front) is v[n..n + size]; the overflowing_add guards
// against `size + n` wrapping before the bounds comparison.
4011 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4012 let (end, overflow) = self.size.overflowing_add(n);
4013 if end > self.v.len() || overflow {
4017 let nth = &self.v[n..end];
4018 self.v = &self.v[n+1..];
4024 fn last(self) -> Option<Self::Item> {
4025 if self.size > self.v.len() {
4028 let start = self.v.len() - self.size;
4029 Some(&self.v[start..])
4034 #[stable(feature = "rust1", since = "1.0.0")]
4035 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
4037 fn next_back(&mut self) -> Option<&'a [T]> {
4038 if self.size > self.v.len() {
4041 let ret = Some(&self.v[self.v.len()-self.size..]);
4042 self.v = &self.v[..self.v.len()-1];
// `end` is the exclusive end of the window `n` positions from the back;
// after yielding, only one element is dropped from the back, since
// adjacent windows overlap in all but one element.
4048 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4049 let (end, overflow) = self.v.len().overflowing_sub(n);
4050 if end < self.size || overflow {
4054 let ret = &self.v[end-self.size..end];
4055 self.v = &self.v[..end-1];
4061 #[stable(feature = "rust1", since = "1.0.0")]
4062 impl<T> ExactSizeIterator for Windows<'_, T> {}
4064 #[unstable(feature = "trusted_len", issue = "37572")]
4065 unsafe impl<T> TrustedLen for Windows<'_, T> {}
4067 #[stable(feature = "fused", since = "1.26.0")]
4068 impl<T> FusedIterator for Windows<'_, T> {}
4071 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
4072 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4073 from_raw_parts(self.v.as_ptr().add(i), self.size)
4075 fn may_have_side_effect() -> bool { false }
4078 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4079 /// time), starting at the beginning of the slice.
4081 /// When the slice len is not evenly divided by the chunk size, the last slice
4082 /// of the iteration will be the remainder.
4084 /// This struct is created by the [`chunks`] method on [slices].
4086 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
4087 /// [slices]: ../../std/primitive.slice.html
4089 #[stable(feature = "rust1", since = "1.0.0")]
4090 pub struct Chunks<'a, T:'a> {
4095 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4096 #[stable(feature = "rust1", since = "1.0.0")]
4097 impl<T> Clone for Chunks<'_, T> {
4098 fn clone(&self) -> Self {
4101 chunk_size: self.chunk_size,
4106 #[stable(feature = "rust1", since = "1.0.0")]
4107 impl<'a, T> Iterator for Chunks<'a, T> {
4108 type Item = &'a [T];
// Yields non-overlapping chunks from the front; only the final chunk may
// be shorter than `chunk_size` (the min below handles that tail).
4111 fn next(&mut self) -> Option<&'a [T]> {
4112 if self.v.is_empty() {
4115 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4116 let (fst, snd) = self.v.split_at(chunksz);
4123 fn size_hint(&self) -> (usize, Option<usize>) {
4124 if self.v.is_empty() {
// Ceil-division: a nonzero remainder adds one final short chunk.
4127 let n = self.v.len() / self.chunk_size;
4128 let rem = self.v.len() % self.chunk_size;
4129 let n = if rem > 0 { n+1 } else { n };
4135 fn count(self) -> usize {
// Chunk n starts at n * chunk_size; overflowing_mul guards the multiply,
// and checked_add + min clamp the end to the (possibly short) tail.
4140 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4141 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4142 if start >= self.v.len() || overflow {
4146 let end = match start.checked_add(self.chunk_size) {
4147 Some(sum) => cmp::min(self.v.len(), sum),
4148 None => self.v.len(),
4150 let nth = &self.v[start..end];
4151 self.v = &self.v[end..];
4157 fn last(self) -> Option<Self::Item> {
4158 if self.v.is_empty() {
// Largest multiple of chunk_size strictly below len = start of last chunk.
4161 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4162 Some(&self.v[start..])
4167 #[stable(feature = "rust1", since = "1.0.0")]
4168 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
// From the back, the first chunk yielded is the remainder-sized tail (if
// any); all subsequent back-chunks are exactly chunk_size long.
4170 fn next_back(&mut self) -> Option<&'a [T]> {
4171 if self.v.is_empty() {
4174 let remainder = self.v.len() % self.chunk_size;
4175 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4176 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4183 fn nth_back(&mut self, n: usize) {
4184 let (end, overflow) = self.v.len().overflowing_sub(n);
4185 if end < self.v.len() || overflow {
4189 let start = match end.checked_sub(self.chunk_size) {
4190 Some(sum) => cmp::min(self.v.len(), sum),
4191 None => self.v.len(),
4193 let nth = &self.v[start..end];
4194 self.v = &self.v[end..];
4199 #[stable(feature = "rust1", since = "1.0.0")]
4200 impl<T> ExactSizeIterator for Chunks<'_, T> {}
4202 #[unstable(feature = "trusted_len", issue = "37572")]
4203 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
4205 #[stable(feature = "fused", since = "1.26.0")]
4206 impl<T> FusedIterator for Chunks<'_, T> {}
4209 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
4210 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4211 let start = i * self.chunk_size;
4212 let end = match start.checked_add(self.chunk_size) {
4213 None => self.v.len(),
4214 Some(end) => cmp::min(end, self.v.len()),
4216 from_raw_parts(self.v.as_ptr().add(start), end - start)
4218 fn may_have_side_effect() -> bool { false }
4221 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4222 /// elements at a time), starting at the beginning of the slice.
4224 /// When the slice len is not evenly divided by the chunk size, the last slice
4225 /// of the iteration will be the remainder.
4227 /// This struct is created by the [`chunks_mut`] method on [slices].
4229 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
4230 /// [slices]: ../../std/primitive.slice.html
4232 #[stable(feature = "rust1", since = "1.0.0")]
4233 pub struct ChunksMut<'a, T:'a> {
4238 #[stable(feature = "rust1", since = "1.0.0")]
4239 impl<'a, T> Iterator for ChunksMut<'a, T> {
4240 type Item = &'a mut [T];
4243 fn next(&mut self) -> Option<&'a mut [T]> {
4244 if self.v.is_empty() {
4247 let sz = cmp::min(self.v.len(), self.chunk_size);
4248 let tmp = mem::replace(&mut self.v, &mut []);
4249 let (head, tail) = tmp.split_at_mut(sz);
4256 fn size_hint(&self) -> (usize, Option<usize>) {
4257 if self.v.is_empty() {
4260 let n = self.v.len() / self.chunk_size;
4261 let rem = self.v.len() % self.chunk_size;
4262 let n = if rem > 0 { n + 1 } else { n };
4268 fn count(self) -> usize {
4273 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4274 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4275 if start >= self.v.len() || overflow {
4279 let end = match start.checked_add(self.chunk_size) {
4280 Some(sum) => cmp::min(self.v.len(), sum),
4281 None => self.v.len(),
4283 let tmp = mem::replace(&mut self.v, &mut []);
4284 let (head, tail) = tmp.split_at_mut(end);
4285 let (_, nth) = head.split_at_mut(start);
4292 fn last(self) -> Option<Self::Item> {
4293 if self.v.is_empty() {
4296 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4297 Some(&mut self.v[start..])
4302 #[stable(feature = "rust1", since = "1.0.0")]
4303 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
4305 fn next_back(&mut self) -> Option<&'a mut [T]> {
4306 if self.v.is_empty() {
4309 let remainder = self.v.len() % self.chunk_size;
4310 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4311 let tmp = mem::replace(&mut self.v, &mut []);
4312 let tmp_len = tmp.len();
4313 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4320 #[stable(feature = "rust1", since = "1.0.0")]
4321 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
4323 #[unstable(feature = "trusted_len", issue = "37572")]
4324 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
4326 #[stable(feature = "fused", since = "1.26.0")]
4327 impl<T> FusedIterator for ChunksMut<'_, T> {}
4330 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
4331 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4332 let start = i * self.chunk_size;
4333 let end = match start.checked_add(self.chunk_size) {
4334 None => self.v.len(),
4335 Some(end) => cmp::min(end, self.v.len()),
4337 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4339 fn may_have_side_effect() -> bool { false }
4342 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4343 /// time), starting at the beginning of the slice.
4345 /// When the slice len is not evenly divided by the chunk size, the last
4346 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4347 /// the [`remainder`] function from the iterator.
4349 /// This struct is created by the [`chunks_exact`] method on [slices].
4351 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
4352 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4353 /// [slices]: ../../std/primitive.slice.html
4355 #[stable(feature = "chunks_exact", since = "1.31.0")]
4356 pub struct ChunksExact<'a, T:'a> {
4362 impl<'a, T> ChunksExact<'a, T> {
4363 /// Returns the remainder of the original slice that is not going to be
4364 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4366 #[stable(feature = "chunks_exact", since = "1.31.0")]
4367 pub fn remainder(&self) -> &'a [T] {
4372 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4373 #[stable(feature = "chunks_exact", since = "1.31.0")]
4374 impl<T> Clone for ChunksExact<'_, T> {
4375 fn clone(&self) -> Self {
4379 chunk_size: self.chunk_size,
4384 #[stable(feature = "chunks_exact", since = "1.31.0")]
4385 impl<'a, T> Iterator for ChunksExact<'a, T> {
4386 type Item = &'a [T];
4389 fn next(&mut self) -> Option<&'a [T]> {
4390 if self.v.len() < self.chunk_size {
4393 let (fst, snd) = self.v.split_at(self.chunk_size);
4400 fn size_hint(&self) -> (usize, Option<usize>) {
4401 let n = self.v.len() / self.chunk_size;
4406 fn count(self) -> usize {
4411 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4412 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4413 if start >= self.v.len() || overflow {
4417 let (_, snd) = self.v.split_at(start);
4424 fn last(mut self) -> Option<Self::Item> {
4429 #[stable(feature = "chunks_exact", since = "1.31.0")]
4430 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
4432 fn next_back(&mut self) -> Option<&'a [T]> {
4433 if self.v.len() < self.chunk_size {
4436 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4443 #[stable(feature = "chunks_exact", since = "1.31.0")]
4444 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
4445 fn is_empty(&self) -> bool {
4450 #[unstable(feature = "trusted_len", issue = "37572")]
4451 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
4453 #[stable(feature = "chunks_exact", since = "1.31.0")]
4454 impl<T> FusedIterator for ChunksExact<'_, T> {}
4457 #[stable(feature = "chunks_exact", since = "1.31.0")]
4458 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
4459 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4460 let start = i * self.chunk_size;
4461 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4463 fn may_have_side_effect() -> bool { false }
4466 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4467 /// elements at a time), starting at the beginning of the slice.
4469 /// When the slice len is not evenly divided by the chunk size, the last up to
4470 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4471 /// [`into_remainder`] function from the iterator.
4473 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
4475 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
4476 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4477 /// [slices]: ../../std/primitive.slice.html
4479 #[stable(feature = "chunks_exact", since = "1.31.0")]
4480 pub struct ChunksExactMut<'a, T:'a> {
4486 impl<'a, T> ChunksExactMut<'a, T> {
4487 /// Returns the remainder of the original slice that is not going to be
4488 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4490 #[stable(feature = "chunks_exact", since = "1.31.0")]
4491 pub fn into_remainder(self) -> &'a mut [T] {
4496 #[stable(feature = "chunks_exact", since = "1.31.0")]
4497 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
4498 type Item = &'a mut [T];
4501 fn next(&mut self) -> Option<&'a mut [T]> {
4502 if self.v.len() < self.chunk_size {
4505 let tmp = mem::replace(&mut self.v, &mut []);
4506 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4513 fn size_hint(&self) -> (usize, Option<usize>) {
4514 let n = self.v.len() / self.chunk_size;
4519 fn count(self) -> usize {
4524 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4525 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4526 if start >= self.v.len() || overflow {
4530 let tmp = mem::replace(&mut self.v, &mut []);
4531 let (_, snd) = tmp.split_at_mut(start);
4538 fn last(mut self) -> Option<Self::Item> {
4543 #[stable(feature = "chunks_exact", since = "1.31.0")]
4544 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
4546 fn next_back(&mut self) -> Option<&'a mut [T]> {
4547 if self.v.len() < self.chunk_size {
4550 let tmp = mem::replace(&mut self.v, &mut []);
4551 let tmp_len = tmp.len();
4552 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4559 #[stable(feature = "chunks_exact", since = "1.31.0")]
4560 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
4561 fn is_empty(&self) -> bool {
4566 #[unstable(feature = "trusted_len", issue = "37572")]
4567 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
4569 #[stable(feature = "chunks_exact", since = "1.31.0")]
4570 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
4573 #[stable(feature = "chunks_exact", since = "1.31.0")]
4574 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
4575 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4576 let start = i * self.chunk_size;
4577 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4579 fn may_have_side_effect() -> bool { false }
4582 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4583 /// time), starting at the end of the slice.
4585 /// When the slice len is not evenly divided by the chunk size, the last slice
4586 /// of the iteration will be the remainder.
4588 /// This struct is created by the [`rchunks`] method on [slices].
4590 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
4591 /// [slices]: ../../std/primitive.slice.html
4593 #[stable(feature = "rchunks", since = "1.31.0")]
4594 pub struct RChunks<'a, T:'a> {
4599 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4600 #[stable(feature = "rchunks", since = "1.31.0")]
4601 impl<T> Clone for RChunks<'_, T> {
4602 fn clone(&self) -> Self {
4605 chunk_size: self.chunk_size,
4610 #[stable(feature = "rchunks", since = "1.31.0")]
4611 impl<'a, T> Iterator for RChunks<'a, T> {
4612 type Item = &'a [T];
4615 fn next(&mut self) -> Option<&'a [T]> {
4616 if self.v.is_empty() {
4619 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4620 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4627 fn size_hint(&self) -> (usize, Option<usize>) {
4628 if self.v.is_empty() {
4631 let n = self.v.len() / self.chunk_size;
4632 let rem = self.v.len() % self.chunk_size;
4633 let n = if rem > 0 { n+1 } else { n };
4639 fn count(self) -> usize {
4644 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4645 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4646 if end >= self.v.len() || overflow {
4650 // Can't underflow because of the check above
4651 let end = self.v.len() - end;
4652 let start = match end.checked_sub(self.chunk_size) {
4656 let nth = &self.v[start..end];
4657 self.v = &self.v[0..start];
4663 fn last(self) -> Option<Self::Item> {
4664 if self.v.is_empty() {
4667 let rem = self.v.len() % self.chunk_size;
4668 let end = if rem == 0 { self.chunk_size } else { rem };
4669 Some(&self.v[0..end])
4674 #[stable(feature = "rchunks", since = "1.31.0")]
4675 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
4677 fn next_back(&mut self) -> Option<&'a [T]> {
4678 if self.v.is_empty() {
4681 let remainder = self.v.len() % self.chunk_size;
4682 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4683 let (fst, snd) = self.v.split_at(chunksz);
4690 #[stable(feature = "rchunks", since = "1.31.0")]
4691 impl<T> ExactSizeIterator for RChunks<'_, T> {}
4693 #[unstable(feature = "trusted_len", issue = "37572")]
4694 unsafe impl<T> TrustedLen for RChunks<'_, T> {}
4696 #[stable(feature = "rchunks", since = "1.31.0")]
4697 impl<T> FusedIterator for RChunks<'_, T> {}
4700 #[stable(feature = "rchunks", since = "1.31.0")]
4701 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
4702 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4703 let end = self.v.len() - i * self.chunk_size;
4704 let start = match end.checked_sub(self.chunk_size) {
4706 Some(start) => start,
4708 from_raw_parts(self.v.as_ptr().add(start), end - start)
4710 fn may_have_side_effect() -> bool { false }
4713 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4714 /// elements at a time), starting at the end of the slice.
4716 /// When the slice len is not evenly divided by the chunk size, the last slice
4717 /// of the iteration will be the remainder.
4719 /// This struct is created by the [`rchunks_mut`] method on [slices].
4721 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
4722 /// [slices]: ../../std/primitive.slice.html
4724 #[stable(feature = "rchunks", since = "1.31.0")]
4725 pub struct RChunksMut<'a, T:'a> {
4730 #[stable(feature = "rchunks", since = "1.31.0")]
4731 impl<'a, T> Iterator for RChunksMut<'a, T> {
4732 type Item = &'a mut [T];
4735 fn next(&mut self) -> Option<&'a mut [T]> {
4736 if self.v.is_empty() {
4739 let sz = cmp::min(self.v.len(), self.chunk_size);
4740 let tmp = mem::replace(&mut self.v, &mut []);
4741 let tmp_len = tmp.len();
4742 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4749 fn size_hint(&self) -> (usize, Option<usize>) {
4750 if self.v.is_empty() {
4753 let n = self.v.len() / self.chunk_size;
4754 let rem = self.v.len() % self.chunk_size;
4755 let n = if rem > 0 { n + 1 } else { n };
4761 fn count(self) -> usize {
4766 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4767 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4768 if end >= self.v.len() || overflow {
4772 // Can't underflow because of the check above
4773 let end = self.v.len() - end;
4774 let start = match end.checked_sub(self.chunk_size) {
4778 let tmp = mem::replace(&mut self.v, &mut []);
4779 let (head, tail) = tmp.split_at_mut(start);
4780 let (nth, _) = tail.split_at_mut(end - start);
4787 fn last(self) -> Option<Self::Item> {
4788 if self.v.is_empty() {
4791 let rem = self.v.len() % self.chunk_size;
4792 let end = if rem == 0 { self.chunk_size } else { rem };
4793 Some(&mut self.v[0..end])
4798 #[stable(feature = "rchunks", since = "1.31.0")]
4799 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
4801 fn next_back(&mut self) -> Option<&'a mut [T]> {
4802 if self.v.is_empty() {
4805 let remainder = self.v.len() % self.chunk_size;
4806 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4807 let tmp = mem::replace(&mut self.v, &mut []);
4808 let (head, tail) = tmp.split_at_mut(sz);
4815 #[stable(feature = "rchunks", since = "1.31.0")]
4816 impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
4818 #[unstable(feature = "trusted_len", issue = "37572")]
4819 unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
4821 #[stable(feature = "rchunks", since = "1.31.0")]
4822 impl<T> FusedIterator for RChunksMut<'_, T> {}
4825 #[stable(feature = "rchunks", since = "1.31.0")]
4826 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
4827 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4828 let end = self.v.len() - i * self.chunk_size;
4829 let start = match end.checked_sub(self.chunk_size) {
4831 Some(start) => start,
4833 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4835 fn may_have_side_effect() -> bool { false }
// NOTE(review): elided extract — struct fields, returns and closing braces are
// missing and lines carry fused extraction numbers. Comments/doc-comments
// only; the two textual changes are a broken intra-doc link (pointed at
// `ChunksExact` instead of `RChunksExact`) and restoring a truncated doc line.
4838 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4839 /// time), starting at the end of the slice.
4841 /// When the slice len is not evenly divided by the chunk size, the last
4842 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4843 /// the [`remainder`] function from the iterator.
4845 /// This struct is created by the [`rchunks_exact`] method on [slices].
4847 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
4848 /// [`remainder`]: ../../std/slice/struct.RChunksExact.html#method.remainder
4849 /// [slices]: ../../std/primitive.slice.html
4851 #[stable(feature = "rchunks", since = "1.31.0")]
4852 pub struct RChunksExact<'a, T:'a> {
4858 impl<'a, T> RChunksExact<'a, T> {
4859 /// Returns the remainder of the original slice that is not going to be
4860 /// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
4862 #[stable(feature = "rchunks", since = "1.31.0")]
4863 pub fn remainder(&self) -> &'a [T] {
// Manual Clone: a derive would add an unwanted `T: Clone` bound (see FIXME);
// only the borrowed fields and the chunk size are copied.
4868 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4869 #[stable(feature = "rchunks", since = "1.31.0")]
4870 impl<'a, T> Clone for RChunksExact<'a, T> {
4871 fn clone(&self) -> RChunksExact<'a, T> {
4875 chunk_size: self.chunk_size,
4880 #[stable(feature = "rchunks", since = "1.31.0")]
4881 impl<'a, T> Iterator for RChunksExact<'a, T> {
4882 type Item = &'a [T];
// Yields exactly `chunk_size` elements from the back; stops (rather than
// yielding a short chunk) once fewer than `chunk_size` remain.
4885 fn next(&mut self) -> Option<&'a [T]> {
4886 if self.v.len() < self.chunk_size {
4889 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
// Exact: len / chunk_size full chunks remain.
4896 fn size_hint(&self) -> (usize, Option<usize>) {
4897 let n = self.v.len() / self.chunk_size;
4902 fn count(self) -> usize {
// O(1) skip of `n` chunks from the back, with overflow-checked multiply.
4907 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4908 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4909 if end >= self.v.len() || overflow {
4913 let (fst, _) = self.v.split_at(self.v.len() - end);
4920 fn last(mut self) -> Option<Self::Item> {
// Reverse end: full chunks are taken from the FRONT when iterating backwards.
4925 #[stable(feature = "rchunks", since = "1.31.0")]
4926 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
4928 fn next_back(&mut self) -> Option<&'a [T]> {
4929 if self.v.len() < self.chunk_size {
4932 let (fst, snd) = self.v.split_at(self.chunk_size);
4939 #[stable(feature = "rchunks", since = "1.31.0")]
4940 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
4941 fn is_empty(&self) -> bool {
4946 #[unstable(feature = "trusted_len", issue = "37572")]
4947 unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
4949 #[stable(feature = "rchunks", since = "1.31.0")]
4950 impl<T> FusedIterator for RChunksExact<'_, T> {}
// SAFETY contract: `i` must be in bounds; every chunk is exactly `chunk_size`
// long, so (unlike RChunksMut) no checked_sub is needed for the start.
4953 #[stable(feature = "rchunks", since = "1.31.0")]
4954 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
4955 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4956 let end = self.v.len() - i * self.chunk_size;
4957 let start = end - self.chunk_size;
4958 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4960 fn may_have_side_effect() -> bool { false }
// NOTE(review): elided extract — struct fields, returns and closing braces are
// missing and lines carry fused extraction numbers. Comments/doc-comments
// only; textual changes are a broken intra-doc link (pointed at
// `ChunksExactMut` instead of `RChunksExactMut`) and a restored doc line.
4963 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4964 /// elements at a time), starting at the end of the slice.
4966 /// When the slice len is not evenly divided by the chunk size, the last up to
4967 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4968 /// [`into_remainder`] function from the iterator.
4970 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
4972 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
4973 /// [`into_remainder`]: ../../std/slice/struct.RChunksExactMut.html#method.into_remainder
4974 /// [slices]: ../../std/primitive.slice.html
4976 #[stable(feature = "rchunks", since = "1.31.0")]
4977 pub struct RChunksExactMut<'a, T:'a> {
4983 impl<'a, T> RChunksExactMut<'a, T> {
4984 /// Returns the remainder of the original slice that is not going to be
4985 /// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
// Consumes `self` (`into_`) because a `&mut` remainder can only be given out
// once without aliasing.
4987 #[stable(feature = "rchunks", since = "1.31.0")]
4988 pub fn into_remainder(self) -> &'a mut [T] {
4993 #[stable(feature = "rchunks", since = "1.31.0")]
4994 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
4995 type Item = &'a mut [T];
// Exactly `chunk_size` mutable elements from the back; `mem::replace` detaches
// `self.v` so the split halves can carry lifetime 'a.
4998 fn next(&mut self) -> Option<&'a mut [T]> {
4999 if self.v.len() < self.chunk_size {
5002 let tmp = mem::replace(&mut self.v, &mut []);
5003 let tmp_len = tmp.len();
5004 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
// Exact: len / chunk_size full chunks remain.
5011 fn size_hint(&self) -> (usize, Option<usize>) {
5012 let n = self.v.len() / self.chunk_size;
5017 fn count(self) -> usize {
// O(1) skip of `n` chunks from the back, with overflow-checked multiply.
5022 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
5023 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5024 if end >= self.v.len() || overflow {
5028 let tmp = mem::replace(&mut self.v, &mut []);
5029 let tmp_len = tmp.len();
5030 let (fst, _) = tmp.split_at_mut(tmp_len - end);
5037 fn last(mut self) -> Option<Self::Item> {
// Reverse end: full chunks are taken from the FRONT when iterating backwards.
5042 #[stable(feature = "rchunks", since = "1.31.0")]
5043 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
5045 fn next_back(&mut self) -> Option<&'a mut [T]> {
5046 if self.v.len() < self.chunk_size {
5049 let tmp = mem::replace(&mut self.v, &mut []);
5050 let (head, tail) = tmp.split_at_mut(self.chunk_size);
5057 #[stable(feature = "rchunks", since = "1.31.0")]
5058 impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
5059 fn is_empty(&self) -> bool {
5064 #[unstable(feature = "trusted_len", issue = "37572")]
5065 unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
5067 #[stable(feature = "rchunks", since = "1.31.0")]
5068 impl<T> FusedIterator for RChunksExactMut<'_, T> {}
// SAFETY contract: `i` must be in bounds; chunks are exactly `chunk_size`
// long, so the start offset needs no underflow check.
5071 #[stable(feature = "rchunks", since = "1.31.0")]
5072 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
5073 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5074 let end = self.v.len() - i * self.chunk_size;
5075 let start = end - self.chunk_size;
5076 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
5078 fn may_have_side_effect() -> bool { false }
// NOTE(review): elided extract — the doc example's setup line (`let x = 42;`),
// section headers and the function's closing brace are missing; lines carry
// fused extraction numbers. Comments only.
5085 /// Forms a slice from a pointer and a length.
5087 /// The `len` argument is the number of **elements**, not the number of bytes.
5091 /// This function is unsafe as there is no guarantee that the given pointer is
5092 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
5093 /// lifetime for the returned slice.
5095 /// `data` must be non-null and aligned, even for zero-length slices. One
5096 /// reason for this is that enum layout optimizations may rely on references
5097 /// (including slices of any length) being aligned and non-null to distinguish
5098 /// them from other data. You can obtain a pointer that is usable as `data`
5099 /// for zero-length slices using [`NonNull::dangling()`].
5101 /// The total size of the slice must be no larger than `isize::MAX` **bytes**
5102 /// in memory. See the safety documentation of [`pointer::offset`].
5106 /// The lifetime for the returned slice is inferred from its usage. To
5107 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
5108 /// source lifetime is safe in the context, such as by providing a helper
5109 /// function taking the lifetime of a host value for the slice, or by explicit
5117 /// // manifest a slice for a single element
5119 /// let ptr = &x as *const _;
5120 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
5121 /// assert_eq!(slice[0], 42);
5124 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
5125 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
5127 #[stable(feature = "rust1", since = "1.0.0")]
// Best-effort sanity checks: these fire only in debug builds of core itself.
// `saturating_mul` keeps the size computation itself from overflowing.
// NOTE(review): the second assert's message ("covering half the address
// space") slightly understates the `<= isize::MAX` condition being checked —
// runtime string, so left unchanged here.
5128 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
5129 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
5130 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5131 "attempt to create slice covering half the address space");
// Transmute (ptr, len) into a fat slice reference via the `Repr` union.
5132 Repr { raw: FatPtr { data, len } }.rust
// NOTE(review): elided extract — the function's closing brace is missing and
// lines carry fused extraction numbers. Comments only.
5135 /// Performs the same functionality as [`from_raw_parts`], except that a
5136 /// mutable slice is returned.
5138 /// This function is unsafe for the same reasons as [`from_raw_parts`], as well
5139 /// as not being able to provide a non-aliasing guarantee of the returned
5140 /// mutable slice. `data` must be non-null and aligned even for zero-length
5141 /// slices as with [`from_raw_parts`]. The total size of the slice must be no
5142 /// larger than `isize::MAX` **bytes** in memory.
5144 /// See the documentation of [`from_raw_parts`] for more details.
5146 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
5148 #[stable(feature = "rust1", since = "1.0.0")]
// Same debug-only sanity checks as `from_raw_parts`; the union field `rust_mut`
// produces the `&mut [T]` fat reference.
5149 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
5150 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
5151 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5152 "attempt to create slice covering half the address space");
5153 Repr { raw: FatPtr { data, len } }.rust_mut
// NOTE(review): elided extract — the enclosing `unsafe { }` wrapper and the
// closing braces appear to be missing. Comments only.
5156 /// Converts a reference to T into a slice of length 1 (without copying).
5157 #[stable(feature = "from_ref", since = "1.28.0")]
5158 pub fn from_ref<T>(s: &T) -> &[T] {
// SAFETY: `s` is a valid, aligned, non-null reference to exactly one `T`, so
// a length-1 slice over it is sound; lifetimes tie the slice to `s`.
5160 from_raw_parts(s, 1)
// NOTE(review): elided extract — the enclosing `unsafe { }` wrapper and the
// closing braces appear to be missing. Doc fix: this takes `&mut T`, so say
// "mutable reference" (the old text was copy-pasted from `from_ref`).
5164 /// Converts a mutable reference to T into a slice of length 1 (without copying).
5165 #[stable(feature = "from_ref", since = "1.28.0")]
5166 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
// SAFETY: `s` is a valid, aligned, non-null, exclusive reference to one `T`,
// so a unique length-1 mutable slice over it is sound.
5168 from_raw_parts_mut(s, 1)
// NOTE(review): elided extract — the opening/closing braces of the body are
// missing. Comments only.
//
// Thin public re-export of the private `sort::heapsort`, exposed solely so the
// heapsort fallback path can be unit-tested; not part of the supported API
// (unstable `sort_internals`). `is_less` is the strict "less than" comparator.
5172 // This function is public only because there is no other way to unit test heapsort.
5173 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
5175 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
5176 where F: FnMut(&T, &T) -> bool
5178 sort::heapsort(v, &mut is_less);
// NOTE(review): elided extract — the surrounding `extern { }` block header for
// this foreign declaration is missing from the visible lines. Comments only.
5182 // Comparison traits
5186 /// Calls implementation provided memcmp.
5188 /// Interprets the data as u8.
5190 /// Returns 0 for equal, < 0 for less than and > 0 for greater
// Foreign (libc-provided) declaration; `n` is a byte count. Used below for
// bytewise slice equality and `[u8]` ordering.
5192 // FIXME(#32610): Return type should be c_int
5193 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
// NOTE(review): elided extract — closing braces between impls are missing.
// Comments only.
//
// Public comparison impls for `[T]`. Each forwards to a private
// specialization trait (`SlicePartialEq` / `SliceOrd` / `SlicePartialOrd`)
// so byte-like element types can take a `memcmp` fast path.
5196 #[stable(feature = "rust1", since = "1.0.0")]
5197 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
5198 fn eq(&self, other: &[B]) -> bool {
5199 SlicePartialEq::equal(self, other)
// `ne` is overridden (not defaulted) so the specialized bytewise path applies
// to inequality as well.
5202 fn ne(&self, other: &[B]) -> bool {
5203 SlicePartialEq::not_equal(self, other)
5207 #[stable(feature = "rust1", since = "1.0.0")]
5208 impl<T: Eq> Eq for [T] {}
5210 /// Implements comparison of vectors lexicographically.
5211 #[stable(feature = "rust1", since = "1.0.0")]
5212 impl<T: Ord> Ord for [T] {
5213 fn cmp(&self, other: &[T]) -> Ordering {
5214 SliceOrd::compare(self, other)
5218 /// Implements comparison of vectors lexicographically.
5219 #[stable(feature = "rust1", since = "1.0.0")]
5220 impl<T: PartialOrd> PartialOrd for [T] {
5221 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
5222 SlicePartialOrd::partial_compare(self, other)
// NOTE(review): elided extract — early returns and closing braces are missing.
// Comments only.
5227 // intermediate trait for specialization of slice's PartialEq
5228 trait SlicePartialEq<B> {
5229 fn equal(&self, other: &[B]) -> bool;
// Default `not_equal` is just the negation; specialized impls get it for free.
5231 fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
5234 // Generic slice equality
// Fallback: length check, then element-by-element `eq`.
5235 impl<A, B> SlicePartialEq<B> for [A]
5236 where A: PartialEq<B>
5238 default fn equal(&self, other: &[B]) -> bool {
5239 if self.len() != other.len() {
5243 for i in 0..self.len() {
5244 if !self[i].eq(&other[i]) {
5253 // Use memcmp for bytewise equality when the types allow
5254 impl<A> SlicePartialEq<A> for [A]
5255 where A: PartialEq<A> + BytewiseEquality
// Fast paths: equal length + identical data pointer means equal contents
// (valid because BytewiseEquality types compare by representation), otherwise
// a single `memcmp` over the whole byte span.
5257 fn equal(&self, other: &[A]) -> bool {
5258 if self.len() != other.len() {
5261 if self.as_ptr() == other.as_ptr() {
5265 let size = mem::size_of_val(self);
5266 memcmp(self.as_ptr() as *const u8,
5267 other.as_ptr() as *const u8, size) == 0
// NOTE(review): elided extract — `where` bounds, loop headers and closing
// braces are missing (the second impl presumably requires `A: Ord` — TODO
// confirm against upstream). Comments only.
5273 // intermediate trait for specialization of slice's PartialOrd
5274 trait SlicePartialOrd<B> {
5275 fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
// Fallback: lexicographic walk over the common prefix, then compare lengths.
5278 impl<A> SlicePartialOrd<A> for [A]
5281 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5282 let l = cmp::min(self.len(), other.len());
5284 // Slice to the loop iteration range to enable bound check
5285 // elimination in the compiler
5286 let lhs = &self[..l];
5287 let rhs = &other[..l];
5290 match lhs[i].partial_cmp(&rhs[i]) {
5291 Some(Ordering::Equal) => (),
5292 non_eq => return non_eq,
// All prefix elements equal: the shorter slice orders first.
5296 self.len().partial_cmp(&other.len())
// Specialization: when a total order exists, delegate to SliceOrd (which has
// its own memcmp fast path for [u8]) and wrap in Some.
5300 impl<A> SlicePartialOrd<A> for [A]
5303 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5304 Some(SliceOrd::compare(self, other))
// NOTE(review): elided extract — the `trait SliceOrd<B>` header itself, loop
// headers, bounds and the final match arms of the [u8] impl are missing from
// the visible lines. Comments only.
5309 // intermediate trait for specialization of slice's Ord
5311 fn compare(&self, other: &[B]) -> Ordering;
// Fallback: lexicographic element-wise `cmp` over the common prefix, then
// compare lengths (shorter slice orders first).
5314 impl<A> SliceOrd<A> for [A]
5317 default fn compare(&self, other: &[A]) -> Ordering {
5318 let l = cmp::min(self.len(), other.len());
5320 // Slice to the loop iteration range to enable bound check
5321 // elimination in the compiler
5322 let lhs = &self[..l];
5323 let rhs = &other[..l];
5326 match lhs[i].cmp(&rhs[i]) {
5327 Ordering::Equal => (),
5328 non_eq => return non_eq,
5332 self.len().cmp(&other.len())
5336 // memcmp compares a sequence of unsigned bytes lexicographically.
5337 // this matches the order we want for [u8], but no others (not even [i8]).
// Specialized [u8] path: one memcmp over the common prefix; a zero result
// falls back to comparing lengths, a negative result means Less.
5338 impl SliceOrd<u8> for [u8] {
5340 fn compare(&self, other: &[u8]) -> Ordering {
// SAFETY: both pointers are valid for min(len, len) bytes by construction.
5341 let order = unsafe {
5342 memcmp(self.as_ptr(), other.as_ptr(),
5343 cmp::min(self.len(), other.len()))
5346 self.len().cmp(&other.len())
5347 } else if order < 0 {
// NOTE(review): elided extract — the macro's recursion arm and closing braces
// are missing. Comments only.
5356 /// Trait implemented for types that can be compared for equality using
5357 /// their bytewise representation
5358 trait BytewiseEquality { }
// Generates an empty marker impl for each listed type. Only types whose
// `PartialEq` agrees with raw-byte comparison are listed (note: ordering via
// bytes would NOT be valid for the signed types — equality only).
5360 macro_rules! impl_marker_for {
5361 ($traitname:ident, $($ty:ty)*) => {
5363 impl $traitname for $ty { }
5368 impl_marker_for!(BytewiseEquality,
5369 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
// NOTE(review): elided extract — `Iter::get_unchecked`'s body line and the
// closing braces are missing. Comments only.
//
// TrustedRandomAccess lets `zip` index these iterators without bounds checks.
// SAFETY contract for both: the caller guarantees `i < len`.
5372 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
5373 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
5376 fn may_have_side_effect() -> bool { false }
5380 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
5381 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
// Raw offset from the iterator's base pointer; uniqueness of the &mut relies
// on the caller never requesting the same index twice (part of the contract).
5382 &mut *self.ptr.add(i)
5384 fn may_have_side_effect() -> bool { false }
// NOTE(review): elided extract — closing braces are missing and the final impl
// runs past the end of the visible region. Comments only.
//
// Specialization hook behind `[T]::contains`: generic linear search, with
// memchr fast paths for byte-sized element types.
5387 trait SliceContains: Sized {
5388 fn slice_contains(&self, x: &[Self]) -> bool;
// Fallback: plain linear scan via `Iterator::any`.
5391 impl<T> SliceContains for T where T: PartialEq {
5392 default fn slice_contains(&self, x: &[Self]) -> bool {
5393 x.iter().any(|y| *y == *self)
5397 impl SliceContains for u8 {
5398 fn slice_contains(&self, x: &[Self]) -> bool {
5399 memchr::memchr(*self, x).is_some()
// i8 reuses the u8 memchr by reinterpreting the buffer: i8 and u8 have
// identical size/layout, and equality of i8s coincides with equality of their
// byte representations.
5403 impl SliceContains for i8 {
5404 fn slice_contains(&self, x: &[Self]) -> bool {
5405 let byte = *self as u8;
// SAFETY: *const i8 -> *const u8 preserves address, alignment and length.
5406 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
5407 memchr::memchr(byte, bytes).is_some()