1 //! Slice management and manipulation.
3 //! For more details see [`std::slice`].
5 //! [`std::slice`]: ../../std/slice/index.html
7 #![stable(feature = "rust1", since = "1.0.0")]
9 // How this module is organized.
11 // The library infrastructure for slices is fairly messy. There's
12 // a lot of stuff defined here. Let's keep it clean.
14 // The layout of this file is thus:
16 // * Inherent methods. This is where most of the slice API resides.
17 // * Implementations of a few common traits with important slice ops.
18 // * Definitions of a bunch of iterators.
20 // * The `raw` and `bytes` submodules.
21 // * Boilerplate trait implementations.
23 use cmp::Ordering::{self, Less, Equal, Greater};
26 use intrinsics::assume;
29 use ops::{FnMut, Try, self};
31 use option::Option::{None, Some};
33 use result::Result::{Ok, Err};
36 use marker::{Copy, Send, Sync, Sized, self};
38 #[unstable(feature = "slice_internals", issue = "0",
39 reason = "exposed from core to be reused in std; use the memchr crate")]
40 /// Pure rust memchr implementation, taken from rust-memchr
// Union giving layout-compatible views of a slice reference, so that the
// length word of the fat pointer can be read directly (see `len` below,
// which evaluates `Repr { rust: self }.raw.len`).
// NOTE(review): the `rust` and `raw` fields that `len` reads are not visible
// in this excerpt (source lines elided) — confirm against the full file.
47 union Repr<'a, T: 'a> {
49 rust_mut: &'a mut [T],
66 /// Returns the number of elements in the slice.
71 /// let a = [1, 2, 3];
72 /// assert_eq!(a.len(), 3);
74 #[stable(feature = "rust1", since = "1.0.0")]
76 #[rustc_const_unstable(feature = "const_slice_len")]
77 pub const fn len(&self) -> usize {
// Reinterpret the slice reference through the `Repr` union and read the
// length word of the fat pointer — presumably done this way (rather than
// via an intrinsic call) so the fn stays usable in const context; confirm.
79 Repr { rust: self }.raw.len
83 /// Returns `true` if the slice has a length of 0.
88 /// let a = [1, 2, 3];
89 /// assert!(!a.is_empty());
91 #[stable(feature = "rust1", since = "1.0.0")]
93 #[rustc_const_unstable(feature = "const_slice_len")]
// Body elided in this excerpt; by the doc contract it must be equivalent
// to `self.len() == 0`.
94 pub const fn is_empty(&self) -> bool {
98 /// Returns the first element of the slice, or `None` if it is empty.
103 /// let v = [10, 40, 30];
104 /// assert_eq!(Some(&10), v.first());
106 /// let w: &[i32] = &[];
107 /// assert_eq!(None, w.first());
109 #[stable(feature = "rust1", since = "1.0.0")]
111 pub fn first(&self) -> Option<&T> {
115 /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
120 /// let x = &mut [0, 1, 2];
122 /// if let Some(first) = x.first_mut() {
125 /// assert_eq!(x, &[5, 1, 2]);
127 #[stable(feature = "rust1", since = "1.0.0")]
129 pub fn first_mut(&mut self) -> Option<&mut T> {
133 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
138 /// let x = &[0, 1, 2];
140 /// if let Some((first, elements)) = x.split_first() {
141 /// assert_eq!(first, &0);
142 /// assert_eq!(elements, &[1, 2]);
145 #[stable(feature = "slice_splits", since = "1.5.0")]
147 pub fn split_first(&self) -> Option<(&T, &[T])> {
// Head element plus tail subslice; indexing is safe after the emptiness check.
148 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
151 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
156 /// let x = &mut [0, 1, 2];
158 /// if let Some((first, elements)) = x.split_first_mut() {
163 /// assert_eq!(x, &[3, 4, 5]);
165 #[stable(feature = "slice_splits", since = "1.5.0")]
167 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
168 if self.is_empty() { None } else {
// `split_at_mut(1)` yields two *disjoint* mutable borrows, which is what
// lets both the head element and the tail be returned mutably at once.
169 let split = self.split_at_mut(1);
170 Some((&mut split.0[0], split.1))
174 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
179 /// let x = &[0, 1, 2];
181 /// if let Some((last, elements)) = x.split_last() {
182 /// assert_eq!(last, &2);
183 /// assert_eq!(elements, &[0, 1]);
186 #[stable(feature = "slice_splits", since = "1.5.0")]
188 pub fn split_last(&self) -> Option<(&T, &[T])> {
189 let len = self.len();
190 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
193 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
198 /// let x = &mut [0, 1, 2];
200 /// if let Some((last, elements)) = x.split_last_mut() {
205 /// assert_eq!(x, &[4, 5, 3]);
207 #[stable(feature = "slice_splits", since = "1.5.0")]
209 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
210 let len = self.len();
211 if len == 0 { None } else {
// Split just before the last element: `split.1` is the one-element tail
// (so `split.1[0]` is the last element) and `split.0` is everything before it.
212 let split = self.split_at_mut(len - 1);
213 Some((&mut split.1[0], split.0))
218 /// Returns the last element of the slice, or `None` if it is empty.
223 /// let v = [10, 40, 30];
224 /// assert_eq!(Some(&30), v.last());
226 /// let w: &[i32] = &[];
227 /// assert_eq!(None, w.last());
229 #[stable(feature = "rust1", since = "1.0.0")]
231 pub fn last(&self) -> Option<&T> {
// `checked_sub(1)?` bails out with `None` on an empty slice instead of
// underflowing the index computation.
232 let last_idx = self.len().checked_sub(1)?;
236 /// Returns a mutable reference to the last item in the slice, or `None` if it is empty.
241 /// let x = &mut [0, 1, 2];
243 /// if let Some(last) = x.last_mut() {
246 /// assert_eq!(x, &[0, 1, 10]);
248 #[stable(feature = "rust1", since = "1.0.0")]
250 pub fn last_mut(&mut self) -> Option<&mut T> {
251 let last_idx = self.len().checked_sub(1)?;
252 self.get_mut(last_idx)
255 /// Returns a reference to an element or subslice depending on the type of
258 /// - If given a position, returns a reference to the element at that
259 /// position or `None` if out of bounds.
260 /// - If given a range, returns the subslice corresponding to that range,
261 /// or `None` if out of bounds.
266 /// let v = [10, 40, 30];
267 /// assert_eq!(Some(&40), v.get(1));
268 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
269 /// assert_eq!(None, v.get(3));
270 /// assert_eq!(None, v.get(0..4));
272 #[stable(feature = "rust1", since = "1.0.0")]
274 pub fn get<I>(&self, index: I) -> Option<&I::Output>
275 where I: SliceIndex<Self>
280 /// Returns a mutable reference to an element or subslice depending on the
281 /// type of index (see [`get`]) or `None` if the index is out of bounds.
283 /// [`get`]: #method.get
288 /// let x = &mut [0, 1, 2];
290 /// if let Some(elem) = x.get_mut(1) {
293 /// assert_eq!(x, &[0, 42, 2]);
295 #[stable(feature = "rust1", since = "1.0.0")]
297 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
298 where I: SliceIndex<Self>
303 /// Returns a reference to an element or subslice, without doing bounds
306 /// This is generally not recommended, use with caution! For a safe
307 /// alternative see [`get`].
309 /// [`get`]: #method.get
314 /// let x = &[1, 2, 4];
317 /// assert_eq!(x.get_unchecked(1), &2);
320 #[stable(feature = "rust1", since = "1.0.0")]
322 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
323 where I: SliceIndex<Self>
// Delegates to the `SliceIndex` impl; the caller must guarantee the index
// is in bounds (no check is performed here).
325 index.get_unchecked(self)
328 /// Returns a mutable reference to an element or subslice, without doing
331 /// This is generally not recommended, use with caution! For a safe
332 /// alternative see [`get_mut`].
334 /// [`get_mut`]: #method.get_mut
339 /// let x = &mut [1, 2, 4];
342 /// let elem = x.get_unchecked_mut(1);
345 /// assert_eq!(x, &[1, 13, 4]);
347 #[stable(feature = "rust1", since = "1.0.0")]
349 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
350 where I: SliceIndex<Self>
// Mutable counterpart of `get_unchecked`; same caller obligation.
352 index.get_unchecked_mut(self)
355 /// Returns a raw pointer to the slice's buffer.
357 /// The caller must ensure that the slice outlives the pointer this
358 /// function returns, or else it will end up pointing to garbage.
360 /// Modifying the container referenced by this slice may cause its buffer
361 /// to be reallocated, which would also make any pointers to it invalid.
366 /// let x = &[1, 2, 4];
367 /// let x_ptr = x.as_ptr();
370 /// for i in 0..x.len() {
371 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
375 #[stable(feature = "rust1", since = "1.0.0")]
377 pub const fn as_ptr(&self) -> *const T {
// Casting the fat `*const [T]` to a thin `*const T` keeps just the data
// address, discarding the length word.
378 self as *const [T] as *const T
381 /// Returns an unsafe mutable pointer to the slice's buffer.
383 /// The caller must ensure that the slice outlives the pointer this
384 /// function returns, or else it will end up pointing to garbage.
386 /// Modifying the container referenced by this slice may cause its buffer
387 /// to be reallocated, which would also make any pointers to it invalid.
392 /// let x = &mut [1, 2, 4];
393 /// let x_ptr = x.as_mut_ptr();
396 /// for i in 0..x.len() {
397 /// *x_ptr.add(i) += 2;
400 /// assert_eq!(x, &[3, 4, 6]);
402 #[stable(feature = "rust1", since = "1.0.0")]
404 pub fn as_mut_ptr(&mut self) -> *mut T {
// Same thin-pointer cast as `as_ptr`, through the mutable raw-slice type.
405 self as *mut [T] as *mut T
408 /// Swaps two elements in the slice.
412 /// * a - The index of the first element
413 /// * b - The index of the second element
417 /// Panics if `a` or `b` are out of bounds.
422 /// let mut v = ["a", "b", "c", "d"];
424 /// assert!(v == ["a", "d", "c", "b"]);
426 #[stable(feature = "rust1", since = "1.0.0")]
428 pub fn swap(&mut self, a: usize, b: usize) {
430 // Can't take two mutable loans from one vector, so instead just cast
431 // them to their raw pointers to do the swap
// Bounds checks happen here via the `self[a]`/`self[b]` indexing, before
// any raw-pointer work. NOTE(review): the actual `ptr::swap` call is on
// lines elided from this excerpt.
432 let pa: *mut T = &mut self[a];
433 let pb: *mut T = &mut self[b];
438 /// Reverses the order of elements in the slice, in place.
443 /// let mut v = [1, 2, 3];
445 /// assert!(v == [3, 2, 1]);
447 #[stable(feature = "rust1", since = "1.0.0")]
449 pub fn reverse(&mut self) {
450 let mut i: usize = 0;
453 // For very small types, all the individual reads in the normal
454 // path perform poorly. We can do better, given efficient unaligned
455 // load/store, by loading a larger chunk and reversing a register.
457 // Ideally LLVM would do this for us, as it knows better than we do
458 // whether unaligned reads are efficient (since that changes between
459 // different ARM versions, for example) and what the best chunk size
460 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
461 // the loop, so we need to do this ourselves. (Hypothesis: reverse
462 // is troublesome because the sides can be aligned differently --
463 // will be, when the length is odd -- so there's no way of emitting
464 // pre- and postludes to use fully-aligned SIMD in the middle.)
// NOTE(review): the bindings for `ln` and `fast_unaligned`, and the loop
// increments / closing swaps, sit on lines elided from this excerpt.
467 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
// Fast path #1: byte-sized elements — reverse a usize-wide chunk from each
// end at once, using `swap_bytes` (llvm.bswap) to reverse within the chunk.
469 if fast_unaligned && mem::size_of::<T>() == 1 {
470 // Use the llvm.bswap intrinsic to reverse u8s in a usize
471 let chunk = mem::size_of::<usize>();
472 while i + chunk - 1 < ln / 2 {
474 let pa: *mut T = self.get_unchecked_mut(i);
475 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
476 let va = ptr::read_unaligned(pa as *mut usize);
477 let vb = ptr::read_unaligned(pb as *mut usize);
478 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
479 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
// Fast path #2: two-byte elements — a 32-bit rotate by 16 swaps the pair of
// u16s held in one u32 load.
485 if fast_unaligned && mem::size_of::<T>() == 2 {
486 // Use rotate-by-16 to reverse u16s in a u32
487 let chunk = mem::size_of::<u32>() / 2;
488 while i + chunk - 1 < ln / 2 {
490 let pa: *mut T = self.get_unchecked_mut(i);
491 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
492 let va = ptr::read_unaligned(pa as *mut u32);
493 let vb = ptr::read_unaligned(pb as *mut u32);
494 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
495 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
// Generic tail: element-by-element swap from both ends toward the middle.
502 // Unsafe swap to avoid the bounds check in safe swap.
504 let pa: *mut T = self.get_unchecked_mut(i);
505 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
512 /// Returns an iterator over the slice.
517 /// let x = &[1, 2, 4];
518 /// let mut iterator = x.iter();
520 /// assert_eq!(iterator.next(), Some(&1));
521 /// assert_eq!(iterator.next(), Some(&2));
522 /// assert_eq!(iterator.next(), Some(&4));
523 /// assert_eq!(iterator.next(), None);
525 #[stable(feature = "rust1", since = "1.0.0")]
527 pub fn iter(&self) -> Iter<T> {
529 let ptr = self.as_ptr();
// A slice's data pointer is never null; telling LLVM so lets it optimize
// the iterator's null-based `Option` layout and loop bounds.
530 assume(!ptr.is_null());
// For zero-sized types the pointer never advances, so `end` instead encodes
// the remaining *count*: the data address plus `len` in byte units
// (`wrapping_add` on a `*const u8` to avoid offset UB).
532 let end = if mem::size_of::<T>() == 0 {
533 (ptr as *const u8).wrapping_add(self.len()) as *const T
541 _marker: marker::PhantomData
546 /// Returns an iterator that allows modifying each value.
551 /// let x = &mut [1, 2, 4];
552 /// for elem in x.iter_mut() {
555 /// assert_eq!(x, &[3, 4, 6]);
557 #[stable(feature = "rust1", since = "1.0.0")]
559 pub fn iter_mut(&mut self) -> IterMut<T> {
561 let ptr = self.as_mut_ptr();
562 assume(!ptr.is_null());
// Same ZST end-pointer encoding as in `iter`, with mutable pointers.
564 let end = if mem::size_of::<T>() == 0 {
565 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
573 _marker: marker::PhantomData
578 /// Returns an iterator over all contiguous windows of length
579 /// `size`. The windows overlap. If the slice is shorter than
580 /// `size`, the iterator returns no values.
584 /// Panics if `size` is 0.
589 /// let slice = ['r', 'u', 's', 't'];
590 /// let mut iter = slice.windows(2);
591 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
592 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
593 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
594 /// assert!(iter.next().is_none());
597 /// If the slice is shorter than `size`:
600 /// let slice = ['f', 'o', 'o'];
601 /// let mut iter = slice.windows(4);
602 /// assert!(iter.next().is_none());
604 #[stable(feature = "rust1", since = "1.0.0")]
606 pub fn windows(&self, size: usize) -> Windows<T> {
// NOTE(review): the documented `size != 0` assertion is on a line elided
// from this excerpt; confirm it precedes this constructor call.
608 Windows { v: self, size }
611 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
612 /// beginning of the slice.
614 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
615 /// slice, then the last chunk will not have length `chunk_size`.
617 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
618 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
619 /// slice of the slice.
623 /// Panics if `chunk_size` is 0.
628 /// let slice = ['l', 'o', 'r', 'e', 'm'];
629 /// let mut iter = slice.chunks(2);
630 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
631 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
632 /// assert_eq!(iter.next().unwrap(), &['m']);
633 /// assert!(iter.next().is_none());
636 /// [`chunks_exact`]: #method.chunks_exact
637 /// [`rchunks`]: #method.rchunks
638 #[stable(feature = "rust1", since = "1.0.0")]
640 pub fn chunks(&self, chunk_size: usize) -> Chunks<T> {
// A zero chunk size would loop forever, hence the documented panic.
641 assert!(chunk_size != 0);
642 Chunks { v: self, chunk_size }
645 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
646 /// beginning of the slice.
648 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
649 /// length of the slice, then the last chunk will not have length `chunk_size`.
651 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
652 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
653 /// the end of the slice of the slice.
657 /// Panics if `chunk_size` is 0.
662 /// let v = &mut [0, 0, 0, 0, 0];
663 /// let mut count = 1;
665 /// for chunk in v.chunks_mut(2) {
666 /// for elem in chunk.iter_mut() {
671 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
674 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
675 /// [`rchunks_mut`]: #method.rchunks_mut
676 #[stable(feature = "rust1", since = "1.0.0")]
678 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
679 assert!(chunk_size != 0);
680 ChunksMut { v: self, chunk_size }
683 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
684 /// beginning of the slice.
686 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
687 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
688 /// from the `remainder` function of the iterator.
690 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
691 /// resulting code better than in the case of [`chunks`].
693 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
694 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
698 /// Panics if `chunk_size` is 0.
703 /// let slice = ['l', 'o', 'r', 'e', 'm'];
704 /// let mut iter = slice.chunks_exact(2);
705 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
706 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
707 /// assert!(iter.next().is_none());
708 /// assert_eq!(iter.remainder(), &['m']);
711 /// [`chunks`]: #method.chunks
712 /// [`rchunks_exact`]: #method.rchunks_exact
713 #[stable(feature = "chunks_exact", since = "1.31.0")]
715 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<T> {
716 assert!(chunk_size != 0);
// Split off the tail that doesn't fill a whole chunk up front; `fst` holds
// the exactly-divisible prefix and `snd` becomes the `remainder()`.
717 let rem = self.len() % chunk_size;
718 let len = self.len() - rem;
719 let (fst, snd) = self.split_at(len);
720 ChunksExact { v: fst, rem: snd, chunk_size }
723 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
724 /// beginning of the slice.
726 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
727 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
728 /// retrieved from the `into_remainder` function of the iterator.
730 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
731 /// resulting code better than in the case of [`chunks_mut`].
733 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
734 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
735 /// the slice of the slice.
739 /// Panics if `chunk_size` is 0.
744 /// let v = &mut [0, 0, 0, 0, 0];
745 /// let mut count = 1;
747 /// for chunk in v.chunks_exact_mut(2) {
748 /// for elem in chunk.iter_mut() {
753 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
756 /// [`chunks_mut`]: #method.chunks_mut
757 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
758 #[stable(feature = "chunks_exact", since = "1.31.0")]
760 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<T> {
761 assert!(chunk_size != 0);
// Same prefix/remainder partition as `chunks_exact`, via disjoint mutable halves.
762 let rem = self.len() % chunk_size;
763 let len = self.len() - rem;
764 let (fst, snd) = self.split_at_mut(len);
765 ChunksExactMut { v: fst, rem: snd, chunk_size }
768 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
771 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
772 /// slice, then the last chunk will not have length `chunk_size`.
774 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
775 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
780 /// Panics if `chunk_size` is 0.
785 /// let slice = ['l', 'o', 'r', 'e', 'm'];
786 /// let mut iter = slice.rchunks(2);
787 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
788 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
789 /// assert_eq!(iter.next().unwrap(), &['l']);
790 /// assert!(iter.next().is_none());
793 /// [`rchunks_exact`]: #method.rchunks_exact
794 /// [`chunks`]: #method.chunks
795 #[stable(feature = "rchunks", since = "1.31.0")]
797 pub fn rchunks(&self, chunk_size: usize) -> RChunks<T> {
798 assert!(chunk_size != 0);
799 RChunks { v: self, chunk_size }
802 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
805 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
806 /// length of the slice, then the last chunk will not have length `chunk_size`.
808 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
809 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
810 /// beginning of the slice.
814 /// Panics if `chunk_size` is 0.
819 /// let v = &mut [0, 0, 0, 0, 0];
820 /// let mut count = 1;
822 /// for chunk in v.rchunks_mut(2) {
823 /// for elem in chunk.iter_mut() {
828 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
831 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
832 /// [`chunks_mut`]: #method.chunks_mut
833 #[stable(feature = "rchunks", since = "1.31.0")]
835 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<T> {
// Mutable counterpart of `rchunks`; same zero-size panic contract.
836 assert!(chunk_size != 0);
837 RChunksMut { v: self, chunk_size }
840 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
841 /// end of the slice.
843 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
844 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
845 /// from the `remainder` function of the iterator.
847 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
848 /// resulting code better than in the case of [`chunks`].
850 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
851 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
852 /// slice of the slice.
856 /// Panics if `chunk_size` is 0.
861 /// let slice = ['l', 'o', 'r', 'e', 'm'];
862 /// let mut iter = slice.rchunks_exact(2);
863 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
864 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
865 /// assert!(iter.next().is_none());
866 /// assert_eq!(iter.remainder(), &['l']);
869 /// [`chunks`]: #method.chunks
870 /// [`rchunks`]: #method.rchunks
871 /// [`chunks_exact`]: #method.chunks_exact
872 #[stable(feature = "rchunks", since = "1.31.0")]
874 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<T> {
875 assert!(chunk_size != 0);
// Iteration runs from the back, so the non-exact remainder is the *front*
// `rem` elements: `fst` (the prefix) becomes `remainder()`, `snd` the
// exactly-divisible suffix.
876 let rem = self.len() % chunk_size;
877 let (fst, snd) = self.split_at(rem);
878 RChunksExact { v: snd, rem: fst, chunk_size }
881 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
884 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
885 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
886 /// retrieved from the `into_remainder` function of the iterator.
888 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
889 /// resulting code better than in the case of [`chunks_mut`].
891 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
892 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
893 /// of the slice of the slice.
897 /// Panics if `chunk_size` is 0.
902 /// let v = &mut [0, 0, 0, 0, 0];
903 /// let mut count = 1;
905 /// for chunk in v.rchunks_exact_mut(2) {
906 /// for elem in chunk.iter_mut() {
911 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
914 /// [`chunks_mut`]: #method.chunks_mut
915 /// [`rchunks_mut`]: #method.rchunks_mut
916 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
917 #[stable(feature = "rchunks", since = "1.31.0")]
919 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<T> {
920 assert!(chunk_size != 0);
// Same front-remainder partition as `rchunks_exact`, with mutable halves.
921 let rem = self.len() % chunk_size;
922 let (fst, snd) = self.split_at_mut(rem);
923 RChunksExactMut { v: snd, rem: fst, chunk_size }
926 /// Divides one slice into two at an index.
928 /// The first will contain all indices from `[0, mid)` (excluding
929 /// the index `mid` itself) and the second will contain all
930 /// indices from `[mid, len)` (excluding the index `len` itself).
934 /// Panics if `mid > len`.
939 /// let v = [1, 2, 3, 4, 5, 6];
942 /// let (left, right) = v.split_at(0);
943 /// assert!(left == []);
944 /// assert!(right == [1, 2, 3, 4, 5, 6]);
948 /// let (left, right) = v.split_at(2);
949 /// assert!(left == [1, 2]);
950 /// assert!(right == [3, 4, 5, 6]);
954 /// let (left, right) = v.split_at(6);
955 /// assert!(left == [1, 2, 3, 4, 5, 6]);
956 /// assert!(right == []);
959 #[stable(feature = "rust1", since = "1.0.0")]
961 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
// Plain range indexing; the `mid > len` panic comes from the slice ops.
962 (&self[..mid], &self[mid..])
965 /// Divides one mutable slice into two at an index.
967 /// The first will contain all indices from `[0, mid)` (excluding
968 /// the index `mid` itself) and the second will contain all
969 /// indices from `[mid, len)` (excluding the index `len` itself).
973 /// Panics if `mid > len`.
978 /// let mut v = [1, 0, 3, 0, 5, 6];
979 /// // scoped to restrict the lifetime of the borrows
981 /// let (left, right) = v.split_at_mut(2);
982 /// assert!(left == [1, 0]);
983 /// assert!(right == [3, 0, 5, 6]);
987 /// assert!(v == [1, 2, 3, 4, 5, 6]);
989 #[stable(feature = "rust1", since = "1.0.0")]
991 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
992 let len = self.len();
993 let ptr = self.as_mut_ptr();
// The two raw-parts slices are non-overlapping ([0, mid) and [mid, len)),
// which is what makes handing out two `&mut` borrows sound.
// NOTE(review): the `assert!(mid <= len)` guarding this appears to be on a
// line elided from this excerpt — confirm against the full file.
998 (from_raw_parts_mut(ptr, mid),
999 from_raw_parts_mut(ptr.add(mid), len - mid))
1003 /// Returns an iterator over subslices separated by elements that match
1004 /// `pred`. The matched element is not contained in the subslices.
1009 /// let slice = [10, 40, 33, 20];
1010 /// let mut iter = slice.split(|num| num % 3 == 0);
1012 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1013 /// assert_eq!(iter.next().unwrap(), &[20]);
1014 /// assert!(iter.next().is_none());
1017 /// If the first element is matched, an empty slice will be the first item
1018 /// returned by the iterator. Similarly, if the last element in the slice
1019 /// is matched, an empty slice will be the last item returned by the
1023 /// let slice = [10, 40, 33];
1024 /// let mut iter = slice.split(|num| num % 3 == 0);
1026 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1027 /// assert_eq!(iter.next().unwrap(), &[]);
1028 /// assert!(iter.next().is_none());
1031 /// If two matched elements are directly adjacent, an empty slice will be
1032 /// present between them:
1035 /// let slice = [10, 6, 33, 20];
1036 /// let mut iter = slice.split(|num| num % 3 == 0);
1038 /// assert_eq!(iter.next().unwrap(), &[10]);
1039 /// assert_eq!(iter.next().unwrap(), &[]);
1040 /// assert_eq!(iter.next().unwrap(), &[20]);
1041 /// assert!(iter.next().is_none());
1043 #[stable(feature = "rust1", since = "1.0.0")]
1045 pub fn split<F>(&self, pred: F) -> Split<T, F>
// `pred` takes elements by reference and is stored in the iterator.
1046 where F: FnMut(&T) -> bool
1055 /// Returns an iterator over mutable subslices separated by elements that
1056 /// match `pred`. The matched element is not contained in the subslices.
1061 /// let mut v = [10, 40, 30, 20, 60, 50];
1063 /// for group in v.split_mut(|num| *num % 3 == 0) {
1066 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1068 #[stable(feature = "rust1", since = "1.0.0")]
1070 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<T, F>
1071 where F: FnMut(&T) -> bool
// `finished` tracks whether the trailing subslice has been yielded.
1073 SplitMut { v: self, pred, finished: false }
1076 /// Returns an iterator over subslices separated by elements that match
1077 /// `pred`, starting at the end of the slice and working backwards.
1078 /// The matched element is not contained in the subslices.
1083 /// let slice = [11, 22, 33, 0, 44, 55];
1084 /// let mut iter = slice.rsplit(|num| *num == 0);
1086 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1087 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1088 /// assert_eq!(iter.next(), None);
1091 /// As with `split()`, if the first or last element is matched, an empty
1092 /// slice will be the first (or last) item returned by the iterator.
1095 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1096 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1097 /// assert_eq!(it.next().unwrap(), &[]);
1098 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1099 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1100 /// assert_eq!(it.next().unwrap(), &[]);
1101 /// assert_eq!(it.next(), None);
1103 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1105 pub fn rsplit<F>(&self, pred: F) -> RSplit<T, F>
1106 where F: FnMut(&T) -> bool
// Simply the forward splitter driven from the back: `RSplit` wraps `Split`.
1108 RSplit { inner: self.split(pred) }
1111 /// Returns an iterator over mutable subslices separated by elements that
1112 /// match `pred`, starting at the end of the slice and working
1113 /// backwards. The matched element is not contained in the subslices.
1118 /// let mut v = [100, 400, 300, 200, 600, 500];
1120 /// let mut count = 0;
1121 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1123 /// group[0] = count;
1125 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1128 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1130 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<T, F>
1131 where F: FnMut(&T) -> bool
// Mutable counterpart, wrapping `SplitMut` the same way.
1133 RSplitMut { inner: self.split_mut(pred) }
1136 /// Returns an iterator over subslices separated by elements that match
1137 /// `pred`, limited to returning at most `n` items. The matched element is
1138 /// not contained in the subslices.
1140 /// The last element returned, if any, will contain the remainder of the
1145 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1146 /// `[20, 60, 50]`):
1149 /// let v = [10, 40, 30, 20, 60, 50];
1151 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1152 /// println!("{:?}", group);
1155 #[stable(feature = "rust1", since = "1.0.0")]
1157 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<T, F>
1158 where F: FnMut(&T) -> bool
// `GenericSplitN` caps the underlying unlimited splitter at `n` items.
// NOTE(review): the field carrying `n` into `GenericSplitN` is on a line
// elided from this excerpt.
1161 inner: GenericSplitN {
1162 iter: self.split(pred),
1168 /// Returns an iterator over subslices separated by elements that match
1169 /// `pred`, limited to returning at most `n` items. The matched element is
1170 /// not contained in the subslices.
1172 /// The last element returned, if any, will contain the remainder of the
1178 /// let mut v = [10, 40, 30, 20, 60, 50];
1180 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1183 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1185 #[stable(feature = "rust1", since = "1.0.0")]
1187 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<T, F>
1188 where F: FnMut(&T) -> bool
// Same capped wrapper as `splitn`, over the mutable splitter.
1191 inner: GenericSplitN {
1192 iter: self.split_mut(pred),
1198 /// Returns an iterator over subslices separated by elements that match
1199 /// `pred` limited to returning at most `n` items. This starts at the end of
1200 /// the slice and works backwards. The matched element is not contained in
1203 /// The last element returned, if any, will contain the remainder of the
1208 /// Print the slice split once, starting from the end, by numbers divisible
1209 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1212 /// let v = [10, 40, 30, 20, 60, 50];
1214 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1215 /// println!("{:?}", group);
1218 #[stable(feature = "rust1", since = "1.0.0")]
1220 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<T, F>
1221 where F: FnMut(&T) -> bool
// Capped (`n`-item) wrapper over the reverse splitter `rsplit`.
1224 inner: GenericSplitN {
1225 iter: self.rsplit(pred),
1231 /// Returns an iterator over subslices separated by elements that match
1232 /// `pred` limited to returning at most `n` items. This starts at the end of
1233 /// the slice and works backwards. The matched element is not contained in
1236 /// The last element returned, if any, will contain the remainder of the
1242 /// let mut s = [10, 40, 30, 20, 60, 50];
1244 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1247 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1249 #[stable(feature = "rust1", since = "1.0.0")]
1251 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<T, F>
1252 where F: FnMut(&T) -> bool
// Mutable counterpart of `rsplitn`.
1255 inner: GenericSplitN {
1256 iter: self.rsplit_mut(pred),
1262 /// Returns `true` if the slice contains an element with the given value.
1267 /// let v = [10, 40, 30];
1268 /// assert!(v.contains(&30));
1269 /// assert!(!v.contains(&50));
1271 #[stable(feature = "rust1", since = "1.0.0")]
1272 pub fn contains(&self, x: &T) -> bool
// Dispatches to `slice_contains` on the needle — presumably a
// specialization hook (e.g. memchr for `u8`); confirm against the trait's
// definition elsewhere in the file.
1275 x.slice_contains(self)
1278 /// Returns `true` if `needle` is a prefix of the slice.
1283 /// let v = [10, 40, 30];
1284 /// assert!(v.starts_with(&[10]));
1285 /// assert!(v.starts_with(&[10, 40]));
1286 /// assert!(!v.starts_with(&[50]));
1287 /// assert!(!v.starts_with(&[10, 50]));
1290 /// Always returns `true` if `needle` is an empty slice:
1293 /// let v = &[10, 40, 30];
1294 /// assert!(v.starts_with(&[]));
1295 /// let v: &[u8] = &[];
1296 /// assert!(v.starts_with(&[]));
1298 #[stable(feature = "rust1", since = "1.0.0")]
1299 pub fn starts_with(&self, needle: &[T]) -> bool
// Length check first so the prefix slicing below cannot go out of bounds;
// an empty needle trivially satisfies both conditions.
1302 let n = needle.len();
1303 self.len() >= n && needle == &self[..n]
1306 /// Returns `true` if `needle` is a suffix of the slice.
1311 /// let v = [10, 40, 30];
1312 /// assert!(v.ends_with(&[30]));
1313 /// assert!(v.ends_with(&[40, 30]));
1314 /// assert!(!v.ends_with(&[50]));
1315 /// assert!(!v.ends_with(&[50, 30]));
1318 /// Always returns `true` if `needle` is an empty slice:
1321 /// let v = &[10, 40, 30];
1322 /// assert!(v.ends_with(&[]));
1323 /// let v: &[u8] = &[];
1324 /// assert!(v.ends_with(&[]));
1326 #[stable(feature = "rust1", since = "1.0.0")]
1327 pub fn ends_with(&self, needle: &[T]) -> bool
// Mirror of `starts_with`: compare the needle against the trailing `n` elements.
1330 let (m, n) = (self.len(), needle.len());
1331 m >= n && needle == &self[m-n..]
1334 /// Binary searches this sorted slice for a given element.
1336 /// If the value is found then [`Result::Ok`] is returned, containing the
1337 /// index of the matching element. If there are multiple matches, then any
1338 /// one of the matches could be returned. If the value is not found then
1339 /// [`Result::Err`] is returned, containing the index where a matching
1340 /// element could be inserted while maintaining sorted order.
1344 /// Looks up a series of four elements. The first is found, with a
1345 /// uniquely determined position; the second and third are not
1346 /// found; the fourth could match any position in `[1, 4]`.
1349 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1351 /// assert_eq!(s.binary_search(&13), Ok(9));
1352 /// assert_eq!(s.binary_search(&4), Err(7));
1353 /// assert_eq!(s.binary_search(&100), Err(13));
1354 /// let r = s.binary_search(&1);
1355 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1357 #[stable(feature = "rust1", since = "1.0.0")]
1358 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
1361 self.binary_search_by(|p| p.cmp(x))
1364 /// Binary searches this sorted slice with a comparator function.
1366 /// The comparator function should implement an order consistent
1367 /// with the sort order of the underlying slice, returning an
1368 /// order code that indicates whether its argument is `Less`,
1369 /// `Equal` or `Greater` the desired target.
1371 /// If the value is found then [`Result::Ok`] is returned, containing the
1372 /// index of the matching element. If there are multiple matches, then any
1373 /// one of the matches could be returned. If the value is not found then
1374 /// [`Result::Err`] is returned, containing the index where a matching
1375 /// element could be inserted while maintaining sorted order.
1379 /// Looks up a series of four elements. The first is found, with a
1380 /// uniquely determined position; the second and third are not
1381 /// found; the fourth could match any position in `[1, 4]`.
1384 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1387 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1389 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1391 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1393 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1394 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1396 #[stable(feature = "rust1", since = "1.0.0")]
1398 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1399 where F: FnMut(&'a T) -> Ordering
1402 let mut size = s.len();
1406 let mut base = 0usize;
1408 let half = size / 2;
1409 let mid = base + half;
1410 // mid is always in [0, size), that means mid is >= 0 and < size.
1411 // mid >= 0: by definition
1412 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
1413 let cmp = f(unsafe { s.get_unchecked(mid) });
1414 base = if cmp == Greater { base } else { mid };
1417 // base is always in [0, size) because base <= mid.
1418 let cmp = f(unsafe { s.get_unchecked(base) });
1419 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1423 /// Binary searches this sorted slice with a key extraction function.
1425 /// Assumes that the slice is sorted by the key, for instance with
1426 /// [`sort_by_key`] using the same key extraction function.
1428 /// If the value is found then [`Result::Ok`] is returned, containing the
1429 /// index of the matching element. If there are multiple matches, then any
1430 /// one of the matches could be returned. If the value is not found then
1431 /// [`Result::Err`] is returned, containing the index where a matching
1432 /// element could be inserted while maintaining sorted order.
1434 /// [`sort_by_key`]: #method.sort_by_key
1438 /// Looks up a series of four elements in a slice of pairs sorted by
1439 /// their second elements. The first is found, with a uniquely
1440 /// determined position; the second and third are not found; the
1441 /// fourth could match any position in `[1, 4]`.
1444 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1445 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1446 /// (1, 21), (2, 34), (4, 55)];
1448 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1449 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1450 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1451 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1452 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1454 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1456 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1457 where F: FnMut(&'a T) -> B,
1460 self.binary_search_by(|k| f(k).cmp(b))
1463 /// Sorts the slice, but may not preserve the order of equal elements.
1465 /// This sort is unstable (i.e., may reorder equal elements), in-place
1466 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1468 /// # Current implementation
1470 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1471 /// which combines the fast average case of randomized quicksort with the fast worst case of
1472 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1473 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1474 /// deterministic behavior.
1476 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1477 /// slice consists of several concatenated sorted sequences.
1482 /// let mut v = [-5, 4, 1, -3, 2];
1484 /// v.sort_unstable();
1485 /// assert!(v == [-5, -3, 1, 2, 4]);
1488 /// [pdqsort]: https://github.com/orlp/pdqsort
1489 #[stable(feature = "sort_unstable", since = "1.20.0")]
1491 pub fn sort_unstable(&mut self)
1494 sort::quicksort(self, |a, b| a.lt(b));
1497 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1500 /// This sort is unstable (i.e., may reorder equal elements), in-place
1501 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1503 /// The comparator function must define a total ordering for the elements in the slice. If
1504 /// the ordering is not total, the order of the elements is unspecified. An order is a
1505 /// total order if it is (for all a, b and c):
1507 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1508 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1510 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1511 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1514 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1515 /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
1516 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1519 /// # Current implementation
1521 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1522 /// which combines the fast average case of randomized quicksort with the fast worst case of
1523 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1524 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1525 /// deterministic behavior.
1527 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1528 /// slice consists of several concatenated sorted sequences.
1533 /// let mut v = [5, 4, 1, 3, 2];
1534 /// v.sort_unstable_by(|a, b| a.cmp(b));
1535 /// assert!(v == [1, 2, 3, 4, 5]);
1537 /// // reverse sorting
1538 /// v.sort_unstable_by(|a, b| b.cmp(a));
1539 /// assert!(v == [5, 4, 3, 2, 1]);
1542 /// [pdqsort]: https://github.com/orlp/pdqsort
1543 #[stable(feature = "sort_unstable", since = "1.20.0")]
1545 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1546 where F: FnMut(&T, &T) -> Ordering
1548 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1551 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1554 /// This sort is unstable (i.e., may reorder equal elements), in-place
1555 /// (i.e., does not allocate), and `O(m n log(m n))` worst-case, where the key function is
1558 /// # Current implementation
1560 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1561 /// which combines the fast average case of randomized quicksort with the fast worst case of
1562 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1563 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1564 /// deterministic behavior.
1566 /// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key)
1567 /// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in
1568 /// cases where the key function is expensive.
1573 /// let mut v = [-5i32, 4, 1, -3, 2];
1575 /// v.sort_unstable_by_key(|k| k.abs());
1576 /// assert!(v == [1, 2, -3, 4, -5]);
1579 /// [pdqsort]: https://github.com/orlp/pdqsort
1580 #[stable(feature = "sort_unstable", since = "1.20.0")]
1582 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1583 where F: FnMut(&T) -> K, K: Ord
1585 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1588 /// Moves all consecutive repeated elements to the end of the slice according to the
1589 /// [`PartialEq`] trait implementation.
1591 /// Returns two slices. The first contains no consecutive repeated elements.
1592 /// The second contains all the duplicates in no specified order.
1594 /// If the slice is sorted, the first returned slice contains no duplicates.
1599 /// #![feature(slice_partition_dedup)]
1601 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1603 /// let (dedup, duplicates) = slice.partition_dedup();
1605 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1606 /// assert_eq!(duplicates, [2, 3, 1]);
1608 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1610 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
1613 self.partition_dedup_by(|a, b| a == b)
1616 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1617 /// a given equality relation.
1619 /// Returns two slices. The first contains no consecutive repeated elements.
1620 /// The second contains all the duplicates in no specified order.
1622 /// The `same_bucket` function is passed references to two elements from the slice and
1623 /// must determine if the elements compare equal. The elements are passed in opposite order
1624 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1625 /// at the end of the slice.
1627 /// If the slice is sorted, the first returned slice contains no duplicates.
1632 /// #![feature(slice_partition_dedup)]
1634 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1636 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1638 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1639 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1641 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1643 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1644 where F: FnMut(&mut T, &mut T) -> bool
1646 // Although we have a mutable reference to `self`, we cannot make
1647 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1648 // must ensure that the slice is in a valid state at all times.
1650 // The way that we handle this is by using swaps; we iterate
1651 // over all the elements, swapping as we go so that at the end
1652 // the elements we wish to keep are in the front, and those we
1653 // wish to reject are at the back. We can then split the slice.
1654 // This operation is still O(n).
1656 // Example: We start in this state, where `r` represents "next
1657 // read" and `w` represents "next_write`.
1660 // +---+---+---+---+---+---+
1661 // | 0 | 1 | 1 | 2 | 3 | 3 |
1662 // +---+---+---+---+---+---+
1665 // Comparing self[r] against self[w-1], this is not a duplicate, so
1666 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1667 // r and w, leaving us with:
1670 // +---+---+---+---+---+---+
1671 // | 0 | 1 | 1 | 2 | 3 | 3 |
1672 // +---+---+---+---+---+---+
1675 // Comparing self[r] against self[w-1], this value is a duplicate,
1676 // so we increment `r` but leave everything else unchanged:
1679 // +---+---+---+---+---+---+
1680 // | 0 | 1 | 1 | 2 | 3 | 3 |
1681 // +---+---+---+---+---+---+
1684 // Comparing self[r] against self[w-1], this is not a duplicate,
1685 // so swap self[r] and self[w] and advance r and w:
1688 // +---+---+---+---+---+---+
1689 // | 0 | 1 | 2 | 1 | 3 | 3 |
1690 // +---+---+---+---+---+---+
1693 // Not a duplicate, repeat:
1696 // +---+---+---+---+---+---+
1697 // | 0 | 1 | 2 | 3 | 1 | 3 |
1698 // +---+---+---+---+---+---+
1701 // Duplicate, advance r. End of slice. Split at w.
1703 let len = self.len();
1705 return (self, &mut [])
1708 let ptr = self.as_mut_ptr();
1709 let mut next_read: usize = 1;
1710 let mut next_write: usize = 1;
1713 // Avoid bounds checks by using raw pointers.
1714 while next_read < len {
1715 let ptr_read = ptr.add(next_read);
1716 let prev_ptr_write = ptr.add(next_write - 1);
1717 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
1718 if next_read != next_write {
1719 let ptr_write = prev_ptr_write.offset(1);
1720 mem::swap(&mut *ptr_read, &mut *ptr_write);
1728 self.split_at_mut(next_write)
1731 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1732 /// to the same key.
1734 /// Returns two slices. The first contains no consecutive repeated elements.
1735 /// The second contains all the duplicates in no specified order.
1737 /// If the slice is sorted, the first returned slice contains no duplicates.
1742 /// #![feature(slice_partition_dedup)]
1744 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
1746 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
1748 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
1749 /// assert_eq!(duplicates, [21, 30, 13]);
1751 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1753 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
1754 where F: FnMut(&mut T) -> K,
1757 self.partition_dedup_by(|a, b| key(a) == key(b))
1760 /// Rotates the slice in-place such that the first `mid` elements of the
1761 /// slice move to the end while the last `self.len() - mid` elements move to
1762 /// the front. After calling `rotate_left`, the element previously at index
1763 /// `mid` will become the first element in the slice.
1767 /// This function will panic if `mid` is greater than the length of the
1768 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
1773 /// Takes linear (in `self.len()`) time.
1778 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1779 /// a.rotate_left(2);
1780 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
1783 /// Rotating a subslice:
1786 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1787 /// a[1..5].rotate_left(1);
1788 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
1790 #[stable(feature = "slice_rotate", since = "1.26.0")]
1791 pub fn rotate_left(&mut self, mid: usize) {
1792 assert!(mid <= self.len());
1793 let k = self.len() - mid;
1796 let p = self.as_mut_ptr();
1797 rotate::ptr_rotate(mid, p.add(mid), k);
1801 /// Rotates the slice in-place such that the first `self.len() - k`
1802 /// elements of the slice move to the end while the last `k` elements move
1803 /// to the front. After calling `rotate_right`, the element previously at
1804 /// index `self.len() - k` will become the first element in the slice.
1808 /// This function will panic if `k` is greater than the length of the
1809 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
1814 /// Takes linear (in `self.len()`) time.
1819 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1820 /// a.rotate_right(2);
1821 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
1824 /// Rotate a subslice:
1827 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1828 /// a[1..5].rotate_right(1);
1829 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
1831 #[stable(feature = "slice_rotate", since = "1.26.0")]
1832 pub fn rotate_right(&mut self, k: usize) {
1833 assert!(k <= self.len());
1834 let mid = self.len() - k;
1837 let p = self.as_mut_ptr();
1838 rotate::ptr_rotate(mid, p.add(mid), k);
1842 /// Copies the elements from `src` into `self`.
1844 /// The length of `src` must be the same as `self`.
1846 /// If `src` implements `Copy`, it can be more performant to use
1847 /// [`copy_from_slice`].
1851 /// This function will panic if the two slices have different lengths.
1855 /// Cloning two elements from a slice into another:
1858 /// let src = [1, 2, 3, 4];
1859 /// let mut dst = [0, 0];
1861 /// // Because the slices have to be the same length,
1862 /// // we slice the source slice from four elements
1863 /// // to two. It will panic if we don't do this.
1864 /// dst.clone_from_slice(&src[2..]);
1866 /// assert_eq!(src, [1, 2, 3, 4]);
1867 /// assert_eq!(dst, [3, 4]);
1870 /// Rust enforces that there can only be one mutable reference with no
1871 /// immutable references to a particular piece of data in a particular
1872 /// scope. Because of this, attempting to use `clone_from_slice` on a
1873 /// single slice will result in a compile failure:
1876 /// let mut slice = [1, 2, 3, 4, 5];
1878 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
1881 /// To work around this, we can use [`split_at_mut`] to create two distinct
1882 /// sub-slices from a slice:
1885 /// let mut slice = [1, 2, 3, 4, 5];
1888 /// let (left, right) = slice.split_at_mut(2);
1889 /// left.clone_from_slice(&right[1..]);
1892 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1895 /// [`copy_from_slice`]: #method.copy_from_slice
1896 /// [`split_at_mut`]: #method.split_at_mut
1897 #[stable(feature = "clone_from_slice", since = "1.7.0")]
1898 pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
1899 assert!(self.len() == src.len(),
1900 "destination and source slices have different lengths");
1901 // NOTE: We need to explicitly slice them to the same length
1902 // for bounds checking to be elided, and the optimizer will
1903 // generate memcpy for simple cases (for example T = u8).
1904 let len = self.len();
1905 let src = &src[..len];
1907 self[i].clone_from(&src[i]);
1912 /// Copies all elements from `src` into `self`, using a memcpy.
1914 /// The length of `src` must be the same as `self`.
1916 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
1920 /// This function will panic if the two slices have different lengths.
1924 /// Copying two elements from a slice into another:
1927 /// let src = [1, 2, 3, 4];
1928 /// let mut dst = [0, 0];
1930 /// // Because the slices have to be the same length,
1931 /// // we slice the source slice from four elements
1932 /// // to two. It will panic if we don't do this.
1933 /// dst.copy_from_slice(&src[2..]);
1935 /// assert_eq!(src, [1, 2, 3, 4]);
1936 /// assert_eq!(dst, [3, 4]);
1939 /// Rust enforces that there can only be one mutable reference with no
1940 /// immutable references to a particular piece of data in a particular
1941 /// scope. Because of this, attempting to use `copy_from_slice` on a
1942 /// single slice will result in a compile failure:
1945 /// let mut slice = [1, 2, 3, 4, 5];
1947 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
1950 /// To work around this, we can use [`split_at_mut`] to create two distinct
1951 /// sub-slices from a slice:
1954 /// let mut slice = [1, 2, 3, 4, 5];
1957 /// let (left, right) = slice.split_at_mut(2);
1958 /// left.copy_from_slice(&right[1..]);
1961 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1964 /// [`clone_from_slice`]: #method.clone_from_slice
1965 /// [`split_at_mut`]: #method.split_at_mut
1966 #[stable(feature = "copy_from_slice", since = "1.9.0")]
1967 pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
1968 assert_eq!(self.len(), src.len(),
1969 "destination and source slices have different lengths");
1971 ptr::copy_nonoverlapping(
1972 src.as_ptr(), self.as_mut_ptr(), self.len());
1976 /// Copies elements from one part of the slice to another part of itself,
1977 /// using a memmove.
1979 /// `src` is the range within `self` to copy from. `dest` is the starting
1980 /// index of the range within `self` to copy to, which will have the same
1981 /// length as `src`. The two ranges may overlap. The ends of the two ranges
1982 /// must be less than or equal to `self.len()`.
1986 /// This function will panic if either range exceeds the end of the slice,
1987 /// or if the end of `src` is before the start.
1991 /// Copying four bytes within a slice:
1994 /// # #![feature(copy_within)]
1995 /// let mut bytes = *b"Hello, World!";
1997 /// bytes.copy_within(1..5, 8);
1999 /// assert_eq!(&bytes, b"Hello, Wello!");
2001 #[unstable(feature = "copy_within", issue = "54236")]
2002 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
2006 let src_start = match src.start_bound() {
2007 ops::Bound::Included(&n) => n,
2008 ops::Bound::Excluded(&n) => n
2010 .unwrap_or_else(|| slice_index_overflow_fail()),
2011 ops::Bound::Unbounded => 0,
2013 let src_end = match src.end_bound() {
2014 ops::Bound::Included(&n) => n
2016 .unwrap_or_else(|| slice_index_overflow_fail()),
2017 ops::Bound::Excluded(&n) => n,
2018 ops::Bound::Unbounded => self.len(),
2020 assert!(src_start <= src_end, "src end is before src start");
2021 assert!(src_end <= self.len(), "src is out of bounds");
2022 let count = src_end - src_start;
2023 assert!(dest <= self.len() - count, "dest is out of bounds");
2026 self.get_unchecked(src_start),
2027 self.get_unchecked_mut(dest),
2033 /// Swaps all elements in `self` with those in `other`.
2035 /// The length of `other` must be the same as `self`.
2039 /// This function will panic if the two slices have different lengths.
2043 /// Swapping two elements across slices:
2046 /// let mut slice1 = [0, 0];
2047 /// let mut slice2 = [1, 2, 3, 4];
2049 /// slice1.swap_with_slice(&mut slice2[2..]);
2051 /// assert_eq!(slice1, [3, 4]);
2052 /// assert_eq!(slice2, [1, 2, 0, 0]);
2055 /// Rust enforces that there can only be one mutable reference to a
2056 /// particular piece of data in a particular scope. Because of this,
2057 /// attempting to use `swap_with_slice` on a single slice will result in
2058 /// a compile failure:
2061 /// let mut slice = [1, 2, 3, 4, 5];
2062 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2065 /// To work around this, we can use [`split_at_mut`] to create two distinct
2066 /// mutable sub-slices from a slice:
2069 /// let mut slice = [1, 2, 3, 4, 5];
2072 /// let (left, right) = slice.split_at_mut(2);
2073 /// left.swap_with_slice(&mut right[1..]);
2076 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2079 /// [`split_at_mut`]: #method.split_at_mut
2080 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2081 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2082 assert!(self.len() == other.len(),
2083 "destination and source slices have different lengths");
2085 ptr::swap_nonoverlapping(
2086 self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2090 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2091 fn align_to_offsets<U>(&self) -> (usize, usize) {
2092 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2093 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2095 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2096 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2097 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2099 // Formula to calculate this is:
2101 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2102 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2104 // Expanded and simplified:
2106 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2107 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2109 // Luckily since all this is constant-evaluated... performance here matters not!
2111 fn gcd(a: usize, b: usize) -> usize {
2112 // iterative stein’s algorithm
2113 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2114 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
2115 let (ctz_a, mut ctz_b) = unsafe {
2116 if a == 0 { return b; }
2117 if b == 0 { return a; }
2118 (::intrinsics::cttz_nonzero(a), ::intrinsics::cttz_nonzero(b))
2120 let k = ctz_a.min(ctz_b);
2121 let mut a = a >> ctz_a;
2124 // remove all factors of 2 from b
2127 ::mem::swap(&mut a, &mut b);
2134 ctz_b = ::intrinsics::cttz_nonzero(b);
2139 let gcd: usize = gcd(::mem::size_of::<T>(), ::mem::size_of::<U>());
2140 let ts: usize = ::mem::size_of::<U>() / gcd;
2141 let us: usize = ::mem::size_of::<T>() / gcd;
2143 // Armed with this knowledge, we can find how many `U`s we can fit!
2144 let us_len = self.len() / ts * us;
2145 // And how many `T`s will be in the trailing slice!
2146 let ts_len = self.len() % ts;
2150 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2153 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2154 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2155 /// middle slice the greatest length possible for a given type and input slice, but only
2156 /// your algorithm's performance should depend on that, not its correctness.
2158 /// This method has no purpose when either input element `T` or output element `U` are
2159 /// zero-sized and will return the original slice without splitting anything.
2163 /// This method is essentially a `transmute` with respect to the elements in the returned
2164 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2172 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2173 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2174 /// // less_efficient_algorithm_for_bytes(prefix);
2175 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2176 /// // less_efficient_algorithm_for_bytes(suffix);
2179 #[stable(feature = "slice_align_to", since = "1.30.0")]
2180 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2181 // Note that most of this function will be constant-evaluated,
2182 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2183 // handle ZSTs specially, which is – don't handle them at all.
2184 return (self, &[], &[]);
2187 // First, find at what point do we split between the first and 2nd slice. Easy with
2188 // ptr.align_offset.
2189 let ptr = self.as_ptr();
2190 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
2191 if offset > self.len() {
2194 let (left, rest) = self.split_at(offset);
2195 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2196 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2198 from_raw_parts(rest.as_ptr() as *const U, us_len),
2199 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len))
2203 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2206 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2207 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2208 /// middle slice the greatest length possible for a given type and input slice, but only
2209 /// your algorithm's performance should depend on that, not its correctness.
2211 /// This method has no purpose when either input element `T` or output element `U` are
2212 /// zero-sized and will return the original slice without splitting anything.
2216 /// This method is essentially a `transmute` with respect to the elements in the returned
2217 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2225 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2226 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2227 /// // less_efficient_algorithm_for_bytes(prefix);
2228 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2229 /// // less_efficient_algorithm_for_bytes(suffix);
2232 #[stable(feature = "slice_align_to", since = "1.30.0")]
2233 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2234 // Note that most of this function will be constant-evaluated,
2235 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2236 // handle ZSTs specially, which is – don't handle them at all.
2237 return (self, &mut [], &mut []);
2240 // First, find at what point do we split between the first and 2nd slice. Easy with
2241 // ptr.align_offset.
2242 let ptr = self.as_ptr();
2243 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
2244 if offset > self.len() {
2245 (self, &mut [], &mut [])
2247 let (left, rest) = self.split_at_mut(offset);
2248 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2249 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2250 let mut_ptr = rest.as_mut_ptr();
2252 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2253 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len))
// NOTE(review): this source is an elided excerpt — each line carries its original file
// line number and the gaps in numbering show missing lines (closing braces, `where`
// clauses). Code is left byte-identical; only review comments are added.
// --- `is_sorted` family: unstable (feature `is_sorted`, issue #53485) ordering checks ---
2257 /// Checks if the elements of this slice are sorted.
2259 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2260 /// slice yields exactly zero or one element, `true` is returned.
2262 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2263 /// implies that this function returns `false` if any two consecutive items are not
2269 /// #![feature(is_sorted)]
2270 /// let empty: [i32; 0] = [];
2272 /// assert!([1, 2, 2, 9].is_sorted());
2273 /// assert!(![1, 3, 2, 4].is_sorted());
2274 /// assert!([0].is_sorted());
2275 /// assert!(empty.is_sorted());
2276 /// assert!(![0.0, 1.0, std::f32::NAN].is_sorted());
2279 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2280 pub fn is_sorted(&self) -> bool
// Delegates to `is_sorted_by`, using `PartialOrd::partial_cmp` as the comparator;
// a `None` comparison (e.g. NaN) therefore makes the whole check return `false`.
2284 self.is_sorted_by(|a, b| a.partial_cmp(b))
2287 /// Checks if the elements of this slice are sorted using the given comparator function.
2289 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2290 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2291 /// [`is_sorted`]; see its documentation for more information.
2293 /// [`is_sorted`]: #method.is_sorted
2294 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2295 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2297 F: FnMut(&T, &T) -> Option<Ordering>
// Forwards to the iterator-level `is_sorted_by` (defined in the `iterator!` expansion
// for `Iter` later in this file), dereferencing the extra `&&T` layer the iterator adds.
2299 self.iter().is_sorted_by(|a, b| compare(*a, *b))
2302 /// Checks if the elements of this slice are sorted using the given key extraction function.
2304 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2305 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2306 /// documentation for more information.
2308 /// [`is_sorted`]: #method.is_sorted
2313 /// #![feature(is_sorted)]
2315 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2316 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2319 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2320 pub fn is_sorted_by_key<F, K>(&self, mut f: F) -> bool
// NOTE(review): `f` is re-applied to each element's successor (keys computed twice per
// element); visible lines don't show a caching variant — acceptable for a predicate check.
2325 self.is_sorted_by(|a, b| f(a).partial_cmp(&f(b)))
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
// --- Inherent ASCII helpers on `[u8]`; `#[lang = "slice_u8"]` marks this as the
// compiler-known impl block for byte slices. ---
2329 #[lang = "slice_u8"]
2332 /// Checks if all bytes in this slice are within the ASCII range.
2333 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2335 pub fn is_ascii(&self) -> bool {
// Short-circuits on the first non-ASCII byte via `Iterator::all`.
2336 self.iter().all(|b| b.is_ascii())
2339 /// Checks that two slices are an ASCII case-insensitive match.
2341 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2342 /// but without allocating and copying temporaries.
2343 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2345 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
// Length check first, then a lazily-evaluated pairwise comparison; `zip` alone would
// silently truncate to the shorter slice, hence the explicit `len` equality.
2346 self.len() == other.len() &&
2347 self.iter().zip(other).all(|(a, b)| {
2348 a.eq_ignore_ascii_case(b)
2352 /// Converts this slice to its ASCII upper case equivalent in-place.
2354 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2355 /// but non-ASCII letters are unchanged.
2357 /// To return a new uppercased value without modifying the existing one, use
2358 /// [`to_ascii_uppercase`].
2360 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2361 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2363 pub fn make_ascii_uppercase(&mut self) {
// Loop header (`for byte in self`) elided from this listing; body mutates each byte in place.
2365 byte.make_ascii_uppercase();
2369 /// Converts this slice to its ASCII lower case equivalent in-place.
2371 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2372 /// but non-ASCII letters are unchanged.
2374 /// To return a new lowercased value without modifying the existing one, use
2375 /// [`to_ascii_lowercase`].
2377 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2378 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2380 pub fn make_ascii_lowercase(&mut self) {
2382 byte.make_ascii_lowercase();
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
// --- `ops::Index`/`ops::IndexMut` for `[T]`: both delegate entirely to the
// `SliceIndex` trait, so `slice[i]` and `slice[a..b]` share one implementation. ---
2388 #[stable(feature = "rust1", since = "1.0.0")]
2389 impl<T, I> ops::Index<I> for [T]
2390 where I: SliceIndex<[T]>
2392 type Output = I::Output;
2395 fn index(&self, index: I) -> &I::Output {
2400 #[stable(feature = "rust1", since = "1.0.0")]
2401 impl<T, I> ops::IndexMut<I> for [T]
2402 where I: SliceIndex<[T]>
2405 fn index_mut(&mut self, index: I) -> &mut I::Output {
2406 index.index_mut(self)
// --- Out-of-line panic helpers: `-> !` diverging functions keep the panic formatting
// machinery out of the callers' hot paths. ---
2412 fn slice_index_len_fail(index: usize, len: usize) -> ! {
2413 panic!("index {} out of range for slice of length {}", index, len);
2418 fn slice_index_order_fail(index: usize, end: usize) -> ! {
2419 panic!("slice index starts at {} but ends at {}", index, end);
// Used by `RangeInclusive`/`RangeToInclusive` indexing when `end == usize::MAX`,
// where `end + 1` would overflow.
2424 fn slice_index_overflow_fail() -> ! {
2425 panic!("attempted to index slice up to maximum usize");
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
// --- Sealed-trait pattern: `SliceIndex` requires `private_slice_index::Sealed`, and only
// the seven index types below implement `Sealed`, so downstream crates cannot add impls. ---
2428 mod private_slice_index {
2430 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2433 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2434 impl Sealed for usize {}
2435 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2436 impl Sealed for ops::Range<usize> {}
2437 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2438 impl Sealed for ops::RangeTo<usize> {}
2439 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2440 impl Sealed for ops::RangeFrom<usize> {}
2441 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2442 impl Sealed for ops::RangeFull {}
2443 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2444 impl Sealed for ops::RangeInclusive<usize> {}
2445 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2446 impl Sealed for ops::RangeToInclusive<usize> {}
2449 /// A helper trait used for indexing operations.
2450 #[stable(feature = "slice_get_slice", since = "1.28.0")]
// `rustc_on_unimplemented` customizes the compiler diagnostic when someone indexes a
// string/slice with an unsupported type (e.g. `s[0]` on a `String`).
2451 #[rustc_on_unimplemented(
2454 label = "string indices are ranges of `usize`",
2457 all(any(T = "str", T = "&str", T = "std::string::String"), _Self="{integer}"),
2458 note="you can use `.chars().nth()` or `.bytes().nth()`
2459 see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
2461 message = "the type `{T}` cannot be indexed by `{Self}`",
2462 label = "slice indices are of type `usize` or ranges of `usize`",
2464 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2465 /// The output type returned by methods.
2466 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2467 type Output: ?Sized;
2469 /// Returns a shared reference to the output at this location, if in
2471 #[unstable(feature = "slice_index_methods", issue = "0")]
2472 fn get(self, slice: &T) -> Option<&Self::Output>;
2474 /// Returns a mutable reference to the output at this location, if in
2476 #[unstable(feature = "slice_index_methods", issue = "0")]
2477 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2479 /// Returns a shared reference to the output at this location, without
2480 /// performing any bounds checking.
// Caller must guarantee the index/range is in bounds; no check is performed.
2481 #[unstable(feature = "slice_index_methods", issue = "0")]
2482 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2484 /// Returns a mutable reference to the output at this location, without
2485 /// performing any bounds checking.
2486 #[unstable(feature = "slice_index_methods", issue = "0")]
2487 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2489 /// Returns a shared reference to the output at this location, panicking
2490 /// if out of bounds.
2491 #[unstable(feature = "slice_index_methods", issue = "0")]
2492 fn index(self, slice: &T) -> &Self::Output;
2494 /// Returns a mutable reference to the output at this location, panicking
2495 /// if out of bounds.
2496 #[unstable(feature = "slice_index_methods", issue = "0")]
2497 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
// --- SliceIndex impls. `usize` and `Range<usize>` do the real work; the other range
// types normalize themselves into an equivalent `Range<usize>` and delegate. ---
2500 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2501 impl<T> SliceIndex<[T]> for usize {
2505 fn get(self, slice: &[T]) -> Option<&T> {
// Bounds check, then reuse the unchecked path (inside an elided `unsafe` block).
2506 if self < slice.len() {
2508 Some(self.get_unchecked(slice))
2516 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2517 if self < slice.len() {
2519 Some(self.get_unchecked_mut(slice))
2527 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
// Raw pointer offset; caller guarantees `self < slice.len()`.
2528 &*slice.as_ptr().add(self)
2532 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2533 &mut *slice.as_mut_ptr().add(self)
2537 fn index(self, slice: &[T]) -> &T {
2538 // N.B., use intrinsic indexing
2543 fn index_mut(self, slice: &mut [T]) -> &mut T {
2544 // N.B., use intrinsic indexing
2549 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2550 impl<T> SliceIndex<[T]> for ops::Range<usize> {
2554 fn get(self, slice: &[T]) -> Option<&[T]> {
// Both range-order (`start <= end`) and upper-bound checks; returns None on failure.
2555 if self.start > self.end || self.end > slice.len() {
2559 Some(self.get_unchecked(slice))
2565 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2566 if self.start > self.end || self.end > slice.len() {
2570 Some(self.get_unchecked_mut(slice))
2576 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2577 from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2581 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2582 from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
2586 fn index(self, slice: &[T]) -> &[T] {
// Panicking variant: distinguishes the two failure modes for a precise message.
2587 if self.start > self.end {
2588 slice_index_order_fail(self.start, self.end);
2589 } else if self.end > slice.len() {
2590 slice_index_len_fail(self.end, slice.len());
2593 self.get_unchecked(slice)
2598 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2599 if self.start > self.end {
2600 slice_index_order_fail(self.start, self.end);
2601 } else if self.end > slice.len() {
2602 slice_index_len_fail(self.end, slice.len());
2605 self.get_unchecked_mut(slice)
// --- RangeTo: `..end` is just `0..end`. ---
2610 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2611 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2615 fn get(self, slice: &[T]) -> Option<&[T]> {
2616 (0..self.end).get(slice)
2620 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2621 (0..self.end).get_mut(slice)
2625 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2626 (0..self.end).get_unchecked(slice)
2630 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2631 (0..self.end).get_unchecked_mut(slice)
2635 fn index(self, slice: &[T]) -> &[T] {
2636 (0..self.end).index(slice)
2640 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2641 (0..self.end).index_mut(slice)
// --- RangeFrom: `start..` is `start..slice.len()`. ---
2645 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2646 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
2650 fn get(self, slice: &[T]) -> Option<&[T]> {
2651 (self.start..slice.len()).get(slice)
2655 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2656 (self.start..slice.len()).get_mut(slice)
2660 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2661 (self.start..slice.len()).get_unchecked(slice)
2665 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2666 (self.start..slice.len()).get_unchecked_mut(slice)
2670 fn index(self, slice: &[T]) -> &[T] {
2671 (self.start..slice.len()).index(slice)
2675 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2676 (self.start..slice.len()).index_mut(slice)
// --- RangeFull: `..` is the identity; method bodies (returning the slice itself)
// are elided from this listing. ---
2680 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2681 impl<T> SliceIndex<[T]> for ops::RangeFull {
2685 fn get(self, slice: &[T]) -> Option<&[T]> {
2690 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2695 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2700 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2705 fn index(self, slice: &[T]) -> &[T] {
2710 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
// --- RangeInclusive: `a..=b` becomes `a..b + 1`; `b == usize::MAX` would overflow,
// so it is handled first (None for `get*`, panic via slice_index_overflow_fail for `index*`). ---
2716 #[stable(feature = "inclusive_range", since = "1.26.0")]
2717 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
2721 fn get(self, slice: &[T]) -> Option<&[T]> {
2722 if *self.end() == usize::max_value() { None }
2723 else { (*self.start()..self.end() + 1).get(slice) }
2727 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2728 if *self.end() == usize::max_value() { None }
2729 else { (*self.start()..self.end() + 1).get_mut(slice) }
2733 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
// No MAX check here: caller of the unchecked path guarantees validity.
2734 (*self.start()..self.end() + 1).get_unchecked(slice)
2738 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2739 (*self.start()..self.end() + 1).get_unchecked_mut(slice)
2743 fn index(self, slice: &[T]) -> &[T] {
2744 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2745 (*self.start()..self.end() + 1).index(slice)
2749 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2750 if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2751 (*self.start()..self.end() + 1).index_mut(slice)
// --- RangeToInclusive: `..=end` is `0..=end`, reusing the impl above. ---
2755 #[stable(feature = "inclusive_range", since = "1.26.0")]
2756 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
2760 fn get(self, slice: &[T]) -> Option<&[T]> {
2761 (0..=self.end).get(slice)
2765 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2766 (0..=self.end).get_mut(slice)
2770 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2771 (0..=self.end).get_unchecked(slice)
2775 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2776 (0..=self.end).get_unchecked_mut(slice)
2780 fn index(self, slice: &[T]) -> &[T] {
2781 (0..=self.end).index(slice)
2785 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2786 (0..=self.end).index_mut(slice)
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
2790 ////////////////////////////////////////////////////////////////////////////////
2792 ////////////////////////////////////////////////////////////////////////////////
// --- `Default` for slice references: both produce a zero-length slice. ---
2794 #[stable(feature = "rust1", since = "1.0.0")]
2795 impl<T> Default for &[T] {
2796 /// Creates an empty slice.
2797 fn default() -> Self { &[] }
2800 #[stable(feature = "mut_slice_default", since = "1.5.0")]
2801 impl<T> Default for &mut [T] {
2802 /// Creates a mutable empty slice.
2803 fn default() -> Self { &mut [] }
// --- `IntoIterator` for slice references, enabling `for x in &slice` /
// `for x in &mut slice`; bodies (calls to `iter()` / `iter_mut()`) are elided here. ---
2810 #[stable(feature = "rust1", since = "1.0.0")]
2811 impl<'a, T> IntoIterator for &'a [T] {
2813 type IntoIter = Iter<'a, T>;
2815 fn into_iter(self) -> Iter<'a, T> {
2820 #[stable(feature = "rust1", since = "1.0.0")]
2821 impl<'a, T> IntoIterator for &'a mut [T] {
2822 type Item = &'a mut T;
2823 type IntoIter = IterMut<'a, T>;
2825 fn into_iter(self) -> IterMut<'a, T> {
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
2830 // Macro helper functions
2832 fn size_from_ptr<T>(_: *const T) -> usize {
// --- `is_empty!`/`len!`: macros rather than methods so they are guaranteed to be
// inlined into the iterator hot paths; both handle the ZST encoding where `end`
// stores `ptr + len` rather than a real pointer. ---
2836 // Inlining is_empty and len makes a huge performance difference
2837 macro_rules! is_empty {
2838 // The way we encode the length of a ZST iterator, this works both for ZST
2840 ($self: ident) => {$self.ptr == $self.end}
2842 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
2843 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
2845 ($self: ident) => {{
2846 let start = $self.ptr;
// `wrapping_sub` so the ZST case (where `end` may be a small wrapped value) is defined.
2847 let diff = ($self.end as usize).wrapping_sub(start as usize);
2848 let size = size_from_ptr(start);
2852 // Using division instead of `offset_from` helps LLVM remove bounds checks
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
// This macro generates the full Iterator/DoubleEndedIterator/ExactSizeIterator/
// FusedIterator/TrustedLen machinery for both `Iter` (const pointers) and `IterMut`
// (mut pointers); `$raw_mut` / `$( $mut_ )*` select constness at expansion time.
2858 // The shared definition of the `Iter` and `IterMut` iterators
2859 macro_rules! iterator {
2861 struct $name:ident -> $ptr:ty,
2867 impl<'a, T> $name<'a, T> {
2868 // Helper function for creating a slice from the iterator.
2870 fn make_slice(&self) -> &'a [T] {
2871 unsafe { from_raw_parts(self.ptr, len!(self)) }
2874 // Helper function for moving the start of the iterator forwards by `offset` elements,
2875 // returning the old start.
2876 // Unsafe because the offset must be in-bounds or one-past-the-end.
2878 unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
2879 if mem::size_of::<T>() == 0 {
2880 // This is *reducing* the length. `ptr` never changes with ZST.
2881 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2885 self.ptr = self.ptr.offset(offset);
2890 // Helper function for moving the end of the iterator backwards by `offset` elements,
2891 // returning the new end.
2892 // Unsafe because the offset must be in-bounds or one-past-the-end.
2894 unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
2895 if mem::size_of::<T>() == 0 {
2896 self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2899 self.end = self.end.offset(-offset);
2905 #[stable(feature = "rust1", since = "1.0.0")]
2906 impl<T> ExactSizeIterator for $name<'_, T> {
2908 fn len(&self) -> usize {
2913 fn is_empty(&self) -> bool {
2918 #[stable(feature = "rust1", since = "1.0.0")]
2919 impl<'a, T> Iterator for $name<'a, T> {
2923 fn next(&mut self) -> Option<$elem> {
2924 // could be implemented with slices, but this avoids bounds checks
// `assume` feeds non-null facts to LLVM so Option niche/branch codegen improves.
2926 assume(!self.ptr.is_null());
2927 if mem::size_of::<T>() != 0 {
2928 assume(!self.end.is_null());
2930 if is_empty!(self) {
2933 Some(& $( $mut_ )* *self.post_inc_start(1))
2939 fn size_hint(&self) -> (usize, Option<usize>) {
// Exact length is known, so lower == upper.
2940 let exact = len!(self);
2941 (exact, Some(exact))
2945 fn count(self) -> usize {
2950 fn nth(&mut self, n: usize) -> Option<$elem> {
2951 if n >= len!(self) {
2952 // This iterator is now empty.
2953 if mem::size_of::<T>() == 0 {
2954 // We have to do it this way as `ptr` may never be 0, but `end`
2955 // could be (due to wrapping).
2956 self.end = self.ptr;
2958 self.ptr = self.end;
2962 // We are in bounds. `offset` does the right thing even for ZSTs.
2964 let elem = Some(& $( $mut_ )* *self.ptr.add(n));
2965 self.post_inc_start((n as isize).wrapping_add(1));
2971 fn last(mut self) -> Option<$elem> {
2976 fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
2977 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
2979 // manual unrolling is needed when there are conditional exits from the loop
2980 let mut accum = init;
// 4x unrolled main loop, then a remainder loop for the final 0-3 elements.
2982 while len!(self) >= 4 {
2983 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2984 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2985 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2986 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2988 while !is_empty!(self) {
2989 accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2996 fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
2997 where Fold: FnMut(Acc, Self::Item) -> Acc,
2999 // Let LLVM unroll this, rather than using the default
3000 // impl that would force the manual unrolling above
3001 let mut accum = init;
3002 while let Some(x) = self.next() {
3003 accum = f(accum, x);
3009 #[rustc_inherit_overflow_checks]
3010 fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
3012 P: FnMut(Self::Item) -> bool,
3014 // The addition might panic on overflow.
// Encodes "found at i" as the Err short-circuit of try_fold.
3016 self.try_fold(0, move |i, x| {
3017 if predicate(x) { Err(i) }
3021 unsafe { assume(i < n) };
3027 fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
3028 P: FnMut(Self::Item) -> bool,
3029 Self: Sized + ExactSizeIterator + DoubleEndedIterator
3031 // No need for an overflow check here, because `ExactSizeIterator`
3033 self.try_rfold(n, move |i, x| {
3035 if predicate(x) { Err(i) }
3039 unsafe { assume(i < n) };
3047 #[stable(feature = "rust1", since = "1.0.0")]
3048 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
3050 fn next_back(&mut self) -> Option<$elem> {
3051 // could be implemented with slices, but this avoids bounds checks
3053 assume(!self.ptr.is_null());
3054 if mem::size_of::<T>() != 0 {
3055 assume(!self.end.is_null());
3057 if is_empty!(self) {
3060 Some(& $( $mut_ )* *self.pre_dec_end(1))
3066 fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
3067 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
3069 // manual unrolling is needed when there are conditional exits from the loop
3070 let mut accum = init;
3072 while len!(self) >= 4 {
3073 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3074 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3075 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3076 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3078 // inlining is_empty everywhere makes a huge performance difference
3079 while !is_empty!(self) {
3080 accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3087 fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
3088 where Fold: FnMut(Acc, Self::Item) -> Acc,
3090 // Let LLVM unroll this, rather than using the default
3091 // impl that would force the manual unrolling above
3092 let mut accum = init;
3093 while let Some(x) = self.next_back() {
3094 accum = f(accum, x);
3100 #[stable(feature = "fused", since = "1.26.0")]
3101 impl<T> FusedIterator for $name<'_, T> {}
// SAFETY-relevant: TrustedLen promises size_hint is exact; holds because the
// iterator tracks its bounds via ptr/end.
3103 #[unstable(feature = "trusted_len", issue = "37572")]
3104 unsafe impl<T> TrustedLen for $name<'_, T> {}
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
3108 /// Immutable slice iterator
3110 /// This struct is created by the [`iter`] method on [slices].
3117 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3118 /// let slice = &[1, 2, 3];
3120 /// // Then, we iterate over it:
3121 /// for element in slice.iter() {
3122 /// println!("{}", element);
3126 /// [`iter`]: ../../std/primitive.slice.html#method.iter
3127 /// [slices]: ../../std/primitive.slice.html
3128 #[stable(feature = "rust1", since = "1.0.0")]
3129 pub struct Iter<'a, T: 'a> {
3131 end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3132 // ptr == end is a quick test for the Iterator being empty, that works
3133 // for both ZST and non-ZST.
// PhantomData ties the raw pointers to the lifetime 'a of the borrowed slice.
3134 _marker: marker::PhantomData<&'a T>,
3137 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3138 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
3139 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3140 f.debug_tuple("Iter")
3141 .field(&self.as_slice())
// Manual Send/Sync: raw pointers are !Send/!Sync by default, but a shared-slice
// iterator is as thread-safe as `&[T]`, i.e. requires only `T: Sync`.
3146 #[stable(feature = "rust1", since = "1.0.0")]
3147 unsafe impl<T: Sync> Sync for Iter<'_, T> {}
3148 #[stable(feature = "rust1", since = "1.0.0")]
3149 unsafe impl<T: Sync> Send for Iter<'_, T> {}
3151 impl<'a, T> Iter<'a, T> {
3152 /// Views the underlying data as a subslice of the original data.
3154 /// This has the same lifetime as the original slice, and so the
3155 /// iterator can continue to be used while this exists.
3162 /// // First, we declare a type which has the `iter` method to get the `Iter`
3163 /// // struct (&[usize here]):
3164 /// let slice = &[1, 2, 3];
3166 /// // Then, we get the iterator:
3167 /// let mut iter = slice.iter();
3168 /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
3169 /// println!("{:?}", iter.as_slice());
3171 /// // Next, we move to the second element of the slice:
3173 /// // Now `as_slice` returns "[2, 3]":
3174 /// println!("{:?}", iter.as_slice());
3176 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3177 pub fn as_slice(&self) -> &'a [T] {
// Expands the shared iterator machinery for Iter; the trailing block overrides the
// iterator-level `is_sorted_by` with a windows(2)-based implementation.
3182 iterator!{struct Iter -> *const T, &'a T, const, {/* no mut */}, {
3183 fn is_sorted_by<F>(self, mut compare: F) -> bool
3186 F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
// `None` from the comparator (incomparable pair) counts as not sorted.
3188 self.as_slice().windows(2).all(|w| {
3189 compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
3194 #[stable(feature = "rust1", since = "1.0.0")]
3195 impl<T> Clone for Iter<'_, T> {
// Field-wise copy: Clone must not require `T: Clone`, hence the manual impl.
3196 fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
3199 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
3200 impl<T> AsRef<[T]> for Iter<'_, T> {
3201 fn as_ref(&self) -> &[T] {
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
3206 /// Mutable slice iterator.
3208 /// This struct is created by the [`iter_mut`] method on [slices].
3215 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3216 /// // struct (&[usize here]):
3217 /// let mut slice = &mut [1, 2, 3];
3219 /// // Then, we iterate over it and increment each element value:
3220 /// for element in slice.iter_mut() {
3224 /// // We now have "[2, 3, 4]":
3225 /// println!("{:?}", slice);
3228 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
3229 /// [slices]: ../../std/primitive.slice.html
3230 #[stable(feature = "rust1", since = "1.0.0")]
3231 pub struct IterMut<'a, T: 'a> {
3233 end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3234 // ptr == end is a quick test for the Iterator being empty, that works
3235 // for both ZST and non-ZST.
// PhantomData carries the unique-borrow semantics of `&'a mut T`.
3236 _marker: marker::PhantomData<&'a mut T>,
3239 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3240 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
3241 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3242 f.debug_tuple("IterMut")
3243 .field(&self.make_slice())
// Unlike Iter, Send here requires `T: Send` — the iterator hands out `&mut T`.
3248 #[stable(feature = "rust1", since = "1.0.0")]
3249 unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
3250 #[stable(feature = "rust1", since = "1.0.0")]
3251 unsafe impl<T: Send> Send for IterMut<'_, T> {}
3253 impl<'a, T> IterMut<'a, T> {
3254 /// Views the underlying data as a subslice of the original data.
3256 /// To avoid creating `&mut` references that alias, this is forced
3257 /// to consume the iterator.
3264 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3265 /// // struct (&[usize here]):
3266 /// let mut slice = &mut [1, 2, 3];
3269 /// // Then, we get the iterator:
3270 /// let mut iter = slice.iter_mut();
3271 /// // We move to next element:
3273 /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
3274 /// println!("{:?}", iter.into_slice());
3277 /// // Now let's modify a value of the slice:
3279 /// // First we get back the iterator:
3280 /// let mut iter = slice.iter_mut();
3281 /// // We change the value of the first element of the slice returned by the `next` method:
3282 /// *iter.next().unwrap() += 1;
3284 /// // Now slice is "[2, 2, 3]":
3285 /// println!("{:?}", slice);
3287 #[stable(feature = "iter_to_slice", since = "1.4.0")]
3288 pub fn into_slice(self) -> &'a mut [T] {
// Consuming `self` is what makes handing out the full-lifetime `&'a mut [T]` sound.
3289 unsafe { from_raw_parts_mut(self.ptr, len!(self)) }
3292 /// Views the underlying data as a subslice of the original data.
3294 /// To avoid creating `&mut [T]` references that alias, the returned slice
3295 /// borrows its lifetime from the iterator the method is applied on.
3302 /// # #![feature(slice_iter_mut_as_slice)]
3303 /// let mut slice: &mut [usize] = &mut [1, 2, 3];
3305 /// // First, we get the iterator:
3306 /// let mut iter = slice.iter_mut();
3307 /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
3308 /// assert_eq!(iter.as_slice(), &[1, 2, 3]);
3310 /// // Next, we move to the second element of the slice:
3312 /// // Now `as_slice` returns "[2, 3]":
3313 /// assert_eq!(iter.as_slice(), &[2, 3]);
3315 #[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")]
3316 pub fn as_slice(&self) -> &[T] {
// Expands the shared machinery for IterMut; no extra trailing methods here.
3321 iterator!{struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
3323 /// An internal abstraction over the splitting iterators, so that
3324 /// splitn, splitn_mut etc can be implemented once.
3326 trait SplitIter: DoubleEndedIterator {
3327 /// Marks the underlying iterator as complete, extracting the remaining
3328 /// portion of the slice.
3329 fn finish(&mut self) -> Option<Self::Item>;
3332 /// An iterator over subslices separated by elements that match a predicate
3335 /// This struct is created by the [`split`] method on [slices].
3337 /// [`split`]: ../../std/primitive.slice.html#method.split
3338 /// [slices]: ../../std/primitive.slice.html
3339 #[stable(feature = "rust1", since = "1.0.0")]
3340 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
// Manual Debug impl because `P` (the closure) is not Debug; prints `v` and `finished` only.
3346 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3347 impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool {
3348 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3349 f.debug_struct("Split")
3350 .field("v", &self.v)
3351 .field("finished", &self.finished)
3356 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
// Manual Clone: a derive would (over-)require `T: Clone`; only `P: Clone` is needed.
3357 #[stable(feature = "rust1", since = "1.0.0")]
3358 impl<T, P> Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool {
3359 fn clone(&self) -> Self {
3362 pred: self.pred.clone(),
3363 finished: self.finished,
3368 #[stable(feature = "rust1", since = "1.0.0")]
3369 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3370 type Item = &'a [T];
3373 fn next(&mut self) -> Option<&'a [T]> {
3374 if self.finished { return None; }
// Find the next separator; yield everything before it and skip the separator itself.
3376 match self.v.iter().position(|x| (self.pred)(x)) {
3377 None => self.finish(),
3379 let ret = Some(&self.v[..idx]);
3380 self.v = &self.v[idx + 1..];
3387 fn size_hint(&self) -> (usize, Option<usize>) {
// Worst case: every element is a separator → len + 1 (possibly empty) subslices.
3391 (1, Some(self.v.len() + 1))
3396 #[stable(feature = "rust1", since = "1.0.0")]
3397 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3399 fn next_back(&mut self) -> Option<&'a [T]> {
3400 if self.finished { return None; }
3402 match self.v.iter().rposition(|x| (self.pred)(x)) {
3403 None => self.finish(),
3405 let ret = Some(&self.v[idx + 1..]);
3406 self.v = &self.v[..idx];
3413 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
3415 fn finish(&mut self) -> Option<&'a [T]> {
// One-shot: yields the remaining tail exactly once, then stays exhausted.
3416 if self.finished { None } else { self.finished = true; Some(self.v) }
3420 #[stable(feature = "fused", since = "1.26.0")]
3421 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
3423 /// An iterator over the subslices of the vector which are separated
3424 /// by elements that match `pred`.
3426 /// This struct is created by the [`split_mut`] method on [slices].
3428 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
3429 /// [slices]: ../../std/primitive.slice.html
3430 #[stable(feature = "rust1", since = "1.0.0")]
3431 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3437 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3438 impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3439 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3440 f.debug_struct("SplitMut")
3441 .field("v", &self.v)
3442 .field("finished", &self.finished)
3447 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3449 fn finish(&mut self) -> Option<&'a mut [T]> {
3453 self.finished = true;
// `mem::replace` with an empty slice moves the stored `&'a mut [T]` out without
// leaving an aliasing mutable borrow behind.
3454 Some(mem::replace(&mut self.v, &mut []))
3459 #[stable(feature = "rust1", since = "1.0.0")]
3460 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3461 type Item = &'a mut [T];
3464 fn next(&mut self) -> Option<&'a mut [T]> {
3465 if self.finished { return None; }
// Position is computed in a separate scope so the `&mut self.pred` borrow ends
// before `self.v` is mutated below (pre-NLL borrowck workaround).
3467 let idx_opt = { // work around borrowck limitations
3468 let pred = &mut self.pred;
3469 self.v.iter().position(|x| (*pred)(x))
3472 None => self.finish(),
// Take the whole remaining slice, split at the separator, keep the tail past it.
3474 let tmp = mem::replace(&mut self.v, &mut []);
3475 let (head, tail) = tmp.split_at_mut(idx);
3476 self.v = &mut tail[1..];
3483 fn size_hint(&self) -> (usize, Option<usize>) {
3487 // if the predicate doesn't match anything, we yield one slice
3488 // if it matches every element, we yield len+1 empty slices.
3489 (1, Some(self.v.len() + 1))
3494 #[stable(feature = "rust1", since = "1.0.0")]
3495 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
3496 P: FnMut(&T) -> bool,
3499 fn next_back(&mut self) -> Option<&'a mut [T]> {
3500 if self.finished { return None; }
3502 let idx_opt = { // work around borrowck limitations
3503 let pred = &mut self.pred;
3504 self.v.iter().rposition(|x| (*pred)(x))
3507 None => self.finish(),
3509 let tmp = mem::replace(&mut self.v, &mut []);
3510 let (head, tail) = tmp.split_at_mut(idx);
3512 Some(&mut tail[1..])
3518 #[stable(feature = "fused", since = "1.26.0")]
3519 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
// NOTE(review): elided excerpt (embedded line numbers, missing lines) — comments only.
// --- RSplit / RSplitMut: thin adapters that wrap Split / SplitMut and swap the
// iteration direction (forward `next` delegates to the inner `next_back`). ---
3521 /// An iterator over subslices separated by elements that match a predicate
3522 /// function, starting from the end of the slice.
3524 /// This struct is created by the [`rsplit`] method on [slices].
3526 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
3527 /// [slices]: ../../std/primitive.slice.html
3528 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// NOTE(review): the derive question in the original comment stands — `derive(Clone)` on a
// wrapper bounds `T: Clone` unnecessarily; the inner `Split` has a manual Clone instead.
3529 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
3530 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
3531 inner: Split<'a, T, P>
3534 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3535 impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool {
3536 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3537 f.debug_struct("RSplit")
3538 .field("v", &self.inner.v)
3539 .field("finished", &self.inner.finished)
3544 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3545 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3546 type Item = &'a [T];
3549 fn next(&mut self) -> Option<&'a [T]> {
// Direction swap: forward iteration over RSplit is backward iteration over Split.
3550 self.inner.next_back()
3554 fn size_hint(&self) -> (usize, Option<usize>) {
3555 self.inner.size_hint()
3559 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3560 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3562 fn next_back(&mut self) -> Option<&'a [T]> {
3567 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3568 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3570 fn finish(&mut self) -> Option<&'a [T]> {
3575 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3576 impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
3578 /// An iterator over the subslices of the vector which are separated
3579 /// by elements that match `pred`, starting from the end of the slice.
3581 /// This struct is created by the [`rsplit_mut`] method on [slices].
3583 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
3584 /// [slices]: ../../std/primitive.slice.html
3585 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3586 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3587 inner: SplitMut<'a, T, P>
3590 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3591 impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3592 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3593 f.debug_struct("RSplitMut")
3594 .field("v", &self.inner.v)
3595 .field("finished", &self.inner.finished)
3600 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3601 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3603 fn finish(&mut self) -> Option<&'a mut [T]> {
3608 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3609 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3610 type Item = &'a mut [T];
3613 fn next(&mut self) -> Option<&'a mut [T]> {
3614 self.inner.next_back()
3618 fn size_hint(&self) -> (usize, Option<usize>) {
3619 self.inner.size_hint()
3623 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3624 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
3625 P: FnMut(&T) -> bool,
3628 fn next_back(&mut self) -> Option<&'a mut [T]> {
3633 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3634 impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
3636 /// An private iterator over subslices separated by elements that
3637 /// match a predicate function, splitting at most a fixed number of
3640 struct GenericSplitN<I> {
3645 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
3649 fn next(&mut self) -> Option<T> {
3652 1 => { self.count -= 1; self.iter.finish() }
3653 _ => { self.count -= 1; self.iter.next() }
3658 fn size_hint(&self) -> (usize, Option<usize>) {
3659 let (lower, upper_opt) = self.iter.size_hint();
3660 (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
3664 /// An iterator over subslices separated by elements that match a predicate
3665 /// function, limited to a given number of splits.
3667 /// This struct is created by the [`splitn`] method on [slices].
3669 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
3670 /// [slices]: ../../std/primitive.slice.html
3671 #[stable(feature = "rust1", since = "1.0.0")]
3672 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3673 inner: GenericSplitN<Split<'a, T, P>>
3676 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3677 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool {
3678 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3679 f.debug_struct("SplitN")
3680 .field("inner", &self.inner)
3685 /// An iterator over subslices separated by elements that match a
3686 /// predicate function, limited to a given number of splits, starting
3687 /// from the end of the slice.
3689 /// This struct is created by the [`rsplitn`] method on [slices].
3691 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
3692 /// [slices]: ../../std/primitive.slice.html
3693 #[stable(feature = "rust1", since = "1.0.0")]
3694 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3695 inner: GenericSplitN<RSplit<'a, T, P>>
3698 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3699 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool {
3700 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3701 f.debug_struct("RSplitN")
3702 .field("inner", &self.inner)
3707 /// An iterator over subslices separated by elements that match a predicate
3708 /// function, limited to a given number of splits.
3710 /// This struct is created by the [`splitn_mut`] method on [slices].
3712 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
3713 /// [slices]: ../../std/primitive.slice.html
3714 #[stable(feature = "rust1", since = "1.0.0")]
3715 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3716 inner: GenericSplitN<SplitMut<'a, T, P>>
3719 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3720 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3721 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3722 f.debug_struct("SplitNMut")
3723 .field("inner", &self.inner)
3728 /// An iterator over subslices separated by elements that match a
3729 /// predicate function, limited to a given number of splits, starting
3730 /// from the end of the slice.
3732 /// This struct is created by the [`rsplitn_mut`] method on [slices].
3734 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
3735 /// [slices]: ../../std/primitive.slice.html
3736 #[stable(feature = "rust1", since = "1.0.0")]
3737 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3738 inner: GenericSplitN<RSplitMut<'a, T, P>>
3741 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3742 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3743 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3744 f.debug_struct("RSplitNMut")
3745 .field("inner", &self.inner)
3750 macro_rules! forward_iterator {
3751 ($name:ident: $elem:ident, $iter_of:ty) => {
3752 #[stable(feature = "rust1", since = "1.0.0")]
3753 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
3754 P: FnMut(&T) -> bool
3756 type Item = $iter_of;
3759 fn next(&mut self) -> Option<$iter_of> {
3764 fn size_hint(&self) -> (usize, Option<usize>) {
3765 self.inner.size_hint()
3769 #[stable(feature = "fused", since = "1.26.0")]
3770 impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
3771 where P: FnMut(&T) -> bool {}
3775 forward_iterator! { SplitN: T, &'a [T] }
3776 forward_iterator! { RSplitN: T, &'a [T] }
3777 forward_iterator! { SplitNMut: T, &'a mut [T] }
3778 forward_iterator! { RSplitNMut: T, &'a mut [T] }
3780 /// An iterator over overlapping subslices of length `size`.
3782 /// This struct is created by the [`windows`] method on [slices].
3784 /// [`windows`]: ../../std/primitive.slice.html#method.windows
3785 /// [slices]: ../../std/primitive.slice.html
3787 #[stable(feature = "rust1", since = "1.0.0")]
3788 pub struct Windows<'a, T:'a> {
3793 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3794 #[stable(feature = "rust1", since = "1.0.0")]
3795 impl<T> Clone for Windows<'_, T> {
3796 fn clone(&self) -> Self {
3804 #[stable(feature = "rust1", since = "1.0.0")]
3805 impl<'a, T> Iterator for Windows<'a, T> {
3806 type Item = &'a [T];
3809 fn next(&mut self) -> Option<&'a [T]> {
3810 if self.size > self.v.len() {
3813 let ret = Some(&self.v[..self.size]);
3814 self.v = &self.v[1..];
3820 fn size_hint(&self) -> (usize, Option<usize>) {
3821 if self.size > self.v.len() {
3824 let size = self.v.len() - self.size + 1;
3830 fn count(self) -> usize {
3835 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3836 let (end, overflow) = self.size.overflowing_add(n);
3837 if end > self.v.len() || overflow {
3841 let nth = &self.v[n..end];
3842 self.v = &self.v[n+1..];
3848 fn last(self) -> Option<Self::Item> {
3849 if self.size > self.v.len() {
3852 let start = self.v.len() - self.size;
3853 Some(&self.v[start..])
3858 #[stable(feature = "rust1", since = "1.0.0")]
3859 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
3861 fn next_back(&mut self) -> Option<&'a [T]> {
3862 if self.size > self.v.len() {
3865 let ret = Some(&self.v[self.v.len()-self.size..]);
3866 self.v = &self.v[..self.v.len()-1];
3872 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
3873 let (end, overflow) = self.v.len().overflowing_sub(n);
3874 if end < self.size || overflow {
3878 let ret = &self.v[end-self.size..end];
3879 self.v = &self.v[..end-1];
3885 #[stable(feature = "rust1", since = "1.0.0")]
3886 impl<T> ExactSizeIterator for Windows<'_, T> {}
3888 #[unstable(feature = "trusted_len", issue = "37572")]
3889 unsafe impl<T> TrustedLen for Windows<'_, T> {}
3891 #[stable(feature = "fused", since = "1.26.0")]
3892 impl<T> FusedIterator for Windows<'_, T> {}
3895 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
3896 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3897 from_raw_parts(self.v.as_ptr().add(i), self.size)
3899 fn may_have_side_effect() -> bool { false }
3902 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
3903 /// time), starting at the beginning of the slice.
3905 /// When the slice len is not evenly divided by the chunk size, the last slice
3906 /// of the iteration will be the remainder.
3908 /// This struct is created by the [`chunks`] method on [slices].
3910 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
3911 /// [slices]: ../../std/primitive.slice.html
3913 #[stable(feature = "rust1", since = "1.0.0")]
3914 pub struct Chunks<'a, T:'a> {
3919 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3920 #[stable(feature = "rust1", since = "1.0.0")]
3921 impl<T> Clone for Chunks<'_, T> {
3922 fn clone(&self) -> Self {
3925 chunk_size: self.chunk_size,
3930 #[stable(feature = "rust1", since = "1.0.0")]
3931 impl<'a, T> Iterator for Chunks<'a, T> {
3932 type Item = &'a [T];
3935 fn next(&mut self) -> Option<&'a [T]> {
3936 if self.v.is_empty() {
3939 let chunksz = cmp::min(self.v.len(), self.chunk_size);
3940 let (fst, snd) = self.v.split_at(chunksz);
3947 fn size_hint(&self) -> (usize, Option<usize>) {
3948 if self.v.is_empty() {
3951 let n = self.v.len() / self.chunk_size;
3952 let rem = self.v.len() % self.chunk_size;
3953 let n = if rem > 0 { n+1 } else { n };
3959 fn count(self) -> usize {
3964 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3965 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3966 if start >= self.v.len() || overflow {
3970 let end = match start.checked_add(self.chunk_size) {
3971 Some(sum) => cmp::min(self.v.len(), sum),
3972 None => self.v.len(),
3974 let nth = &self.v[start..end];
3975 self.v = &self.v[end..];
3981 fn last(self) -> Option<Self::Item> {
3982 if self.v.is_empty() {
3985 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
3986 Some(&self.v[start..])
3991 #[stable(feature = "rust1", since = "1.0.0")]
3992 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
3994 fn next_back(&mut self) -> Option<&'a [T]> {
3995 if self.v.is_empty() {
3998 let remainder = self.v.len() % self.chunk_size;
3999 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4000 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4007 #[stable(feature = "rust1", since = "1.0.0")]
4008 impl<T> ExactSizeIterator for Chunks<'_, T> {}
4010 #[unstable(feature = "trusted_len", issue = "37572")]
4011 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
4013 #[stable(feature = "fused", since = "1.26.0")]
4014 impl<T> FusedIterator for Chunks<'_, T> {}
4017 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
4018 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4019 let start = i * self.chunk_size;
4020 let end = match start.checked_add(self.chunk_size) {
4021 None => self.v.len(),
4022 Some(end) => cmp::min(end, self.v.len()),
4024 from_raw_parts(self.v.as_ptr().add(start), end - start)
4026 fn may_have_side_effect() -> bool { false }
4029 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4030 /// elements at a time), starting at the beginning of the slice.
4032 /// When the slice len is not evenly divided by the chunk size, the last slice
4033 /// of the iteration will be the remainder.
4035 /// This struct is created by the [`chunks_mut`] method on [slices].
4037 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
4038 /// [slices]: ../../std/primitive.slice.html
4040 #[stable(feature = "rust1", since = "1.0.0")]
4041 pub struct ChunksMut<'a, T:'a> {
4046 #[stable(feature = "rust1", since = "1.0.0")]
4047 impl<'a, T> Iterator for ChunksMut<'a, T> {
4048 type Item = &'a mut [T];
4051 fn next(&mut self) -> Option<&'a mut [T]> {
4052 if self.v.is_empty() {
4055 let sz = cmp::min(self.v.len(), self.chunk_size);
4056 let tmp = mem::replace(&mut self.v, &mut []);
4057 let (head, tail) = tmp.split_at_mut(sz);
4064 fn size_hint(&self) -> (usize, Option<usize>) {
4065 if self.v.is_empty() {
4068 let n = self.v.len() / self.chunk_size;
4069 let rem = self.v.len() % self.chunk_size;
4070 let n = if rem > 0 { n + 1 } else { n };
4076 fn count(self) -> usize {
4081 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4082 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4083 if start >= self.v.len() || overflow {
4087 let end = match start.checked_add(self.chunk_size) {
4088 Some(sum) => cmp::min(self.v.len(), sum),
4089 None => self.v.len(),
4091 let tmp = mem::replace(&mut self.v, &mut []);
4092 let (head, tail) = tmp.split_at_mut(end);
4093 let (_, nth) = head.split_at_mut(start);
4100 fn last(self) -> Option<Self::Item> {
4101 if self.v.is_empty() {
4104 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4105 Some(&mut self.v[start..])
4110 #[stable(feature = "rust1", since = "1.0.0")]
4111 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
4113 fn next_back(&mut self) -> Option<&'a mut [T]> {
4114 if self.v.is_empty() {
4117 let remainder = self.v.len() % self.chunk_size;
4118 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4119 let tmp = mem::replace(&mut self.v, &mut []);
4120 let tmp_len = tmp.len();
4121 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4128 #[stable(feature = "rust1", since = "1.0.0")]
4129 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
4131 #[unstable(feature = "trusted_len", issue = "37572")]
4132 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
4134 #[stable(feature = "fused", since = "1.26.0")]
4135 impl<T> FusedIterator for ChunksMut<'_, T> {}
4138 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
4139 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4140 let start = i * self.chunk_size;
4141 let end = match start.checked_add(self.chunk_size) {
4142 None => self.v.len(),
4143 Some(end) => cmp::min(end, self.v.len()),
4145 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4147 fn may_have_side_effect() -> bool { false }
4150 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4151 /// time), starting at the beginning of the slice.
4153 /// When the slice len is not evenly divided by the chunk size, the last
4154 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4155 /// the [`remainder`] function from the iterator.
4157 /// This struct is created by the [`chunks_exact`] method on [slices].
4159 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
4160 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4161 /// [slices]: ../../std/primitive.slice.html
4163 #[stable(feature = "chunks_exact", since = "1.31.0")]
4164 pub struct ChunksExact<'a, T:'a> {
4170 impl<'a, T> ChunksExact<'a, T> {
4171 /// Returns the remainder of the original slice that is not going to be
4172 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4174 #[stable(feature = "chunks_exact", since = "1.31.0")]
4175 pub fn remainder(&self) -> &'a [T] {
4180 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4181 #[stable(feature = "chunks_exact", since = "1.31.0")]
4182 impl<T> Clone for ChunksExact<'_, T> {
4183 fn clone(&self) -> Self {
4187 chunk_size: self.chunk_size,
4192 #[stable(feature = "chunks_exact", since = "1.31.0")]
4193 impl<'a, T> Iterator for ChunksExact<'a, T> {
4194 type Item = &'a [T];
4197 fn next(&mut self) -> Option<&'a [T]> {
4198 if self.v.len() < self.chunk_size {
4201 let (fst, snd) = self.v.split_at(self.chunk_size);
4208 fn size_hint(&self) -> (usize, Option<usize>) {
4209 let n = self.v.len() / self.chunk_size;
4214 fn count(self) -> usize {
4219 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4220 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4221 if start >= self.v.len() || overflow {
4225 let (_, snd) = self.v.split_at(start);
4232 fn last(mut self) -> Option<Self::Item> {
4237 #[stable(feature = "chunks_exact", since = "1.31.0")]
4238 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
4240 fn next_back(&mut self) -> Option<&'a [T]> {
4241 if self.v.len() < self.chunk_size {
4244 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4251 #[stable(feature = "chunks_exact", since = "1.31.0")]
4252 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
4253 fn is_empty(&self) -> bool {
4258 #[unstable(feature = "trusted_len", issue = "37572")]
4259 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
4261 #[stable(feature = "chunks_exact", since = "1.31.0")]
4262 impl<T> FusedIterator for ChunksExact<'_, T> {}
4265 #[stable(feature = "chunks_exact", since = "1.31.0")]
4266 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
4267 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4268 let start = i * self.chunk_size;
4269 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4271 fn may_have_side_effect() -> bool { false }
4274 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4275 /// elements at a time), starting at the beginning of the slice.
4277 /// When the slice len is not evenly divided by the chunk size, the last up to
4278 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4279 /// [`into_remainder`] function from the iterator.
4281 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
4283 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
4284 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4285 /// [slices]: ../../std/primitive.slice.html
4287 #[stable(feature = "chunks_exact", since = "1.31.0")]
4288 pub struct ChunksExactMut<'a, T:'a> {
4294 impl<'a, T> ChunksExactMut<'a, T> {
4295 /// Returns the remainder of the original slice that is not going to be
4296 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4298 #[stable(feature = "chunks_exact", since = "1.31.0")]
4299 pub fn into_remainder(self) -> &'a mut [T] {
4304 #[stable(feature = "chunks_exact", since = "1.31.0")]
4305 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
4306 type Item = &'a mut [T];
4309 fn next(&mut self) -> Option<&'a mut [T]> {
4310 if self.v.len() < self.chunk_size {
4313 let tmp = mem::replace(&mut self.v, &mut []);
4314 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4321 fn size_hint(&self) -> (usize, Option<usize>) {
4322 let n = self.v.len() / self.chunk_size;
4327 fn count(self) -> usize {
4332 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4333 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4334 if start >= self.v.len() || overflow {
4338 let tmp = mem::replace(&mut self.v, &mut []);
4339 let (_, snd) = tmp.split_at_mut(start);
4346 fn last(mut self) -> Option<Self::Item> {
4351 #[stable(feature = "chunks_exact", since = "1.31.0")]
4352 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
4354 fn next_back(&mut self) -> Option<&'a mut [T]> {
4355 if self.v.len() < self.chunk_size {
4358 let tmp = mem::replace(&mut self.v, &mut []);
4359 let tmp_len = tmp.len();
4360 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4367 #[stable(feature = "chunks_exact", since = "1.31.0")]
4368 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
4369 fn is_empty(&self) -> bool {
4374 #[unstable(feature = "trusted_len", issue = "37572")]
4375 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
4377 #[stable(feature = "chunks_exact", since = "1.31.0")]
4378 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
4381 #[stable(feature = "chunks_exact", since = "1.31.0")]
4382 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
4383 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4384 let start = i * self.chunk_size;
4385 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4387 fn may_have_side_effect() -> bool { false }
4390 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4391 /// time), starting at the end of the slice.
4393 /// When the slice len is not evenly divided by the chunk size, the last slice
4394 /// of the iteration will be the remainder.
4396 /// This struct is created by the [`rchunks`] method on [slices].
4398 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
4399 /// [slices]: ../../std/primitive.slice.html
4401 #[stable(feature = "rchunks", since = "1.31.0")]
4402 pub struct RChunks<'a, T:'a> {
4407 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4408 #[stable(feature = "rchunks", since = "1.31.0")]
4409 impl<T> Clone for RChunks<'_, T> {
4410 fn clone(&self) -> Self {
4413 chunk_size: self.chunk_size,
4418 #[stable(feature = "rchunks", since = "1.31.0")]
4419 impl<'a, T> Iterator for RChunks<'a, T> {
4420 type Item = &'a [T];
4423 fn next(&mut self) -> Option<&'a [T]> {
4424 if self.v.is_empty() {
4427 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4428 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4435 fn size_hint(&self) -> (usize, Option<usize>) {
4436 if self.v.is_empty() {
4439 let n = self.v.len() / self.chunk_size;
4440 let rem = self.v.len() % self.chunk_size;
4441 let n = if rem > 0 { n+1 } else { n };
4447 fn count(self) -> usize {
4452 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4453 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4454 if end >= self.v.len() || overflow {
4458 // Can't underflow because of the check above
4459 let end = self.v.len() - end;
4460 let start = match end.checked_sub(self.chunk_size) {
4464 let nth = &self.v[start..end];
4465 self.v = &self.v[0..start];
4471 fn last(self) -> Option<Self::Item> {
4472 if self.v.is_empty() {
4475 let rem = self.v.len() % self.chunk_size;
4476 let end = if rem == 0 { self.chunk_size } else { rem };
4477 Some(&self.v[0..end])
4482 #[stable(feature = "rchunks", since = "1.31.0")]
4483 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
4485 fn next_back(&mut self) -> Option<&'a [T]> {
4486 if self.v.is_empty() {
4489 let remainder = self.v.len() % self.chunk_size;
4490 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4491 let (fst, snd) = self.v.split_at(chunksz);
4498 #[stable(feature = "rchunks", since = "1.31.0")]
4499 impl<T> ExactSizeIterator for RChunks<'_, T> {}
4501 #[unstable(feature = "trusted_len", issue = "37572")]
4502 unsafe impl<T> TrustedLen for RChunks<'_, T> {}
4504 #[stable(feature = "rchunks", since = "1.31.0")]
4505 impl<T> FusedIterator for RChunks<'_, T> {}
4508 #[stable(feature = "rchunks", since = "1.31.0")]
4509 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
4510 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4511 let end = self.v.len() - i * self.chunk_size;
4512 let start = match end.checked_sub(self.chunk_size) {
4514 Some(start) => start,
4516 from_raw_parts(self.v.as_ptr().add(start), end - start)
4518 fn may_have_side_effect() -> bool { false }
4521 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4522 /// elements at a time), starting at the end of the slice.
4524 /// When the slice len is not evenly divided by the chunk size, the last slice
4525 /// of the iteration will be the remainder.
4527 /// This struct is created by the [`rchunks_mut`] method on [slices].
4529 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
4530 /// [slices]: ../../std/primitive.slice.html
4532 #[stable(feature = "rchunks", since = "1.31.0")]
4533 pub struct RChunksMut<'a, T:'a> {
4538 #[stable(feature = "rchunks", since = "1.31.0")]
4539 impl<'a, T> Iterator for RChunksMut<'a, T> {
4540 type Item = &'a mut [T];
4543 fn next(&mut self) -> Option<&'a mut [T]> {
4544 if self.v.is_empty() {
4547 let sz = cmp::min(self.v.len(), self.chunk_size);
4548 let tmp = mem::replace(&mut self.v, &mut []);
4549 let tmp_len = tmp.len();
4550 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4557 fn size_hint(&self) -> (usize, Option<usize>) {
4558 if self.v.is_empty() {
4561 let n = self.v.len() / self.chunk_size;
4562 let rem = self.v.len() % self.chunk_size;
4563 let n = if rem > 0 { n + 1 } else { n };
4569 fn count(self) -> usize {
4574 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4575 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4576 if end >= self.v.len() || overflow {
4580 // Can't underflow because of the check above
4581 let end = self.v.len() - end;
4582 let start = match end.checked_sub(self.chunk_size) {
4586 let tmp = mem::replace(&mut self.v, &mut []);
4587 let (head, tail) = tmp.split_at_mut(start);
4588 let (nth, _) = tail.split_at_mut(end - start);
4595 fn last(self) -> Option<Self::Item> {
4596 if self.v.is_empty() {
4599 let rem = self.v.len() % self.chunk_size;
4600 let end = if rem == 0 { self.chunk_size } else { rem };
4601 Some(&mut self.v[0..end])
4606 #[stable(feature = "rchunks", since = "1.31.0")]
4607 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
4609 fn next_back(&mut self) -> Option<&'a mut [T]> {
4610 if self.v.is_empty() {
4613 let remainder = self.v.len() % self.chunk_size;
4614 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4615 let tmp = mem::replace(&mut self.v, &mut []);
4616 let (head, tail) = tmp.split_at_mut(sz);
4623 #[stable(feature = "rchunks", since = "1.31.0")]
4624 impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
4626 #[unstable(feature = "trusted_len", issue = "37572")]
4627 unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
4629 #[stable(feature = "rchunks", since = "1.31.0")]
4630 impl<T> FusedIterator for RChunksMut<'_, T> {}
4633 #[stable(feature = "rchunks", since = "1.31.0")]
4634 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
4635 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4636 let end = self.v.len() - i * self.chunk_size;
4637 let start = match end.checked_sub(self.chunk_size) {
4639 Some(start) => start,
4641 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4643 fn may_have_side_effect() -> bool { false }
4646 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4647 /// time), starting at the end of the slice.
4649 /// When the slice len is not evenly divided by the chunk size, the last
4650 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4651 /// the [`remainder`] function from the iterator.
4653 /// This struct is created by the [`rchunks_exact`] method on [slices].
4655 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
4656 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4657 /// [slices]: ../../std/primitive.slice.html
4659 #[stable(feature = "rchunks", since = "1.31.0")]
4660 pub struct RChunksExact<'a, T:'a> {
4666 impl<'a, T> RChunksExact<'a, T> {
4667 /// Returns the remainder of the original slice that is not going to be
4668 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4670 #[stable(feature = "rchunks", since = "1.31.0")]
4671 pub fn remainder(&self) -> &'a [T] {
4676 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4677 #[stable(feature = "rchunks", since = "1.31.0")]
4678 impl<'a, T> Clone for RChunksExact<'a, T> {
4679 fn clone(&self) -> RChunksExact<'a, T> {
4683 chunk_size: self.chunk_size,
4688 #[stable(feature = "rchunks", since = "1.31.0")]
4689 impl<'a, T> Iterator for RChunksExact<'a, T> {
4690 type Item = &'a [T];
4693 fn next(&mut self) -> Option<&'a [T]> {
4694 if self.v.len() < self.chunk_size {
4697 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4704 fn size_hint(&self) -> (usize, Option<usize>) {
4705 let n = self.v.len() / self.chunk_size;
4710 fn count(self) -> usize {
4715 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4716 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4717 if end >= self.v.len() || overflow {
4721 let (fst, _) = self.v.split_at(self.v.len() - end);
4728 fn last(mut self) -> Option<Self::Item> {
4733 #[stable(feature = "rchunks", since = "1.31.0")]
4734 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
4736 fn next_back(&mut self) -> Option<&'a [T]> {
4737 if self.v.len() < self.chunk_size {
4740 let (fst, snd) = self.v.split_at(self.chunk_size);
4747 #[stable(feature = "rchunks", since = "1.31.0")]
4748 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
4749 fn is_empty(&self) -> bool {
4754 #[unstable(feature = "trusted_len", issue = "37572")]
4755 unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
4757 #[stable(feature = "rchunks", since = "1.31.0")]
4758 impl<T> FusedIterator for RChunksExact<'_, T> {}
4761 #[stable(feature = "rchunks", since = "1.31.0")]
4762 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
4763 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4764 let end = self.v.len() - i * self.chunk_size;
4765 let start = end - self.chunk_size;
4766 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4768 fn may_have_side_effect() -> bool { false }
4771 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4772 /// elements at a time), starting at the end of the slice.
4774 /// When the slice len is not evenly divided by the chunk size, the last up to
4775 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4776 /// [`into_remainder`] function from the iterator.
4778 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
4780 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
4781 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4782 /// [slices]: ../../std/primitive.slice.html
4784 #[stable(feature = "rchunks", since = "1.31.0")]
4785 pub struct RChunksExactMut<'a, T:'a> {
4791 impl<'a, T> RChunksExactMut<'a, T> {
4792 /// Returns the remainder of the original slice that is not going to be
4793 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4795 #[stable(feature = "rchunks", since = "1.31.0")]
4796 pub fn into_remainder(self) -> &'a mut [T] {
4801 #[stable(feature = "rchunks", since = "1.31.0")]
4802 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
4803 type Item = &'a mut [T];
4806 fn next(&mut self) -> Option<&'a mut [T]> {
4807 if self.v.len() < self.chunk_size {
4810 let tmp = mem::replace(&mut self.v, &mut []);
4811 let tmp_len = tmp.len();
4812 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4819 fn size_hint(&self) -> (usize, Option<usize>) {
4820 let n = self.v.len() / self.chunk_size;
4825 fn count(self) -> usize {
4830 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4831 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4832 if end >= self.v.len() || overflow {
4836 let tmp = mem::replace(&mut self.v, &mut []);
4837 let tmp_len = tmp.len();
4838 let (fst, _) = tmp.split_at_mut(tmp_len - end);
4845 fn last(mut self) -> Option<Self::Item> {
4850 #[stable(feature = "rchunks", since = "1.31.0")]
4851 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
4853 fn next_back(&mut self) -> Option<&'a mut [T]> {
4854 if self.v.len() < self.chunk_size {
4857 let tmp = mem::replace(&mut self.v, &mut []);
4858 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4865 #[stable(feature = "rchunks", since = "1.31.0")]
4866 impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
4867 fn is_empty(&self) -> bool {
4872 #[unstable(feature = "trusted_len", issue = "37572")]
4873 unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
4875 #[stable(feature = "rchunks", since = "1.31.0")]
4876 impl<T> FusedIterator for RChunksExactMut<'_, T> {}
4879 #[stable(feature = "rchunks", since = "1.31.0")]
4880 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
4881 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4882 let end = self.v.len() - i * self.chunk_size;
4883 let start = end - self.chunk_size;
4884 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4886 fn may_have_side_effect() -> bool { false }
4893 /// Forms a slice from a pointer and a length.
4895 /// The `len` argument is the number of **elements**, not the number of bytes.
4899 /// This function is unsafe as there is no guarantee that the given pointer is
4900 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
4901 /// lifetime for the returned slice.
4903 /// `data` must be non-null and aligned, even for zero-length slices. One
4904 /// reason for this is that enum layout optimizations may rely on references
4905 /// (including slices of any length) being aligned and non-null to distinguish
4906 /// them from other data. You can obtain a pointer that is usable as `data`
4907 /// for zero-length slices using [`NonNull::dangling()`].
4909 /// The total size of the slice must be no larger than `isize::MAX` **bytes**
4910 /// in memory. See the safety documentation of [`pointer::offset`].
4914 /// The lifetime for the returned slice is inferred from its usage. To
4915 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
4916 /// source lifetime is safe in the context, such as by providing a helper
4917 /// function taking the lifetime of a host value for the slice, or by explicit
4925 /// // manifest a slice for a single element
4927 /// let ptr = &x as *const _;
4928 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
4929 /// assert_eq!(slice[0], 42);
4932 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
4933 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
4935 #[stable(feature = "rust1", since = "1.0.0")]
4936 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
// Debug-build-only, best-effort checks of the documented safety contract
// (alignment, and total size not exceeding isize::MAX bytes).
4937 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
4938 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4939 "attempt to create slice covering half the address space")
4940 Repr { raw: FatPtr { data, len } }.rust
4943 /// Performs the same functionality as [`from_raw_parts`], except that a
4944 /// mutable slice is returned.
4946 /// This function is unsafe for the same reasons as [`from_raw_parts`], as well
4947 /// as not being able to provide a non-aliasing guarantee of the returned
4948 /// mutable slice. `data` must be non-null and aligned even for zero-length
4949 /// slices as with [`from_raw_parts`]. The total size of the slice must be no
4950 /// larger than `isize::MAX` **bytes** in memory.
4952 /// See the documentation of [`from_raw_parts`] for more details.
4954 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
4956 #[stable(feature = "rust1", since = "1.0.0")]
4957 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
// Same debug-only contract checks as `from_raw_parts` above.
4958 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
4959 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4960 "attempt to create slice covering half the address space");
// Build the mutable fat pointer through the `Repr`/`FatPtr` union.
4961 Repr { raw: FatPtr { data, len } }.rust_mut
4964 /// Converts a reference to T into a slice of length 1 (without copying).
4965 #[stable(feature = "from_ref", since = "1.28.0")]
4966 pub fn from_ref<T>(s: &T) -> &[T] {
// Sound: a `&T` is always valid, aligned and non-null for exactly one element.
4968 from_raw_parts(s, 1)
4972 /// Converts a mutable reference to T into a mutable slice of length 1 (without copying).
4973 #[stable(feature = "from_ref", since = "1.28.0")]
4974 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
// Sound: a `&mut T` is valid, aligned, non-null and unique for one element.
4976 from_raw_parts_mut(s, 1)
4980 // This function is public only because there is no other way to unit test heapsort.
4981 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
// Thin wrapper: sorts `v` with the caller-supplied less-than predicate by
// delegating to the private `sort` module's heapsort.
4983 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
4984 where F: FnMut(&T, &T) -> bool
4986 sort::heapsort(v, &mut is_less);
4990 // Comparison traits
4994 /// Calls implementation provided memcmp.
4996 /// Interprets the data as u8.
4998 /// Returns 0 for equal, < 0 for less than and > 0 for greater
5000 // FIXME(#32610): Return type should be c_int
// NOTE(review): this declaration sits inside an `extern` block whose header
// is not visible in this excerpt; callers below invoke it in `unsafe` code.
5001 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
5004 #[stable(feature = "rust1", since = "1.0.0")]
// Slice equality delegates to the specializable `SlicePartialEq` helper trait
// (defined below) so `[u8]`-like types can use the memcmp fast path.
5005 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
5006 fn eq(&self, other: &[B]) -> bool {
5007 SlicePartialEq::equal(self, other)
5010 fn ne(&self, other: &[B]) -> bool {
5011 SlicePartialEq::not_equal(self, other)
// `Eq` is a marker refinement of `PartialEq`; no methods to implement.
5015 #[stable(feature = "rust1", since = "1.0.0")]
5016 impl<T: Eq> Eq for [T] {}
5018 /// Implements comparison of vectors lexicographically.
5019 #[stable(feature = "rust1", since = "1.0.0")]
5020 impl<T: Ord> Ord for [T] {
5021 fn cmp(&self, other: &[T]) -> Ordering {
// Delegates to the specializable `SliceOrd` helper below.
5022 SliceOrd::compare(self, other)
5026 /// Implements comparison of vectors lexicographically.
5027 #[stable(feature = "rust1", since = "1.0.0")]
5028 impl<T: PartialOrd> PartialOrd for [T] {
5029 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
// Delegates to the specializable `SlicePartialOrd` helper below.
5030 SlicePartialOrd::partial_compare(self, other)
5035 // intermediate trait for specialization of slice's PartialEq
5036 trait SlicePartialEq<B> {
5037 fn equal(&self, other: &[B]) -> bool;
// Default `ne` defined in terms of `equal`, so specialized impls only need
// to provide the one method.
5039 fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
5042 // Generic slice equality
5043 impl<A, B> SlicePartialEq<B> for [A]
5044 where A: PartialEq<B>
// Fallback (`default fn`, overridable by specialization): unequal lengths are
// never equal, otherwise compare element-by-element.
5046 default fn equal(&self, other: &[B]) -> bool {
5047 if self.len() != other.len() {
5051 for i in 0..self.len() {
5052 if !self[i].eq(&other[i]) {
5061 // Use memcmp for bytewise equality when the types allow
5062 impl<A> SlicePartialEq<A> for [A]
5063 where A: PartialEq<A> + BytewiseEquality
5065 fn equal(&self, other: &[A]) -> bool {
5066 if self.len() != other.len() {
// Fast path: equal-length slices starting at the same address are identical.
5069 if self.as_ptr() == other.as_ptr() {
// Compare the raw bytes of both slices in a single memcmp call;
// `size_of_val` gives the total byte length of `self`.
5073 let size = mem::size_of_val(self);
5074 memcmp(self.as_ptr() as *const u8,
5075 other.as_ptr() as *const u8, size) == 0
5081 // intermediate trait for specialization of slice's PartialOrd
// Helper trait so `partial_cmp` on slices can be specialized per element type.
5082 trait SlicePartialOrd<B> {
5083 fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
// Generic fallback (`where` clause elided in this excerpt — presumably
// `A: PartialOrd`): lexicographic comparison element by element, with the
// length comparison breaking ties when one slice is a prefix of the other.
5086 impl<A> SlicePartialOrd<A> for [A]
5089 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5090 let l = cmp::min(self.len(), other.len());
5092 // Slice to the loop iteration range to enable bound check
5093 // elimination in the compiler
5094 let lhs = &self[..l];
5095 let rhs = &other[..l];
5098 match lhs[i].partial_cmp(&rhs[i]) {
5099 Some(Ordering::Equal) => (),
// First non-equal (or incomparable, i.e. `None`) pair decides the result.
5100 non_eq => return non_eq,
// All compared elements equal: the shorter slice orders first.
5104 self.len().partial_cmp(&other.len())
// Specialization (`where` clause elided — presumably `A: Ord`): a total order
// exists, so `partial_compare` can delegate to the infallible `SliceOrd`.
5108 impl<A> SlicePartialOrd<A> for [A]
5111 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5112 Some(SliceOrd::compare(self, other))
5117 // intermediate trait for specialization of slice's Ord
// (trait header line elided in this excerpt)
5119 fn compare(&self, other: &[B]) -> Ordering;
// Generic fallback (`where` clause elided — presumably `A: Ord`):
// lexicographic element-wise comparison, lengths break ties.
5122 impl<A> SliceOrd<A> for [A]
5125 default fn compare(&self, other: &[A]) -> Ordering {
5126 let l = cmp::min(self.len(), other.len());
5128 // Slice to the loop iteration range to enable bound check
5129 // elimination in the compiler
5130 let lhs = &self[..l];
5131 let rhs = &other[..l];
5134 match lhs[i].cmp(&rhs[i]) {
5135 Ordering::Equal => (),
// First differing pair decides the result.
5136 non_eq => return non_eq,
// Common prefix equal: the shorter slice orders first.
5140 self.len().cmp(&other.len())
5144 // memcmp compares a sequence of unsigned bytes lexicographically.
5145 // this matches the order we want for [u8], but no others (not even [i8]).
5146 impl SliceOrd<u8> for [u8] {
5148 fn compare(&self, other: &[u8]) -> Ordering {
// Compare the common prefix with a single memcmp call...
5149 let order = unsafe {
5150 memcmp(self.as_ptr(), other.as_ptr(),
5151 cmp::min(self.len(), other.len()))
// ...then (in the `order == 0` branch, condition elided in this excerpt)
// break the tie by comparing lengths.
5154 self.len().cmp(&other.len())
5155 } else if order < 0 {
5164 /// Trait implemented for types that can be compared for equality using
5165 /// their bytewise representation
5166 trait BytewiseEquality { }
// Helper macro: implements the given marker trait for every listed type.
5168 macro_rules! impl_marker_for {
5169 ($traitname:ident, $($ty:ty)*) => {
5171 impl $traitname for $ty { }
// Primitive integer types, `char` and `bool` all have a unique byte
// representation per value, so bytewise equality matches `==`.
5176 impl_marker_for!(BytewiseEquality,
5177 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
// Caller must guarantee `i` is in bounds of the iterator's remaining elements.
5180 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
5181 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
// Plain reads — no user code runs, so no observable side effects.
5184 fn may_have_side_effect() -> bool { false }
// Same contract as above, for the mutable iterator: `self.ptr` presumably
// points at the first remaining element — offset by `i` and reborrow.
5188 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
5189 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
5190 &mut *self.ptr.add(i)
5192 fn may_have_side_effect() -> bool { false }
// Specializable helper behind `slice::contains`: `self` is the needle,
// `x` the haystack.
5195 trait SliceContains: Sized {
5196 fn slice_contains(&self, x: &[Self]) -> bool;
// Generic fallback: linear scan with `==`.
5199 impl<T> SliceContains for T where T: PartialEq {
5200 default fn slice_contains(&self, x: &[Self]) -> bool {
5201 x.iter().any(|y| *y == *self)
// Specialization: byte search via the optimized memchr implementation.
5205 impl SliceContains for u8 {
5206 fn slice_contains(&self, x: &[Self]) -> bool {
5207 memchr::memchr(*self, x).is_some()
// Specialization: reinterpret the `i8` haystack as bytes and reuse memchr.
5211 impl SliceContains for i8 {
5212 fn slice_contains(&self, x: &[Self]) -> bool {
5213 let byte = *self as u8;
// Sound: `[i8]` and `[u8]` have the same element size and alignment, so the
// pointer cast plus identical length form a valid `&[u8]` view.
5214 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
5215 memchr::memchr(byte, bytes).is_some()