1 //! Slice management and manipulation.
3 //! For more details see [`std::slice`].
5 //! [`std::slice`]: ../../std/slice/index.html
7 #![stable(feature = "rust1", since = "1.0.0")]
9 // How this module is organized.
11 // The library infrastructure for slices is fairly messy. There's
12 // a lot of stuff defined here. Let's keep it clean.
14 // The layout of this file is thus:
16 // * Inherent methods. This is where most of the slice API resides.
17 // * Implementations of a few common traits with important slice ops.
18 // * Definitions of a bunch of iterators.
20 // * The `raw` and `bytes` submodules.
21 // * Boilerplate trait implementations.
23 use cmp::Ordering::{self, Less, Equal, Greater};
26 use intrinsics::assume;
29 use ops::{FnMut, Try, self};
31 use option::Option::{None, Some};
33 use result::Result::{Ok, Err};
36 use marker::{Copy, Send, Sync, Sized, self};
37 use iter_private::TrustedRandomAccess;
39 #[unstable(feature = "slice_internals", issue = "0",
40 reason = "exposed from core to be reused in std; use the memchr crate")]
41 /// Pure rust memchr implementation, taken from rust-memchr
// Union giving both a safe-slice view and a raw (pointer, length) view of
// the same `&[T]`; used by `len` below to read the length in a `const fn`.
// NOTE(review): the remaining fields (`raw`, `rust`) are elided from this
// excerpt — confirm against the full source.
48 union Repr<'a, T: 'a> {
50 rust_mut: &'a mut [T],
67 /// Returns the number of elements in the slice.
72 /// let a = [1, 2, 3];
73 /// assert_eq!(a.len(), 3);
75 #[stable(feature = "rust1", since = "1.0.0")]
77 #[rustc_const_unstable(feature = "const_slice_len")]
78 pub const fn len(&self) -> usize {
// Reinterpret the fat pointer through the `Repr` union and read the raw
// length field directly — this keeps the function usable as a `const fn`.
80 Repr { rust: self }.raw.len
84 /// Returns `true` if the slice has a length of 0.
89 /// let a = [1, 2, 3];
90 /// assert!(!a.is_empty());
92 #[stable(feature = "rust1", since = "1.0.0")]
94 #[rustc_const_unstable(feature = "const_slice_len")]
95 pub const fn is_empty(&self) -> bool {
// Body elided from this excerpt; presumably `self.len() == 0` — confirm
// against the full source.
99 /// Returns the first element of the slice, or `None` if it is empty.
104 /// let v = [10, 40, 30];
105 /// assert_eq!(Some(&10), v.first());
107 /// let w: &[i32] = &[];
108 /// assert_eq!(None, w.first());
110 #[stable(feature = "rust1", since = "1.0.0")]
112 pub fn first(&self) -> Option<&T> {
// Body elided from this excerpt; presumably delegates to `self.get(0)` —
// confirm against the full source.
116 /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
121 /// let x = &mut [0, 1, 2];
123 /// if let Some(first) = x.first_mut() {
126 /// assert_eq!(x, &[5, 1, 2]);
128 #[stable(feature = "rust1", since = "1.0.0")]
130 pub fn first_mut(&mut self) -> Option<&mut T> {
// Body elided from this excerpt; presumably delegates to `self.get_mut(0)`
// — confirm against the full source.
134 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
139 /// let x = &[0, 1, 2];
141 /// if let Some((first, elements)) = x.split_first() {
142 /// assert_eq!(first, &0);
143 /// assert_eq!(elements, &[1, 2]);
146 #[stable(feature = "slice_splits", since = "1.5.0")]
148 pub fn split_first(&self) -> Option<(&T, &[T])> {
149 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
152 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
157 /// let x = &mut [0, 1, 2];
159 /// if let Some((first, elements)) = x.split_first_mut() {
164 /// assert_eq!(x, &[3, 4, 5]);
166 #[stable(feature = "slice_splits", since = "1.5.0")]
168 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
169 if self.is_empty() { None } else {
170 let split = self.split_at_mut(1);
171 Some((&mut split.0[0], split.1))
175 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
180 /// let x = &[0, 1, 2];
182 /// if let Some((last, elements)) = x.split_last() {
183 /// assert_eq!(last, &2);
184 /// assert_eq!(elements, &[0, 1]);
187 #[stable(feature = "slice_splits", since = "1.5.0")]
189 pub fn split_last(&self) -> Option<(&T, &[T])> {
190 let len = self.len();
191 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
194 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
199 /// let x = &mut [0, 1, 2];
201 /// if let Some((last, elements)) = x.split_last_mut() {
206 /// assert_eq!(x, &[4, 5, 3]);
208 #[stable(feature = "slice_splits", since = "1.5.0")]
210 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
211 let len = self.len();
212 if len == 0 { None } else {
213 let split = self.split_at_mut(len - 1);
214 Some((&mut split.1[0], split.0))
219 /// Returns the last element of the slice, or `None` if it is empty.
224 /// let v = [10, 40, 30];
225 /// assert_eq!(Some(&30), v.last());
227 /// let w: &[i32] = &[];
228 /// assert_eq!(None, w.last());
230 #[stable(feature = "rust1", since = "1.0.0")]
232 pub fn last(&self) -> Option<&T> {
// `checked_sub` yields `None` for an empty slice, which `?` propagates —
// no underflow, no panic. The final `self.get(last_idx)` is elided here.
233 let last_idx = self.len().checked_sub(1)?;
237 /// Returns a mutable pointer to the last item in the slice.
242 /// let x = &mut [0, 1, 2];
244 /// if let Some(last) = x.last_mut() {
247 /// assert_eq!(x, &[0, 1, 10]);
249 #[stable(feature = "rust1", since = "1.0.0")]
251 pub fn last_mut(&mut self) -> Option<&mut T> {
252 let last_idx = self.len().checked_sub(1)?;
253 self.get_mut(last_idx)
256 /// Returns a reference to an element or subslice depending on the type of
259 /// - If given a position, returns a reference to the element at that
260 /// position or `None` if out of bounds.
261 /// - If given a range, returns the subslice corresponding to that range,
262 /// or `None` if out of bounds.
267 /// let v = [10, 40, 30];
268 /// assert_eq!(Some(&40), v.get(1));
269 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
270 /// assert_eq!(None, v.get(3));
271 /// assert_eq!(None, v.get(0..4));
273 #[stable(feature = "rust1", since = "1.0.0")]
275 pub fn get<I>(&self, index: I) -> Option<&I::Output>
// `SliceIndex` unifies `usize` and range indexing behind one generic API.
// Body elided from this excerpt; presumably `index.get(self)` — confirm.
276 where I: SliceIndex<Self>
281 /// Returns a mutable reference to an element or subslice depending on the
282 /// type of index (see [`get`]) or `None` if the index is out of bounds.
284 /// [`get`]: #method.get
289 /// let x = &mut [0, 1, 2];
291 /// if let Some(elem) = x.get_mut(1) {
294 /// assert_eq!(x, &[0, 42, 2]);
296 #[stable(feature = "rust1", since = "1.0.0")]
298 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
// Mutable counterpart of `get`; same `SliceIndex` dispatch. Body elided
// from this excerpt; presumably `index.get_mut(self)` — confirm.
299 where I: SliceIndex<Self>
304 /// Returns a reference to an element or subslice, without doing bounds
307 /// This is generally not recommended, use with caution! For a safe
308 /// alternative see [`get`].
310 /// [`get`]: #method.get
315 /// let x = &[1, 2, 4];
318 /// assert_eq!(x.get_unchecked(1), &2);
321 #[stable(feature = "rust1", since = "1.0.0")]
323 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
324 where I: SliceIndex<Self>
// Unsafe: the caller must guarantee `index` is in bounds; the bounds check
// is skipped entirely and the work is delegated to `SliceIndex`.
326 index.get_unchecked(self)
329 /// Returns a mutable reference to an element or subslice, without doing
332 /// This is generally not recommended, use with caution! For a safe
333 /// alternative see [`get_mut`].
335 /// [`get_mut`]: #method.get_mut
340 /// let x = &mut [1, 2, 4];
343 /// let elem = x.get_unchecked_mut(1);
346 /// assert_eq!(x, &[1, 13, 4]);
348 #[stable(feature = "rust1", since = "1.0.0")]
350 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
351 where I: SliceIndex<Self>
// Unsafe: the caller must guarantee `index` is in bounds; no check is made.
353 index.get_unchecked_mut(self)
356 /// Returns a raw pointer to the slice's buffer.
358 /// The caller must ensure that the slice outlives the pointer this
359 /// function returns, or else it will end up pointing to garbage.
361 /// Modifying the container referenced by this slice may cause its buffer
362 /// to be reallocated, which would also make any pointers to it invalid.
367 /// let x = &[1, 2, 4];
368 /// let x_ptr = x.as_ptr();
371 /// for i in 0..x.len() {
372 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
376 #[stable(feature = "rust1", since = "1.0.0")]
378 pub const fn as_ptr(&self) -> *const T {
// Cast the fat slice pointer down to a thin element pointer; the length
// is simply discarded.
379 self as *const [T] as *const T
382 /// Returns an unsafe mutable pointer to the slice's buffer.
384 /// The caller must ensure that the slice outlives the pointer this
385 /// function returns, or else it will end up pointing to garbage.
387 /// Modifying the container referenced by this slice may cause its buffer
388 /// to be reallocated, which would also make any pointers to it invalid.
393 /// let x = &mut [1, 2, 4];
394 /// let x_ptr = x.as_mut_ptr();
397 /// for i in 0..x.len() {
398 /// *x_ptr.add(i) += 2;
401 /// assert_eq!(x, &[3, 4, 6]);
403 #[stable(feature = "rust1", since = "1.0.0")]
405 pub fn as_mut_ptr(&mut self) -> *mut T {
// Same thin-pointer cast as `as_ptr`, but through `&mut` for mutation.
406 self as *mut [T] as *mut T
409 /// Swaps two elements in the slice.
413 /// * a - The index of the first element
414 /// * b - The index of the second element
418 /// Panics if `a` or `b` are out of bounds.
423 /// let mut v = ["a", "b", "c", "d"];
425 /// assert!(v == ["a", "d", "c", "b"]);
427 #[stable(feature = "rust1", since = "1.0.0")]
429 pub fn swap(&mut self, a: usize, b: usize) {
431 // Can't take two mutable loans from one vector, so instead just cast
432 // them to their raw pointers to do the swap
// The `self[a]`/`self[b]` indexing below performs the bounds checks that
// produce the documented panics. NOTE(review): the actual pointer swap
// (presumably `ptr::swap(pa, pb)`) is elided from this excerpt — confirm.
433 let pa: *mut T = &mut self[a];
434 let pb: *mut T = &mut self[b];
439 /// Reverses the order of elements in the slice, in place.
444 /// let mut v = [1, 2, 3];
446 /// assert!(v == [3, 2, 1]);
448 #[stable(feature = "rust1", since = "1.0.0")]
450 pub fn reverse(&mut self) {
// `i` walks inward from the front; the element it is swapped with is
// computed from the (elided) slice length `ln`.
451 let mut i: usize = 0;
454 // For very small types, all the individual reads in the normal
455 // path perform poorly. We can do better, given efficient unaligned
456 // load/store, by loading a larger chunk and reversing a register.
458 // Ideally LLVM would do this for us, as it knows better than we do
459 // whether unaligned reads are efficient (since that changes between
460 // different ARM versions, for example) and what the best chunk size
461 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
462 // the loop, so we need to do this ourselves. (Hypothesis: reverse
463 // is troublesome because the sides can be aligned differently --
464 // will be, when the length is odd -- so there's no way of emitting
465 // pre- and postludes to use fully-aligned SIMD in the middle.)
// NOTE(review): the `let fast_unaligned =` binding and the `ln` definition
// are elided from this excerpt; only the cfg! expression survives here.
468 cfg!(any(target_arch = "x86", target_arch = "x86_64"));
// Fast path for 1-byte elements: reverse a whole usize-sized chunk from
// each end at once with a byte swap.
470 if fast_unaligned && mem::size_of::<T>() == 1 {
471 // Use the llvm.bswap intrinsic to reverse u8s in a usize
472 let chunk = mem::size_of::<usize>();
473 while i + chunk - 1 < ln / 2 {
475 let pa: *mut T = self.get_unchecked_mut(i);
476 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
477 let va = ptr::read_unaligned(pa as *mut usize);
478 let vb = ptr::read_unaligned(pb as *mut usize);
// Each side's chunk is byte-reversed and written to the opposite side.
479 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
480 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
// Fast path for 2-byte elements: two u16s per u32, swapped with a rotate.
486 if fast_unaligned && mem::size_of::<T>() == 2 {
487 // Use rotate-by-16 to reverse u16s in a u32
488 let chunk = mem::size_of::<u32>() / 2;
489 while i + chunk - 1 < ln / 2 {
491 let pa: *mut T = self.get_unchecked_mut(i);
492 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
493 let va = ptr::read_unaligned(pa as *mut u32);
494 let vb = ptr::read_unaligned(pb as *mut u32);
495 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
496 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
// Generic tail: swap single elements from both ends (loop header elided).
503 // Unsafe swap to avoid the bounds check in safe swap.
505 let pa: *mut T = self.get_unchecked_mut(i);
506 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
513 /// Returns an iterator over the slice.
518 /// let x = &[1, 2, 4];
519 /// let mut iterator = x.iter();
521 /// assert_eq!(iterator.next(), Some(&1));
522 /// assert_eq!(iterator.next(), Some(&2));
523 /// assert_eq!(iterator.next(), Some(&4));
524 /// assert_eq!(iterator.next(), None);
526 #[stable(feature = "rust1", since = "1.0.0")]
528 pub fn iter(&self) -> Iter<T> {
530 let ptr = self.as_ptr();
// Hint to LLVM: slice data pointers are never null, which removes null
// checks from downstream iterator code.
531 assume(!ptr.is_null());
// For zero-sized types, `end` counts elements via a u8 pointer offset
// instead of real address arithmetic (which would not advance).
533 let end = if mem::size_of::<T>() == 0 {
534 (ptr as *const u8).wrapping_add(self.len()) as *const T
542 _marker: marker::PhantomData
547 /// Returns an iterator that allows modifying each value.
552 /// let x = &mut [1, 2, 4];
553 /// for elem in x.iter_mut() {
556 /// assert_eq!(x, &[3, 4, 6]);
558 #[stable(feature = "rust1", since = "1.0.0")]
560 pub fn iter_mut(&mut self) -> IterMut<T> {
562 let ptr = self.as_mut_ptr();
// Same non-null hint and ZST element-counting trick as in `iter`.
563 assume(!ptr.is_null());
565 let end = if mem::size_of::<T>() == 0 {
566 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
574 _marker: marker::PhantomData
579 /// Returns an iterator over all contiguous windows of length
580 /// `size`. The windows overlap. If the slice is shorter than
581 /// `size`, the iterator returns no values.
585 /// Panics if `size` is 0.
590 /// let slice = ['r', 'u', 's', 't'];
591 /// let mut iter = slice.windows(2);
592 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
593 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
594 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
595 /// assert!(iter.next().is_none());
598 /// If the slice is shorter than `size`:
601 /// let slice = ['f', 'o', 'o'];
602 /// let mut iter = slice.windows(4);
603 /// assert!(iter.next().is_none());
605 #[stable(feature = "rust1", since = "1.0.0")]
607 pub fn windows(&self, size: usize) -> Windows<T> {
// NOTE(review): the `assert!(size != 0)` that backs the documented panic
// is elided from this excerpt — confirm against the full source.
609 Windows { v: self, size }
612 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
613 /// beginning of the slice.
615 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
616 /// slice, then the last chunk will not have length `chunk_size`.
618 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
619 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
620 /// slice.
624 /// Panics if `chunk_size` is 0.
629 /// let slice = ['l', 'o', 'r', 'e', 'm'];
630 /// let mut iter = slice.chunks(2);
631 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
632 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
633 /// assert_eq!(iter.next().unwrap(), &['m']);
634 /// assert!(iter.next().is_none());
637 /// [`chunks_exact`]: #method.chunks_exact
638 /// [`rchunks`]: #method.rchunks
639 #[stable(feature = "rust1", since = "1.0.0")]
641 pub fn chunks(&self, chunk_size: usize) -> Chunks<T> {
// Zero-sized chunks would never make progress; reject them up front.
642 assert!(chunk_size != 0);
643 Chunks { v: self, chunk_size }
646 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
647 /// beginning of the slice.
649 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
650 /// length of the slice, then the last chunk will not have length `chunk_size`.
652 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
653 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
654 /// the end of the slice.
658 /// Panics if `chunk_size` is 0.
663 /// let v = &mut [0, 0, 0, 0, 0];
664 /// let mut count = 1;
666 /// for chunk in v.chunks_mut(2) {
667 /// for elem in chunk.iter_mut() {
672 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
675 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
676 /// [`rchunks_mut`]: #method.rchunks_mut
677 #[stable(feature = "rust1", since = "1.0.0")]
679 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
// Zero-sized chunks would never make progress; reject them up front.
680 assert!(chunk_size != 0);
681 ChunksMut { v: self, chunk_size }
684 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
685 /// beginning of the slice.
687 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
688 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
689 /// from the `remainder` function of the iterator.
691 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
692 /// resulting code better than in the case of [`chunks`].
694 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
695 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
699 /// Panics if `chunk_size` is 0.
704 /// let slice = ['l', 'o', 'r', 'e', 'm'];
705 /// let mut iter = slice.chunks_exact(2);
706 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
707 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
708 /// assert!(iter.next().is_none());
709 /// assert_eq!(iter.remainder(), &['m']);
712 /// [`chunks`]: #method.chunks
713 /// [`rchunks_exact`]: #method.rchunks_exact
714 #[stable(feature = "chunks_exact", since = "1.31.0")]
716 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<T> {
717 assert!(chunk_size != 0);
718 let rem = self.len() % chunk_size;
719 let len = self.len() - rem;
720 let (fst, snd) = self.split_at(len);
721 ChunksExact { v: fst, rem: snd, chunk_size }
724 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
725 /// beginning of the slice.
727 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
728 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
729 /// retrieved from the `into_remainder` function of the iterator.
731 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
732 /// resulting code better than in the case of [`chunks_mut`].
734 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
735 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
736 /// the slice of the slice.
740 /// Panics if `chunk_size` is 0.
745 /// let v = &mut [0, 0, 0, 0, 0];
746 /// let mut count = 1;
748 /// for chunk in v.chunks_exact_mut(2) {
749 /// for elem in chunk.iter_mut() {
754 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
757 /// [`chunks_mut`]: #method.chunks_mut
758 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
759 #[stable(feature = "chunks_exact", since = "1.31.0")]
761 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<T> {
762 assert!(chunk_size != 0);
763 let rem = self.len() % chunk_size;
764 let len = self.len() - rem;
765 let (fst, snd) = self.split_at_mut(len);
766 ChunksExactMut { v: fst, rem: snd, chunk_size }
769 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
772 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
773 /// slice, then the last chunk will not have length `chunk_size`.
775 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
776 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
781 /// Panics if `chunk_size` is 0.
786 /// let slice = ['l', 'o', 'r', 'e', 'm'];
787 /// let mut iter = slice.rchunks(2);
788 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
789 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
790 /// assert_eq!(iter.next().unwrap(), &['l']);
791 /// assert!(iter.next().is_none());
794 /// [`rchunks_exact`]: #method.rchunks_exact
795 /// [`chunks`]: #method.chunks
796 #[stable(feature = "rchunks", since = "1.31.0")]
798 pub fn rchunks(&self, chunk_size: usize) -> RChunks<T> {
// Zero-sized chunks would never make progress; reject them up front.
799 assert!(chunk_size != 0);
800 RChunks { v: self, chunk_size }
803 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
806 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
807 /// length of the slice, then the last chunk will not have length `chunk_size`.
809 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
810 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
811 /// beginning of the slice.
815 /// Panics if `chunk_size` is 0.
820 /// let v = &mut [0, 0, 0, 0, 0];
821 /// let mut count = 1;
823 /// for chunk in v.rchunks_mut(2) {
824 /// for elem in chunk.iter_mut() {
829 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
832 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
833 /// [`chunks_mut`]: #method.chunks_mut
834 #[stable(feature = "rchunks", since = "1.31.0")]
836 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<T> {
// Zero-sized chunks would never make progress; reject them up front.
837 assert!(chunk_size != 0);
838 RChunksMut { v: self, chunk_size }
841 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
842 /// beginning of the slice.
844 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
845 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
846 /// from the `remainder` function of the iterator.
848 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
849 /// resulting code better than in the case of [`chunks`].
851 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
852 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
853 /// slice of the slice.
857 /// Panics if `chunk_size` is 0.
862 /// let slice = ['l', 'o', 'r', 'e', 'm'];
863 /// let mut iter = slice.rchunks_exact(2);
864 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
865 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
866 /// assert!(iter.next().is_none());
867 /// assert_eq!(iter.remainder(), &['l']);
870 /// [`chunks`]: #method.chunks
871 /// [`rchunks`]: #method.rchunks
872 /// [`chunks_exact`]: #method.chunks_exact
873 #[stable(feature = "rchunks", since = "1.31.0")]
875 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<T> {
876 assert!(chunk_size != 0);
877 let rem = self.len() % chunk_size;
878 let (fst, snd) = self.split_at(rem);
879 RChunksExact { v: snd, rem: fst, chunk_size }
882 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
885 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
886 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
887 /// retrieved from the `into_remainder` function of the iterator.
889 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
890 /// resulting code better than in the case of [`chunks_mut`].
892 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
893 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
894 /// of the slice of the slice.
898 /// Panics if `chunk_size` is 0.
903 /// let v = &mut [0, 0, 0, 0, 0];
904 /// let mut count = 1;
906 /// for chunk in v.rchunks_exact_mut(2) {
907 /// for elem in chunk.iter_mut() {
912 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
915 /// [`chunks_mut`]: #method.chunks_mut
916 /// [`rchunks_mut`]: #method.rchunks_mut
917 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
918 #[stable(feature = "rchunks", since = "1.31.0")]
920 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<T> {
921 assert!(chunk_size != 0);
922 let rem = self.len() % chunk_size;
923 let (fst, snd) = self.split_at_mut(rem);
924 RChunksExactMut { v: snd, rem: fst, chunk_size }
927 /// Divides one slice into two at an index.
929 /// The first will contain all indices from `[0, mid)` (excluding
930 /// the index `mid` itself) and the second will contain all
931 /// indices from `[mid, len)` (excluding the index `len` itself).
935 /// Panics if `mid > len`.
940 /// let v = [1, 2, 3, 4, 5, 6];
943 /// let (left, right) = v.split_at(0);
944 /// assert!(left == []);
945 /// assert!(right == [1, 2, 3, 4, 5, 6]);
949 /// let (left, right) = v.split_at(2);
950 /// assert!(left == [1, 2]);
951 /// assert!(right == [3, 4, 5, 6]);
955 /// let (left, right) = v.split_at(6);
956 /// assert!(left == [1, 2, 3, 4, 5, 6]);
957 /// assert!(right == []);
960 #[stable(feature = "rust1", since = "1.0.0")]
962 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
963 (&self[..mid], &self[mid..])
966 /// Divides one mutable slice into two at an index.
968 /// The first will contain all indices from `[0, mid)` (excluding
969 /// the index `mid` itself) and the second will contain all
970 /// indices from `[mid, len)` (excluding the index `len` itself).
974 /// Panics if `mid > len`.
979 /// let mut v = [1, 0, 3, 0, 5, 6];
980 /// // scoped to restrict the lifetime of the borrows
982 /// let (left, right) = v.split_at_mut(2);
983 /// assert!(left == [1, 0]);
984 /// assert!(right == [3, 0, 5, 6]);
988 /// assert!(v == [1, 2, 3, 4, 5, 6]);
990 #[stable(feature = "rust1", since = "1.0.0")]
992 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
993 let len = self.len();
994 let ptr = self.as_mut_ptr();
// Two non-overlapping mutable slices are rebuilt from raw parts — sound
// because `[0, mid)` and `[mid, len)` are disjoint. NOTE(review): the
// `mid <= len` assertion backing the documented panic is elided here.
999 (from_raw_parts_mut(ptr, mid),
1000 from_raw_parts_mut(ptr.add(mid), len - mid))
1004 /// Returns an iterator over subslices separated by elements that match
1005 /// `pred`. The matched element is not contained in the subslices.
1010 /// let slice = [10, 40, 33, 20];
1011 /// let mut iter = slice.split(|num| num % 3 == 0);
1013 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1014 /// assert_eq!(iter.next().unwrap(), &[20]);
1015 /// assert!(iter.next().is_none());
1018 /// If the first element is matched, an empty slice will be the first item
1019 /// returned by the iterator. Similarly, if the last element in the slice
1020 /// is matched, an empty slice will be the last item returned by the
1024 /// let slice = [10, 40, 33];
1025 /// let mut iter = slice.split(|num| num % 3 == 0);
1027 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1028 /// assert_eq!(iter.next().unwrap(), &[]);
1029 /// assert!(iter.next().is_none());
1032 /// If two matched elements are directly adjacent, an empty slice will be
1033 /// present between them:
1036 /// let slice = [10, 6, 33, 20];
1037 /// let mut iter = slice.split(|num| num % 3 == 0);
1039 /// assert_eq!(iter.next().unwrap(), &[10]);
1040 /// assert_eq!(iter.next().unwrap(), &[]);
1041 /// assert_eq!(iter.next().unwrap(), &[20]);
1042 /// assert!(iter.next().is_none());
1044 #[stable(feature = "rust1", since = "1.0.0")]
1046 pub fn split<F>(&self, pred: F) -> Split<T, F>
// Body elided from this excerpt; presumably constructs
// `Split { v: self, pred, finished: false }` as `split_mut` does below.
1047 where F: FnMut(&T) -> bool
1056 /// Returns an iterator over mutable subslices separated by elements that
1057 /// match `pred`. The matched element is not contained in the subslices.
1062 /// let mut v = [10, 40, 30, 20, 60, 50];
1064 /// for group in v.split_mut(|num| *num % 3 == 0) {
1067 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1069 #[stable(feature = "rust1", since = "1.0.0")]
1071 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<T, F>
1072 where F: FnMut(&T) -> bool
// `finished` tracks whether the iterator has yielded its last subslice.
1074 SplitMut { v: self, pred, finished: false }
1077 /// Returns an iterator over subslices separated by elements that match
1078 /// `pred`, starting at the end of the slice and working backwards.
1079 /// The matched element is not contained in the subslices.
1084 /// let slice = [11, 22, 33, 0, 44, 55];
1085 /// let mut iter = slice.rsplit(|num| *num == 0);
1087 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1088 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1089 /// assert_eq!(iter.next(), None);
1092 /// As with `split()`, if the first or last element is matched, an empty
1093 /// slice will be the first (or last) item returned by the iterator.
1096 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1097 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1098 /// assert_eq!(it.next().unwrap(), &[]);
1099 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1100 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1101 /// assert_eq!(it.next().unwrap(), &[]);
1102 /// assert_eq!(it.next(), None);
1104 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1106 pub fn rsplit<F>(&self, pred: F) -> RSplit<T, F>
1107 where F: FnMut(&T) -> bool
// `RSplit` is simply the forward splitter consumed from the back.
1109 RSplit { inner: self.split(pred) }
1112 /// Returns an iterator over mutable subslices separated by elements that
1113 /// match `pred`, starting at the end of the slice and working
1114 /// backwards. The matched element is not contained in the subslices.
1119 /// let mut v = [100, 400, 300, 200, 600, 500];
1121 /// let mut count = 0;
1122 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1124 /// group[0] = count;
1126 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1129 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1131 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<T, F>
1132 where F: FnMut(&T) -> bool
// Mutable counterpart: wraps the forward mutable splitter, reversed.
1134 RSplitMut { inner: self.split_mut(pred) }
1137 /// Returns an iterator over subslices separated by elements that match
1138 /// `pred`, limited to returning at most `n` items. The matched element is
1139 /// not contained in the subslices.
1141 /// The last element returned, if any, will contain the remainder of the
1146 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1147 /// `[20, 60, 50]`):
1150 /// let v = [10, 40, 30, 20, 60, 50];
1152 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1153 /// println!("{:?}", group);
1156 #[stable(feature = "rust1", since = "1.0.0")]
1158 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<T, F>
1159 where F: FnMut(&T) -> bool
// Wraps the unbounded splitter in `GenericSplitN`, which enforces the
// item limit. NOTE(review): the `count: n` field line is elided here.
1162 inner: GenericSplitN {
1163 iter: self.split(pred),
1169 /// Returns an iterator over subslices separated by elements that match
1170 /// `pred`, limited to returning at most `n` items. The matched element is
1171 /// not contained in the subslices.
1173 /// The last element returned, if any, will contain the remainder of the
1179 /// let mut v = [10, 40, 30, 20, 60, 50];
1181 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1184 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1186 #[stable(feature = "rust1", since = "1.0.0")]
1188 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<T, F>
1189 where F: FnMut(&T) -> bool
// Mutable counterpart of `splitn`; the limit field (`count: n`) is elided.
1192 inner: GenericSplitN {
1193 iter: self.split_mut(pred),
1199 /// Returns an iterator over subslices separated by elements that match
1200 /// `pred` limited to returning at most `n` items. This starts at the end of
1201 /// the slice and works backwards. The matched element is not contained in
1204 /// The last element returned, if any, will contain the remainder of the
1209 /// Print the slice split once, starting from the end, by numbers divisible
1210 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1213 /// let v = [10, 40, 30, 20, 60, 50];
1215 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1216 /// println!("{:?}", group);
1219 #[stable(feature = "rust1", since = "1.0.0")]
1221 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<T, F>
1222 where F: FnMut(&T) -> bool
// Reverse splitter with an item limit; the `count: n` field is elided.
1225 inner: GenericSplitN {
1226 iter: self.rsplit(pred),
1232 /// Returns an iterator over subslices separated by elements that match
1233 /// `pred` limited to returning at most `n` items. This starts at the end of
1234 /// the slice and works backwards. The matched element is not contained in
1237 /// The last element returned, if any, will contain the remainder of the
1243 /// let mut s = [10, 40, 30, 20, 60, 50];
1245 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1248 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1250 #[stable(feature = "rust1", since = "1.0.0")]
1252 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<T, F>
1253 where F: FnMut(&T) -> bool
// Mutable reverse splitter with an item limit; `count: n` field elided.
1256 inner: GenericSplitN {
1257 iter: self.rsplit_mut(pred),
1263 /// Returns `true` if the slice contains an element with the given value.
1268 /// let v = [10, 40, 30];
1269 /// assert!(v.contains(&30));
1270 /// assert!(!v.contains(&50));
1272 #[stable(feature = "rust1", since = "1.0.0")]
1273 pub fn contains(&self, x: &T) -> bool
// NOTE(review): the `where T: PartialEq` bound is elided from this excerpt.
// Delegates to a `slice_contains` helper defined elsewhere in the module.
1276 x.slice_contains(self)
1279 /// Returns `true` if `needle` is a prefix of the slice.
1284 /// let v = [10, 40, 30];
1285 /// assert!(v.starts_with(&[10]));
1286 /// assert!(v.starts_with(&[10, 40]));
1287 /// assert!(!v.starts_with(&[50]));
1288 /// assert!(!v.starts_with(&[10, 50]));
1291 /// Always returns `true` if `needle` is an empty slice:
1294 /// let v = &[10, 40, 30];
1295 /// assert!(v.starts_with(&[]));
1296 /// let v: &[u8] = &[];
1297 /// assert!(v.starts_with(&[]));
1299 #[stable(feature = "rust1", since = "1.0.0")]
1300 pub fn starts_with(&self, needle: &[T]) -> bool
// The length check both guards the `..n` slicing below and makes an empty
// needle trivially match (n == 0 compares two empty slices).
1303 let n = needle.len();
1304 self.len() >= n && needle == &self[..n]
1307 /// Returns `true` if `needle` is a suffix of the slice.
1312 /// let v = [10, 40, 30];
1313 /// assert!(v.ends_with(&[30]));
1314 /// assert!(v.ends_with(&[40, 30]));
1315 /// assert!(!v.ends_with(&[50]));
1316 /// assert!(!v.ends_with(&[50, 30]));
1319 /// Always returns `true` if `needle` is an empty slice:
1322 /// let v = &[10, 40, 30];
1323 /// assert!(v.ends_with(&[]));
1324 /// let v: &[u8] = &[];
1325 /// assert!(v.ends_with(&[]));
1327 #[stable(feature = "rust1", since = "1.0.0")]
1328 pub fn ends_with(&self, needle: &[T]) -> bool
1331 let (m, n) = (self.len(), needle.len());
// `m >= n` both answers the trivially-false case and guarantees that the
// `m-n` suffix index below cannot underflow.
1332 m >= n && needle == &self[m-n..]
1335 /// Binary searches this sorted slice for a given element.
1337 /// If the value is found then [`Result::Ok`] is returned, containing the
1338 /// index of the matching element. If there are multiple matches, then any
1339 /// one of the matches could be returned. If the value is not found then
1340 /// [`Result::Err`] is returned, containing the index where a matching
1341 /// element could be inserted while maintaining sorted order.
1345 /// Looks up a series of four elements. The first is found, with a
1346 /// uniquely determined position; the second and third are not
1347 /// found; the fourth could match any position in `[1, 4]`.
1350 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1352 /// assert_eq!(s.binary_search(&13), Ok(9));
1353 /// assert_eq!(s.binary_search(&4), Err(7));
1354 /// assert_eq!(s.binary_search(&100), Err(13));
1355 /// let r = s.binary_search(&1);
1356 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1358 #[stable(feature = "rust1", since = "1.0.0")]
1359 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
// Thin wrapper: all the work happens in `binary_search_by`, using the total
// order provided by `Ord::cmp` against the target `x`.
1362 self.binary_search_by(|p| p.cmp(x))
1365 /// Binary searches this sorted slice with a comparator function.
1367 /// The comparator function should implement an order consistent
1368 /// with the sort order of the underlying slice, returning an
1369 /// order code that indicates whether its argument is `Less`,
1370 /// `Equal` or `Greater` than the desired target.
1372 /// If the value is found then [`Result::Ok`] is returned, containing the
1373 /// index of the matching element. If there are multiple matches, then any
1374 /// one of the matches could be returned. If the value is not found then
1375 /// [`Result::Err`] is returned, containing the index where a matching
1376 /// element could be inserted while maintaining sorted order.
1380 /// Looks up a series of four elements. The first is found, with a
1381 /// uniquely determined position; the second and third are not
1382 /// found; the fourth could match any position in `[1, 4]`.
1385 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1388 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1390 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1392 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1394 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1395 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1397 #[stable(feature = "rust1", since = "1.0.0")]
1399 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1400 where F: FnMut(&'a T) -> Ordering
1403 let mut size = s.len();
1407 let mut base = 0usize;
1409 let half = size / 2;
1410 let mid = base + half;
1411 // mid is always in [0, size), that means mid is >= 0 and < size.
1412 // mid >= 0: by definition
1413 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
1414 let cmp = f(unsafe { s.get_unchecked(mid) });
// Keep the lower half when the probe compares Greater than the target,
// otherwise move `base` up to `mid`; the live window halves each round.
1415 base = if cmp == Greater { base } else { mid };
1418 // base is always in [0, size) because base <= mid.
1419 let cmp = f(unsafe { s.get_unchecked(base) });
// Final probe: `Less` means the insertion point is *after* `base`, which is
// what the `+ (cmp == Less) as usize` encodes.
1420 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1424 /// Binary searches this sorted slice with a key extraction function.
1426 /// Assumes that the slice is sorted by the key, for instance with
1427 /// [`sort_by_key`] using the same key extraction function.
1429 /// If the value is found then [`Result::Ok`] is returned, containing the
1430 /// index of the matching element. If there are multiple matches, then any
1431 /// one of the matches could be returned. If the value is not found then
1432 /// [`Result::Err`] is returned, containing the index where a matching
1433 /// element could be inserted while maintaining sorted order.
1435 /// [`sort_by_key`]: #method.sort_by_key
1439 /// Looks up a series of four elements in a slice of pairs sorted by
1440 /// their second elements. The first is found, with a uniquely
1441 /// determined position; the second and third are not found; the
1442 /// fourth could match any position in `[1, 4]`.
1445 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1446 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1447 /// (1, 21), (2, 34), (4, 55)];
1449 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1450 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1451 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1452 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1453 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1455 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1457 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1458 where F: FnMut(&'a T) -> B,
// Adapter over `binary_search_by`: each probed element is mapped through `f`
// and compared to `b`, so `f` runs once per probe (O(log n) calls total).
1461 self.binary_search_by(|k| f(k).cmp(b))
1464 /// Sorts the slice, but may not preserve the order of equal elements.
1466 /// This sort is unstable (i.e., may reorder equal elements), in-place
1467 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1469 /// # Current implementation
1471 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1472 /// which combines the fast average case of randomized quicksort with the fast worst case of
1473 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1474 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1475 /// deterministic behavior.
1477 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1478 /// slice consists of several concatenated sorted sequences.
1483 /// let mut v = [-5, 4, 1, -3, 2];
1485 /// v.sort_unstable();
1486 /// assert!(v == [-5, -3, 1, 2, 4]);
1489 /// [pdqsort]: https://github.com/orlp/pdqsort
1490 #[stable(feature = "sort_unstable", since = "1.20.0")]
1492 pub fn sort_unstable(&mut self)
// The sort core takes an "is less" predicate; `a.lt(b)` supplies it straight
// from `Ord`/`PartialOrd`. The pdqsort described above lives in `sort`.
1495 sort::quicksort(self, |a, b| a.lt(b));
1498 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1501 /// This sort is unstable (i.e., may reorder equal elements), in-place
1502 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1504 /// The comparator function must define a total ordering for the elements in the slice. If
1505 /// the ordering is not total, the order of the elements is unspecified. An order is a
1506 /// total order if it is (for all a, b and c):
1508 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1509 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1511 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1512 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1515 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1516 /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
1517 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1520 /// # Current implementation
1522 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1523 /// which combines the fast average case of randomized quicksort with the fast worst case of
1524 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1525 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1526 /// deterministic behavior.
1528 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1529 /// slice consists of several concatenated sorted sequences.
1534 /// let mut v = [5, 4, 1, 3, 2];
1535 /// v.sort_unstable_by(|a, b| a.cmp(b));
1536 /// assert!(v == [1, 2, 3, 4, 5]);
1538 /// // reverse sorting
1539 /// v.sort_unstable_by(|a, b| b.cmp(a));
1540 /// assert!(v == [5, 4, 3, 2, 1]);
1543 /// [pdqsort]: https://github.com/orlp/pdqsort
1544 #[stable(feature = "sort_unstable", since = "1.20.0")]
1546 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1547 where F: FnMut(&T, &T) -> Ordering
// The user's three-way comparator is adapted into the boolean "is less"
// predicate the sort core expects.
1549 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1552 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1555 /// This sort is unstable (i.e., may reorder equal elements), in-place
1556 /// (i.e., does not allocate), and `O(m n log(m n))` worst-case, where the key function is
1559 /// # Current implementation
1561 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1562 /// which combines the fast average case of randomized quicksort with the fast worst case of
1563 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1564 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1565 /// deterministic behavior.
1570 /// let mut v = [-5i32, 4, 1, -3, 2];
1572 /// v.sort_unstable_by_key(|k| k.abs());
1573 /// assert!(v == [1, 2, -3, 4, -5]);
1576 /// [pdqsort]: https://github.com/orlp/pdqsort
1577 #[stable(feature = "sort_unstable", since = "1.20.0")]
1579 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
1580 where F: FnMut(&T) -> K, K: Ord
// Note `f` is re-invoked on *both* operands of every comparison (keys are
// not cached), which is where the documented `O(m n log(m n))` cost comes from.
1582 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1585 /// Moves all consecutive repeated elements to the end of the slice according to the
1586 /// [`PartialEq`] trait implementation.
1588 /// Returns two slices. The first contains no consecutive repeated elements.
1589 /// The second contains all the duplicates in no specified order.
1591 /// If the slice is sorted, the first returned slice contains no duplicates.
1596 /// #![feature(slice_partition_dedup)]
1598 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1600 /// let (dedup, duplicates) = slice.partition_dedup();
1602 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1603 /// assert_eq!(duplicates, [2, 3, 1]);
1605 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1607 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
// Just `partition_dedup_by` with plain `==` as the equality relation.
1610 self.partition_dedup_by(|a, b| a == b)
1613 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1614 /// a given equality relation.
1616 /// Returns two slices. The first contains no consecutive repeated elements.
1617 /// The second contains all the duplicates in no specified order.
1619 /// The `same_bucket` function is passed references to two elements from the slice and
1620 /// must determine if the elements compare equal. The elements are passed in opposite order
1621 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1622 /// to the end of the slice.
1624 /// If the slice is sorted, the first returned slice contains no duplicates.
1629 /// #![feature(slice_partition_dedup)]
1631 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1633 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1635 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1636 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1638 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1640 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1641 where F: FnMut(&mut T, &mut T) -> bool
1643 // Although we have a mutable reference to `self`, we cannot make
1644 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1645 // must ensure that the slice is in a valid state at all times.
1647 // The way that we handle this is by using swaps; we iterate
1648 // over all the elements, swapping as we go so that at the end
1649 // the elements we wish to keep are in the front, and those we
1650 // wish to reject are at the back. We can then split the slice.
1651 // This operation is still O(n).
1653 // Example: We start in this state, where `r` represents "next
1654 // read" and `w` represents "next write".
1657 // +---+---+---+---+---+---+
1658 // | 0 | 1 | 1 | 2 | 3 | 3 |
1659 // +---+---+---+---+---+---+
1662 // Comparing self[r] against self[w-1], this is not a duplicate, so
1663 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1664 // r and w, leaving us with:
1667 // +---+---+---+---+---+---+
1668 // | 0 | 1 | 1 | 2 | 3 | 3 |
1669 // +---+---+---+---+---+---+
1672 // Comparing self[r] against self[w-1], this value is a duplicate,
1673 // so we increment `r` but leave everything else unchanged:
1676 // +---+---+---+---+---+---+
1677 // | 0 | 1 | 1 | 2 | 3 | 3 |
1678 // +---+---+---+---+---+---+
1681 // Comparing self[r] against self[w-1], this is not a duplicate,
1682 // so swap self[r] and self[w] and advance r and w:
1685 // +---+---+---+---+---+---+
1686 // | 0 | 1 | 2 | 1 | 3 | 3 |
1687 // +---+---+---+---+---+---+
1690 // Not a duplicate, repeat:
1693 // +---+---+---+---+---+---+
1694 // | 0 | 1 | 2 | 3 | 1 | 3 |
1695 // +---+---+---+---+---+---+
1698 // Duplicate, advance r. End of slice. Split at w.
1700 let len = self.len()
// (Empty slices take the early return below: there is nothing to compare.)
1702 return (self, &mut [])
1705 let ptr = self.as_mut_ptr();
1706 let mut next_read: usize = 1;
1707 let mut next_write: usize = 1;
// Invariant: self[..next_write] is the deduplicated prefix and
// 1 <= next_write <= next_read <= len throughout the loop.
1710 // Avoid bounds checks by using raw pointers.
1711 while next_read < len {
// NOTE(review): both indices are < len (and next_write >= 1), so these raw
// offsets stay in-bounds -- the enclosing unsafe region relies on that.
1712 let ptr_read = ptr.add(next_read);
1713 let prev_ptr_write = ptr.add(next_write - 1);
1714 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
1715 if next_read != next_write {
1716 let ptr_write = prev_ptr_write.offset(1);
1717 mem::swap(&mut *ptr_read, &mut *ptr_write);
// Split point `next_write` separates the kept prefix from the duplicates.
1725 self.split_at_mut(next_write)
1728 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1729 /// to the same key.
1731 /// Returns two slices. The first contains no consecutive repeated elements.
1732 /// The second contains all the duplicates in no specified order.
1734 /// If the slice is sorted, the first returned slice contains no duplicates.
1739 /// #![feature(slice_partition_dedup)]
1741 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
1743 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
1745 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
1746 /// assert_eq!(duplicates, [21, 30, 13]);
1748 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1750 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
1751 where F: FnMut(&mut T) -> K,
// Delegates to `partition_dedup_by`, comparing extracted keys; `key` is
// recomputed for both elements of every adjacent pair.
1754 self.partition_dedup_by(|a, b| key(a) == key(b))
1757 /// Rotates the slice in-place such that the first `mid` elements of the
1758 /// slice move to the end while the last `self.len() - mid` elements move to
1759 /// the front. After calling `rotate_left`, the element previously at index
1760 /// `mid` will become the first element in the slice.
1764 /// This function will panic if `mid` is greater than the length of the
1765 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
1770 /// Takes linear (in `self.len()`) time.
1775 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1776 /// a.rotate_left(2);
1777 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
1780 /// Rotating a subslice:
1783 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1784 /// a[1..5].rotate_left(1);
1785 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
1787 #[stable(feature = "slice_rotate", since = "1.26.0")]
1788 pub fn rotate_left(&mut self, mid: usize) {
// The assert both enforces the documented panic condition and makes the
// `len - mid` subtraction below safe from underflow.
1789 assert!(mid <= self.len());
1790 let k = self.len() - mid;
1793 let p = self.as_mut_ptr();
// `p.add(mid)` is the pivot: `mid` elements before it, `k` after; the helper
// exchanges the two runs in place.
1794 rotate::ptr_rotate(mid, p.add(mid), k);
1798 /// Rotates the slice in-place such that the first `self.len() - k`
1799 /// elements of the slice move to the end while the last `k` elements move
1800 /// to the front. After calling `rotate_right`, the element previously at
1801 /// index `self.len() - k` will become the first element in the slice.
1805 /// This function will panic if `k` is greater than the length of the
1806 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
1811 /// Takes linear (in `self.len()`) time.
1816 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1817 /// a.rotate_right(2);
1818 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
1821 /// Rotate a subslice:
1824 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
1825 /// a[1..5].rotate_right(1);
1826 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
1828 #[stable(feature = "slice_rotate", since = "1.26.0")]
1829 pub fn rotate_right(&mut self, k: usize) {
// Mirror image of `rotate_left`: a right-rotation by `k` is a left-rotation
// by `len - k`, so the same pivot-based helper is reused.
1830 assert!(k <= self.len());
1831 let mid = self.len() - k;
1834 let p = self.as_mut_ptr();
1835 rotate::ptr_rotate(mid, p.add(mid), k);
1839 /// Copies the elements from `src` into `self`.
1841 /// The length of `src` must be the same as `self`.
1843 /// If `src` implements `Copy`, it can be more performant to use
1844 /// [`copy_from_slice`].
1848 /// This function will panic if the two slices have different lengths.
1852 /// Cloning two elements from a slice into another:
1855 /// let src = [1, 2, 3, 4];
1856 /// let mut dst = [0, 0];
1858 /// // Because the slices have to be the same length,
1859 /// // we slice the source slice from four elements
1860 /// // to two. It will panic if we don't do this.
1861 /// dst.clone_from_slice(&src[2..]);
1863 /// assert_eq!(src, [1, 2, 3, 4]);
1864 /// assert_eq!(dst, [3, 4]);
1867 /// Rust enforces that there can only be one mutable reference with no
1868 /// immutable references to a particular piece of data in a particular
1869 /// scope. Because of this, attempting to use `clone_from_slice` on a
1870 /// single slice will result in a compile failure:
1873 /// let mut slice = [1, 2, 3, 4, 5];
1875 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
1878 /// To work around this, we can use [`split_at_mut`] to create two distinct
1879 /// sub-slices from a slice:
1882 /// let mut slice = [1, 2, 3, 4, 5];
1885 /// let (left, right) = slice.split_at_mut(2);
1886 /// left.clone_from_slice(&right[1..]);
1889 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1892 /// [`copy_from_slice`]: #method.copy_from_slice
1893 /// [`split_at_mut`]: #method.split_at_mut
1894 #[stable(feature = "clone_from_slice", since = "1.7.0")]
1895 pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
1896 assert!(self.len() == src.len(),
1897 "destination and source slices have different lengths");
1898 // NOTE: We need to explicitly slice them to the same length
1899 // for bounds checking to be elided, and the optimizer will
1900 // generate memcpy for simple cases (for example T = u8).
1901 let len = self.len();
1902 let src = &src[..len];
// `clone_from` (rather than `clone` + assignment) lets `T` reuse the
// destination's existing resources where its impl supports that.
1904 self[i].clone_from(&src[i]);
1909 /// Copies all elements from `src` into `self`, using a memcpy.
1911 /// The length of `src` must be the same as `self`.
1913 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
1917 /// This function will panic if the two slices have different lengths.
1921 /// Copying two elements from a slice into another:
1924 /// let src = [1, 2, 3, 4];
1925 /// let mut dst = [0, 0];
1927 /// // Because the slices have to be the same length,
1928 /// // we slice the source slice from four elements
1929 /// // to two. It will panic if we don't do this.
1930 /// dst.copy_from_slice(&src[2..]);
1932 /// assert_eq!(src, [1, 2, 3, 4]);
1933 /// assert_eq!(dst, [3, 4]);
1936 /// Rust enforces that there can only be one mutable reference with no
1937 /// immutable references to a particular piece of data in a particular
1938 /// scope. Because of this, attempting to use `copy_from_slice` on a
1939 /// single slice will result in a compile failure:
1942 /// let mut slice = [1, 2, 3, 4, 5];
1944 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
1947 /// To work around this, we can use [`split_at_mut`] to create two distinct
1948 /// sub-slices from a slice:
1951 /// let mut slice = [1, 2, 3, 4, 5];
1954 /// let (left, right) = slice.split_at_mut(2);
1955 /// left.copy_from_slice(&right[1..]);
1958 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
1961 /// [`clone_from_slice`]: #method.clone_from_slice
1962 /// [`split_at_mut`]: #method.split_at_mut
1963 #[stable(feature = "copy_from_slice", since = "1.9.0")]
1964 pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
1965 assert_eq!(self.len(), src.len(),
1966 "destination and source slices have different lengths");
// NOTE(review): lengths are asserted equal above, and `&mut self` vs. `&src`
// cannot alias under Rust's borrow rules, so a non-overlapping memcpy is sound.
1968 ptr::copy_nonoverlapping(
1969 src.as_ptr(), self.as_mut_ptr(), self.len());
1973 /// Copies elements from one part of the slice to another part of itself,
1974 /// using a memmove.
1976 /// `src` is the range within `self` to copy from. `dest` is the starting
1977 /// index of the range within `self` to copy to, which will have the same
1978 /// length as `src`. The two ranges may overlap. The ends of the two ranges
1979 /// must be less than or equal to `self.len()`.
1983 /// This function will panic if either range exceeds the end of the slice,
1984 /// or if the end of `src` is before the start.
1988 /// Copying four bytes within a slice:
1991 /// # #![feature(copy_within)]
1992 /// let mut bytes = *b"Hello, World!";
1994 /// bytes.copy_within(1..5, 8);
1996 /// assert_eq!(&bytes, b"Hello, Wello!");
1998 #[unstable(feature = "copy_within", issue = "54236")]
1999 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
// Resolve the generic bounds into a half-open [src_start, src_end) window;
// the overflow-fail path presumably guards a checked `+1` adjustment for the
// inclusive/exclusive bound cases -- TODO confirm against the full source.
2003 let src_start = match src.start_bound() {
2004 ops::Bound::Included(&n) => n,
2005 ops::Bound::Excluded(&n) => n
2007 .unwrap_or_else(|| slice_index_overflow_fail()),
2008 ops::Bound::Unbounded => 0,
2010 let src_end = match src.end_bound() {
2011 ops::Bound::Included(&n) => n
2013 .unwrap_or_else(|| slice_index_overflow_fail()),
2014 ops::Bound::Excluded(&n) => n,
2015 ops::Bound::Unbounded => self.len(),
// The three asserts establish: a non-negative count, a source window inside
// the slice, and a destination window that also fits (the `len - count` form
// avoids overflowing `dest + count`).
2017 assert!(src_start <= src_end, "src end is before src start");
2018 assert!(src_end <= self.len(), "src is out of bounds");
2019 let count = src_end - src_start;
2020 assert!(dest <= self.len() - count, "dest is out of bounds");
// Per the docs above the ranges may overlap, so the copy must be (and per the
// "using a memmove" doc, is) an overlap-tolerant copy, not memcpy.
2023 self.get_unchecked(src_start),
2024 self.get_unchecked_mut(dest),
2030 /// Swaps all elements in `self` with those in `other`.
2032 /// The length of `other` must be the same as `self`.
2036 /// This function will panic if the two slices have different lengths.
2040 /// Swapping two elements across slices:
2043 /// let mut slice1 = [0, 0];
2044 /// let mut slice2 = [1, 2, 3, 4];
2046 /// slice1.swap_with_slice(&mut slice2[2..]);
2048 /// assert_eq!(slice1, [3, 4]);
2049 /// assert_eq!(slice2, [1, 2, 0, 0]);
2052 /// Rust enforces that there can only be one mutable reference to a
2053 /// particular piece of data in a particular scope. Because of this,
2054 /// attempting to use `swap_with_slice` on a single slice will result in
2055 /// a compile failure:
2058 /// let mut slice = [1, 2, 3, 4, 5];
2059 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2062 /// To work around this, we can use [`split_at_mut`] to create two distinct
2063 /// mutable sub-slices from a slice:
2066 /// let mut slice = [1, 2, 3, 4, 5];
2069 /// let (left, right) = slice.split_at_mut(2);
2070 /// left.swap_with_slice(&mut right[1..]);
2073 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2076 /// [`split_at_mut`]: #method.split_at_mut
2077 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2078 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2079 assert!(self.len() == other.len(),
2080 "destination and source slices have different lengths");
// NOTE(review): two live `&mut` slices cannot overlap, and the assert above
// equalizes the lengths, so the non-overlapping bulk swap is sound.
2082 ptr::swap_nonoverlapping(
2083 self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2087 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2088 fn align_to_offsets<U>(&self) -> (usize, usize) {
2089 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2090 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2092 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2093 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2094 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2096 // Formula to calculate this is:
2098 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2099 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2101 // Expanded and simplified:
2103 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2104 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2106 // Luckily since all this is constant-evaluated... performance here matters not!
2108 fn gcd(a: usize, b: usize) -> usize {
2109 // iterative stein’s algorithm
2110 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2111 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
// The zero checks below must run before `cttz_nonzero`, whose contract
// forbids a zero argument.
2112 let (ctz_a, mut ctz_b) = unsafe {
2113 if a == 0 { return b; }
2114 if b == 0 { return a; }
2115 (::intrinsics::cttz_nonzero(a), ::intrinsics::cttz_nonzero(b))
2117 let k = ctz_a.min(ctz_b);
2118 let mut a = a >> ctz_a;
2121 // remove all factors of 2 from b
2124 ::mem::swap(&mut a, &mut b);
2131 ctz_b = ::intrinsics::cttz_nonzero(b);
2136 let gcd: usize = gcd(::mem::size_of::<T>(), ::mem::size_of::<U>());
2137 let ts: usize = ::mem::size_of::<U>() / gcd;
// One "group" of `ts` Ts occupies exactly the same number of bytes as `us`
// Us (both equal the lcm of the two sizes).
2138 let us: usize = ::mem::size_of::<T>() / gcd;
2140 // Armed with this knowledge, we can find how many `U`s we can fit!
2141 let us_len = self.len() / ts * us;
2142 // And how many `T`s will be in the trailing slice!
2143 let ts_len = self.len() % ts;
2147 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2150 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2151 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2152 /// middle slice the greatest length possible for a given type and input slice, but only
2153 /// your algorithm's performance should depend on that, not its correctness.
2155 /// This method has no purpose when either input element `T` or output element `U` are
2156 /// zero-sized and will return the original slice without splitting anything.
2160 /// This method is essentially a `transmute` with respect to the elements in the returned
2161 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2169 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2170 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2171 /// // less_efficient_algorithm_for_bytes(prefix);
2172 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2173 /// // less_efficient_algorithm_for_bytes(suffix);
2176 #[stable(feature = "slice_align_to", since = "1.30.0")]
2177 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2178 // Note that most of this function will be constant-evaluated,
2179 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2180 // handle ZSTs specially, which is – don't handle them at all.
2181 return (self, &[], &[]);
2184 // First, find at what point do we split between the first and 2nd slice. Easy with
2185 // ptr.align_offset.
2186 let ptr = self.as_ptr();
2187 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
// The first `U`-aligned position lies beyond the slice: no aligned middle
// exists, so everything stays in the prefix.
2188 if offset > self.len() {
2191 let (left, rest) = self.split_at(offset);
2192 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2193 let (us_len, ts_len) = rest.align_to_offsets::<U>();
// The `us_len` Us occupy the front of `rest` and the final `ts_len` Ts form
// the suffix; the two raw ranges are disjoint by construction.
2195 from_raw_parts(rest.as_ptr() as *const U, us_len),
2196 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len))
2200 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2203 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2204 /// slice of a new type, and the suffix slice. The method does a best effort to make the
2205 /// middle slice the greatest length possible for a given type and input slice, but only
2206 /// your algorithm's performance should depend on that, not its correctness.
2208 /// This method has no purpose when either input element `T` or output element `U` are
2209 /// zero-sized and will return the original slice without splitting anything.
2213 /// This method is essentially a `transmute` with respect to the elements in the returned
2214 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2222 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2223 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2224 /// // less_efficient_algorithm_for_bytes(prefix);
2225 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2226 /// // less_efficient_algorithm_for_bytes(suffix);
2229 #[stable(feature = "slice_align_to", since = "1.30.0")]
2230 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2231 // Note that most of this function will be constant-evaluated,
2232 if ::mem::size_of::<U>() == 0 || ::mem::size_of::<T>() == 0 {
2233 // handle ZSTs specially, which is – don't handle them at all.
2234 return (self, &mut [], &mut []);
2237 // First, find at what point do we split between the first and 2nd slice. Easy with
2238 // ptr.align_offset.
2239 let ptr = self.as_ptr();
2240 let offset = ::ptr::align_offset(ptr, ::mem::align_of::<U>());
2241 if offset > self.len() {
// No `U`-aligned position inside the slice: the whole thing is the prefix.
2242 (self, &mut [], &mut [])
2244 let (left, rest) = self.split_at_mut(offset);
2245 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2246 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2247 let mut_ptr = rest.as_mut_ptr();
// NOTE(review): the prefix, middle, and suffix byte ranges are pairwise
// disjoint, which is what justifies handing out three distinct `&mut` slices.
2249 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2250 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len))
2254 /// Checks if the elements of this slice are sorted.
2256 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2257 /// slice yields exactly zero or one element, `true` is returned.
2259 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2260 /// implies that this function returns `false` if any two consecutive items are not
2266 /// #![feature(is_sorted)]
2267 /// let empty: [i32; 0] = [];
2269 /// assert!([1, 2, 2, 9].is_sorted());
2270 /// assert!(![1, 3, 2, 4].is_sorted());
2271 /// assert!([0].is_sorted());
2272 /// assert!(empty.is_sorted());
2273 /// assert!(![0.0, 1.0, std::f32::NAN].is_sorted());
2276 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2277 pub fn is_sorted(&self) -> bool
// Delegates to `is_sorted_by` with `partial_cmp`, which is how incomparable
// pairs (e.g. a NaN neighbor) end up reported as unsorted.
2281 self.is_sorted_by(|a, b| a.partial_cmp(b))
2284 /// Checks if the elements of this slice are sorted using the given comparator function.
2286 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2287 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2288 /// [`is_sorted`]; see its documentation for more information.
2290 /// [`is_sorted`]: #method.is_sorted
2291 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2292 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2294 F: FnMut(&T, &T) -> Option<Ordering>
// Walk every adjacent pair; a `Greater` result means out of order. An
// incomparable pair (`None`) presumably also counts as unsorted, matching
// the `is_sorted` PartialOrd note -- TODO confirm against the full body.
2296 for pair in self.windows(2) {
2297 if compare(&pair[0], &pair[1])
2298 .map(|o| o == Ordering::Greater)
2308 /// Checks if the elements of this slice are sorted using the given key extraction function.
2310 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2311 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2312 /// documentation for more information.
2314 /// [`is_sorted`]: #method.is_sorted
2319 /// #![feature(is_sorted)]
2321 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2322 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2325 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2326 pub fn is_sorted_by_key<F, K>(&self, mut f: F) -> bool
// Delegates to `is_sorted_by`; note `f` runs on both elements of each
// adjacent pair, so interior elements have their key computed twice.
2331 self.is_sorted_by(|a, b| f(a).partial_cmp(&f(b)))
// Inherent ASCII methods on `[u8]`; the lang item ties this impl to the primitive type.
2335 #[lang = "slice_u8"]
2338 /// Checks if all bytes in this slice are within the ASCII range.
2339 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
// A slice is ASCII iff every individual byte is ASCII; short-circuits on the
// first non-ASCII byte.
2341 pub fn is_ascii(&self) -> bool {
2342 self.iter().all(|b| b.is_ascii())
2345 /// Checks that two slices are an ASCII case-insensitive match.
2347 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2348 /// but without allocating and copying temporaries.
2349 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
// Cheap length check first, then a pairwise byte comparison that stops at the
// first mismatching pair.
2351 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
2352 self.len() == other.len() &&
2353 self.iter().zip(other).all(|(a, b)| {
2354 a.eq_ignore_ascii_case(b)
2358 /// Converts this slice to its ASCII upper case equivalent in-place.
2360 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2361 /// but non-ASCII letters are unchanged.
2363 /// To return a new uppercased value without modifying the existing one, use
2364 /// [`to_ascii_uppercase`].
2366 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2367 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
// In-place mutation, one byte at a time; never allocates.
2369 pub fn make_ascii_uppercase(&mut self) {
2371 byte.make_ascii_uppercase();
2375 /// Converts this slice to its ASCII lower case equivalent in-place.
2377 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2378 /// but non-ASCII letters are unchanged.
2380 /// To return a new lowercased value without modifying the existing one, use
2381 /// [`to_ascii_lowercase`].
2383 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2384 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
// Mirror of `make_ascii_uppercase` for the lower-case direction.
2386 pub fn make_ascii_lowercase(&mut self) {
2388 byte.make_ascii_lowercase();
// `slice[index]` sugar: both operators forward to the `SliceIndex` trait below, so
// `usize` and all six range types share a single pair of code paths.
2394 #[stable(feature = "rust1", since = "1.0.0")]
2395 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2396 impl<T, I> ops::Index<I> for [T]
2397 where I: SliceIndex<[T]>
2399 type Output = I::Output;
// Panics on out-of-bounds — the panic lives in `SliceIndex::index`, not here.
2402 fn index(&self, index: I) -> &I::Output {
2407 #[stable(feature = "rust1", since = "1.0.0")]
2408 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
2409 impl<T, I> ops::IndexMut<I> for [T]
2410 where I: SliceIndex<[T]>
// Mutable counterpart; same delegation, same panic behavior.
2413 fn index_mut(&mut self, index: I) -> &mut I::Output {
2414 index.index_mut(self)
/// Diverging helper: reports an index that is at or past the end of a slice of
/// the given length. Kept as a separate function so the panic machinery stays
/// out of the hot indexing paths.
fn slice_index_len_fail(index: usize, len: usize) -> ! {
    panic!("index {} out of range for slice of length {}", index, len)
}
/// Diverging helper: reports a range whose start exceeds its end
/// (e.g. `&s[3..1]`). Out of line to keep indexing call sites small.
fn slice_index_order_fail(index: usize, end: usize) -> ! {
    panic!("slice index starts at {} but ends at {}", index, end)
}
/// Diverging helper for `..=usize::MAX`-style inclusive ranges, whose
/// exclusive upper bound (`end + 1`) would overflow `usize`.
fn slice_index_overflow_fail() -> ! {
    panic!("attempted to index slice up to maximum usize")
}
// Sealed-trait pattern: `SliceIndex` has this private `Sealed` supertrait, so only
// the index types enumerated here can ever implement it — downstream crates cannot.
2436 mod private_slice_index {
2438     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2441     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2442     impl Sealed for usize {}
2443     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2444     impl Sealed for ops::Range<usize> {}
2445     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2446     impl Sealed for ops::RangeTo<usize> {}
2447     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2448     impl Sealed for ops::RangeFrom<usize> {}
2449     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2450     impl Sealed for ops::RangeFull {}
2451     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2452     impl Sealed for ops::RangeInclusive<usize> {}
2453     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2454     impl Sealed for ops::RangeToInclusive<usize> {}
2457 /// A helper trait used for indexing operations.
2458 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2459 #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
// Three tiers of access: checked (`get*` → Option), unchecked (`get_unchecked*`,
// caller guarantees bounds), and panicking (`index*`, backs the `[]` operator).
2460 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2461     /// The output type returned by methods.
// `&T` for a `usize` index, `&[T]` for range indices.
2462     #[stable(feature = "slice_get_slice", since = "1.28.0")]
2463     type Output: ?Sized;
2465     /// Returns a shared reference to the output at this location, if in
2467     #[unstable(feature = "slice_index_methods", issue = "0")]
2468     fn get(self, slice: &T) -> Option<&Self::Output>;
2470     /// Returns a mutable reference to the output at this location, if in
2472     #[unstable(feature = "slice_index_methods", issue = "0")]
2473     fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2475     /// Returns a shared reference to the output at this location, without
2476     /// performing any bounds checking.
// Caller must guarantee the index/range is in bounds; otherwise behavior is undefined.
2477     #[unstable(feature = "slice_index_methods", issue = "0")]
2478     unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2480     /// Returns a mutable reference to the output at this location, without
2481     /// performing any bounds checking.
2482     #[unstable(feature = "slice_index_methods", issue = "0")]
2483     unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2485     /// Returns a shared reference to the output at this location, panicking
2486     /// if out of bounds.
2487     #[unstable(feature = "slice_index_methods", issue = "0")]
2488     fn index(self, slice: &T) -> &Self::Output;
2490     /// Returns a mutable reference to the output at this location, panicking
2491     /// if out of bounds.
2492     #[unstable(feature = "slice_index_methods", issue = "0")]
2493     fn index_mut(self, slice: &mut T) -> &mut Self::Output;
// Single-element indexing: Output is `&T` / `&mut T` rather than a subslice.
2496 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2497 impl<T> SliceIndex<[T]> for usize {
// Bounds check once, then fall through to the unchecked pointer arithmetic.
2501     fn get(self, slice: &[T]) -> Option<&T> {
2502         if self < slice.len() {
2504             Some(self.get_unchecked(slice))
2512     fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2513         if self < slice.len() {
2515             Some(self.get_unchecked_mut(slice))
// Raw pointer offset; sound only when the caller upholds `self < slice.len()`
// (the trait's `get_unchecked` contract).
2523     unsafe fn get_unchecked(self, slice: &[T]) -> &T {
2524         &*slice.as_ptr().add(self)
2528     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2529         &mut *slice.as_mut_ptr().add(self)
// The panicking path uses built-in indexing so codegen emits the canonical
// bounds-check + panic sequence (bodies elided in this excerpt).
2533     fn index(self, slice: &[T]) -> &T {
2534         // N.B., use intrinsic indexing
2539     fn index_mut(self, slice: &mut [T]) -> &mut T {
2540         // N.B., use intrinsic indexing
// `a..b` indexing — the workhorse impl; the other range types delegate to this one.
2545 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2546 impl<T> SliceIndex<[T]> for ops::Range<usize> {
// A range is valid iff start <= end and end <= len; both `get` variants return
// `None` otherwise instead of panicking.
2550     fn get(self, slice: &[T]) -> Option<&[T]> {
2551         if self.start > self.end || self.end > slice.len() {
2555             Some(self.get_unchecked(slice))
2561     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2562         if self.start > self.end || self.end > slice.len() {
2566             Some(self.get_unchecked_mut(slice))
// Builds the subslice from raw parts; sound only when start <= end <= len.
2572     unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2573         from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2577     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2578         from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
// Panicking path distinguishes the two failure modes so the message is precise:
// inverted range vs. end past the slice length.
2582     fn index(self, slice: &[T]) -> &[T] {
2583         if self.start > self.end {
2584             slice_index_order_fail(self.start, self.end);
2585         } else if self.end > slice.len() {
2586             slice_index_len_fail(self.end, slice.len());
2589         self.get_unchecked(slice)
2594     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2595         if self.start > self.end {
2596             slice_index_order_fail(self.start, self.end);
2597         } else if self.end > slice.len() {
2598             slice_index_len_fail(self.end, slice.len());
2601         self.get_unchecked_mut(slice)
// `..end` indexing: every method rewrites itself as the equivalent `0..end` and
// delegates to the `Range` impl above, so there is exactly one checked code path.
2606 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2607 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2611     fn get(self, slice: &[T]) -> Option<&[T]> {
2612         (0..self.end).get(slice)
2616     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2617         (0..self.end).get_mut(slice)
2621     unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2622         (0..self.end).get_unchecked(slice)
2626     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2627         (0..self.end).get_unchecked_mut(slice)
2631     fn index(self, slice: &[T]) -> &[T] {
2632         (0..self.end).index(slice)
2636     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2637         (0..self.end).index_mut(slice)
// `start..` indexing: same delegation trick, with the slice's own length as the
// implied exclusive end.
2641 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2642 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
2646     fn get(self, slice: &[T]) -> Option<&[T]> {
2647         (self.start..slice.len()).get(slice)
2651     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2652         (self.start..slice.len()).get_mut(slice)
2656     unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2657         (self.start..slice.len()).get_unchecked(slice)
2661     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2662         (self.start..slice.len()).get_unchecked_mut(slice)
2666     fn index(self, slice: &[T]) -> &[T] {
2667         (self.start..slice.len()).index(slice)
2671     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2672         (self.start..slice.len()).index_mut(slice)
// `..` (full-range) indexing: infallible, never panics.
// NOTE(review): the method bodies are elided in this excerpt — presumably each
// simply returns `slice` unchanged; confirm against the full source.
2676 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2677 impl<T> SliceIndex<[T]> for ops::RangeFull {
2681     fn get(self, slice: &[T]) -> Option<&[T]> {
2686     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2691     unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2696     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2701     fn index(self, slice: &[T]) -> &[T] {
2706     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
// `a..=b` indexing: converts to the half-open `a..b+1` and delegates. The `b+1`
// would overflow when `b == usize::MAX`, so that case is handled up front:
// `get`/`get_mut` return `None`, the panicking paths call
// `slice_index_overflow_fail`, and the unchecked paths make it the caller's
// obligation to never pass such a range.
2712 #[stable(feature = "inclusive_range", since = "1.26.0")]
2713 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
2717     fn get(self, slice: &[T]) -> Option<&[T]> {
2718         if *self.end() == usize::max_value() { None }
2719         else { (*self.start()..self.end() + 1).get(slice) }
2723     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2724         if *self.end() == usize::max_value() { None }
2725         else { (*self.start()..self.end() + 1).get_mut(slice) }
2729     unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2730         (*self.start()..self.end() + 1).get_unchecked(slice)
2734     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2735         (*self.start()..self.end() + 1).get_unchecked_mut(slice)
2739     fn index(self, slice: &[T]) -> &[T] {
2740         if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2741         (*self.start()..self.end() + 1).index(slice)
2745     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2746         if *self.end() == usize::max_value() { slice_index_overflow_fail(); }
2747         (*self.start()..self.end() + 1).index_mut(slice)
// `..=b` indexing: rewritten as `0..=b`, inheriting the overflow handling above.
2751 #[stable(feature = "inclusive_range", since = "1.26.0")]
2752 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
2756     fn get(self, slice: &[T]) -> Option<&[T]> {
2757         (0..=self.end).get(slice)
2761     fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2762         (0..=self.end).get_mut(slice)
2766     unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2767         (0..=self.end).get_unchecked(slice)
2771     unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2772         (0..=self.end).get_unchecked_mut(slice)
2776     fn index(self, slice: &[T]) -> &[T] {
2777         (0..=self.end).index(slice)
2781     fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2782         (0..=self.end).index_mut(slice)
2786 ////////////////////////////////////////////////////////////////////////////////
// Common trait implementations for slices
2788 ////////////////////////////////////////////////////////////////////////////////
// Empty-slice defaults: `&[]` / `&mut []` are 'static and coerce to any lifetime.
2790 #[stable(feature = "rust1", since = "1.0.0")]
2791 impl<T> Default for &[T] {
2792     /// Creates an empty slice.
2793     fn default() -> Self { &[] }
2796 #[stable(feature = "mut_slice_default", since = "1.5.0")]
2797 impl<T> Default for &mut [T] {
2798     /// Creates a mutable empty slice.
2799     fn default() -> Self { &mut [] }
// `for x in &slice` / `for x in &mut slice` support: forwards to the `Iter` /
// `IterMut` types defined below.
2806 #[stable(feature = "rust1", since = "1.0.0")]
2807 impl<'a, T> IntoIterator for &'a [T] {
2809     type IntoIter = Iter<'a, T>;
2811     fn into_iter(self) -> Iter<'a, T> {
2816 #[stable(feature = "rust1", since = "1.0.0")]
2817 impl<'a, T> IntoIterator for &'a mut [T] {
2818     type Item = &'a mut T;
2819     type IntoIter = IterMut<'a, T>;
2821     fn into_iter(self) -> IterMut<'a, T> {
2826 // Macro helper functions
// Maps a pointer's pointee type to that type's size in bytes; used by the `len!`
// macro below so the ZST-aware length math never has to name `T` explicitly.
// NOTE(review): the function body is elided in this excerpt.
2828 fn size_from_ptr<T>(_: *const T) -> usize {
2832 // Inlining is_empty and len makes a huge performance difference
// Macros (not functions) so the comparison is textually inlined into every
// iterator method; they rely on the `ptr`/`end` fields the `iterator!` macro's
// target structs share.
2833 macro_rules! is_empty {
2834     // The way we encode the length of a ZST iterator, this works both for ZST
// For ZSTs `end` is really `ptr + remaining_len`, so `ptr == end` still means
// "exhausted" in both the ZST and non-ZST encodings.
2836     ($self: ident) => {$self.ptr == $self.end}
2838 // To get rid of some bounds checks (see `position`), we compute the length in a somewhat
2839 // unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
2841     ($self: ident) => {{
2842         let start = $self.ptr;
// Byte distance between the two pointers; `wrapping_sub` because for ZSTs `end`
// is a length-encoded value, not a real address past `start`.
2843         let diff = ($self.end as usize).wrapping_sub(start as usize);
2844         let size = size_from_ptr(start);
2848         // Using division instead of `offset_from` helps LLVM remove bounds checks
2854 // The shared definition of the `Iter` and `IterMut` iterators
// One macro generates both iterators: `$ptr`/`$raw_mut` select const vs. mut raw
// pointers and `$( $mut_ )*` optionally inserts `mut` into the yielded reference.
2855 macro_rules! iterator {
2856     (struct $name:ident -> $ptr:ty, $elem:ty, $raw_mut:tt, $( $mut_:tt )*) => {
2857         impl<'a, T> $name<'a, T> {
2858             // Helper function for creating a slice from the iterator.
2860             fn make_slice(&self) -> &'a [T] {
2861                 unsafe { from_raw_parts(self.ptr, len!(self)) }
2864             // Helper function for moving the start of the iterator forwards by `offset` elements,
2865             // returning the old start.
2866             // Unsafe because the offset must be in-bounds or one-past-the-end.
2868             unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
2869                 if mem::size_of::<T>() == 0 {
2870                     // This is *reducing* the length. `ptr` never changes with ZST.
2871                     self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2875                     self.ptr = self.ptr.offset(offset);
2880             // Helper function for moving the end of the iterator backwards by `offset` elements,
2881             // returning the new end.
2882             // Unsafe because the offset must be in-bounds or one-past-the-end.
2884             unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
2885                 if mem::size_of::<T>() == 0 {
// Same ZST trick as `post_inc_start`: `end` carries the remaining length.
2886                     self.end = (self.end as * $raw_mut u8).wrapping_offset(-offset) as * $raw_mut T;
2889                     self.end = self.end.offset(-offset);
2895         #[stable(feature = "rust1", since = "1.0.0")]
2896         impl<'a, T> ExactSizeIterator for $name<'a, T> {
2898             fn len(&self) -> usize {
2903             fn is_empty(&self) -> bool {
2908         #[stable(feature = "rust1", since = "1.0.0")]
2909         impl<'a, T> Iterator for $name<'a, T> {
2913             fn next(&mut self) -> Option<$elem> {
2914                 // could be implemented with slices, but this avoids bounds checks
// The `assume` hints tell LLVM the pointers are non-null so the Option niche
// check and redundant null tests optimize away.
2916                     assume(!self.ptr.is_null());
2917                     if mem::size_of::<T>() != 0 {
2918                         assume(!self.end.is_null());
2920                     if is_empty!(self) {
2923                         Some(& $( $mut_ )* *self.post_inc_start(1))
2929             fn size_hint(&self) -> (usize, Option<usize>) {
// Exact: both bounds equal the remaining element count.
2930                 let exact = len!(self);
2931                 (exact, Some(exact))
2935             fn count(self) -> usize {
2940             fn nth(&mut self, n: usize) -> Option<$elem> {
2941                 if n >= len!(self) {
2942                     // This iterator is now empty.
2943                     if mem::size_of::<T>() == 0 {
2944                         // We have to do it this way as `ptr` may never be 0, but `end`
2945                         // could be (due to wrapping).
2946                         self.end = self.ptr;
2948                         self.ptr = self.end;
2952                 // We are in bounds. `offset` does the right thing even for ZSTs.
2954                     let elem = Some(& $( $mut_ )* *self.ptr.add(n));
2955                     self.post_inc_start((n as isize).wrapping_add(1));
2961             fn last(mut self) -> Option<$elem> {
2966             fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R where
2967                 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
2969                 // manual unrolling is needed when there are conditional exits from the loop
// 4x unrolled main loop plus a one-at-a-time tail; each step may exit early via `?`.
2970                 let mut accum = init;
2972                     while len!(self) >= 4 {
2973                         accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2974                         accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2975                         accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2976                         accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2978                     while !is_empty!(self) {
2979                         accum = f(accum, & $( $mut_ )* *self.post_inc_start(1))?;
2986             fn fold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
2987                 where Fold: FnMut(Acc, Self::Item) -> Acc,
2989                 // Let LLVM unroll this, rather than using the default
2990                 // impl that would force the manual unrolling above
2991                 let mut accum = init;
2992                 while let Some(x) = self.next() {
2993                     accum = f(accum, x);
2999             #[rustc_inherit_overflow_checks]
3000             fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
3002                 P: FnMut(Self::Item) -> bool,
3004                 // The addition might panic on overflow.
// Implemented over `try_fold` (Err short-circuits with the found index); the
// `assume(i < n)` lets LLVM drop a bounds check in callers — see the codegen test
// referenced by the `len!` comment above.
3006                 self.try_fold(0, move |i, x| {
3007                     if predicate(x) { Err(i) }
3011                     unsafe { assume(i < n) };
3017             fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
3018                 P: FnMut(Self::Item) -> bool,
3019                 Self: Sized + ExactSizeIterator + DoubleEndedIterator
3021                 // No need for an overflow check here, because `ExactSizeIterator`
3023                 self.try_rfold(n, move |i, x| {
3025                     if predicate(x) { Err(i) }
3029                     unsafe { assume(i < n) };
3035         #[stable(feature = "rust1", since = "1.0.0")]
3036         impl<'a, T> DoubleEndedIterator for $name<'a, T> {
3038             fn next_back(&mut self) -> Option<$elem> {
3039                 // could be implemented with slices, but this avoids bounds checks
3041                     assume(!self.ptr.is_null());
3042                     if mem::size_of::<T>() != 0 {
3043                         assume(!self.end.is_null());
3045                     if is_empty!(self) {
3048                         Some(& $( $mut_ )* *self.pre_dec_end(1))
3054             fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
3055                 Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
3057                 // manual unrolling is needed when there are conditional exits from the loop
// Mirror of `try_fold`, walking from the back via `pre_dec_end`.
3058                 let mut accum = init;
3060                     while len!(self) >= 4 {
3061                         accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3062                         accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3063                         accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3064                         accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3066                     // inlining is_empty everywhere makes a huge performance difference
3067                     while !is_empty!(self) {
3068                         accum = f(accum, & $( $mut_ )* *self.pre_dec_end(1))?;
3075             fn rfold<Acc, Fold>(mut self, init: Acc, mut f: Fold) -> Acc
3076                 where Fold: FnMut(Acc, Self::Item) -> Acc,
3078                 // Let LLVM unroll this, rather than using the default
3079                 // impl that would force the manual unrolling above
3080                 let mut accum = init;
3081                 while let Some(x) = self.next_back() {
3082                     accum = f(accum, x);
3088         #[stable(feature = "fused", since = "1.26.0")]
3089         impl<'a, T> FusedIterator for $name<'a, T> {}
3091         #[unstable(feature = "trusted_len", issue = "37572")]
3092         unsafe impl<'a, T> TrustedLen for $name<'a, T> {}
3096 /// Immutable slice iterator
3098 /// This struct is created by the [`iter`] method on [slices].
3105 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3106 /// let slice = &[1, 2, 3];
3108 /// // Then, we iterate over it:
3109 /// for element in slice.iter() {
3110 ///     println!("{}", element);
3114 /// [`iter`]: ../../std/primitive.slice.html#method.iter
3115 /// [slices]: ../../std/primitive.slice.html
3116 #[stable(feature = "rust1", since = "1.0.0")]
// Two-pointer representation consumed by the `iterator!` macro above.
3117 pub struct Iter<'a, T: 'a> {
3119     end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3120                    // ptr == end is a quick test for the Iterator being empty, that works
3121                    // for both ZST and non-ZST.
// PhantomData ties the raw pointers back to the borrowed slice's lifetime.
3122     _marker: marker::PhantomData<&'a T>,
3125 #[stable(feature = "core_impl_debug", since = "1.9.0")]
// Debug shows the remaining elements, not the raw pointers.
3126 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
3127     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3128         f.debug_tuple("Iter")
3129             .field(&self.as_slice())
// Manual Sync/Send: raw pointers suppress the auto impls, but `Iter` only hands
// out `&T`, so `T: Sync` suffices for both.
3134 #[stable(feature = "rust1", since = "1.0.0")]
3135 unsafe impl<T: Sync> Sync for Iter<'_, T> {}
3136 #[stable(feature = "rust1", since = "1.0.0")]
3137 unsafe impl<T: Sync> Send for Iter<'_, T> {}
3139 impl<'a, T> Iter<'a, T> {
3140     /// View the underlying data as a subslice of the original data.
3142     /// This has the same lifetime as the original slice, and so the
3143     /// iterator can continue to be used while this exists.
3150     /// // First, we declare a type which has the `iter` method to get the `Iter`
3151     /// // struct (&[usize here]):
3152     /// let slice = &[1, 2, 3];
3154     /// // Then, we get the iterator:
3155     /// let mut iter = slice.iter();
3156     /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
3157     /// println!("{:?}", iter.as_slice());
3159     /// // Next, we move to the second element of the slice:
3161     /// // Now `as_slice` returns "[2, 3]":
3162     /// println!("{:?}", iter.as_slice());
3164     #[stable(feature = "iter_to_slice", since = "1.4.0")]
// Note the `'a` return lifetime: the slice borrows from the original data, not
// from the iterator, so it outlives further iteration.
3165     pub fn as_slice(&self) -> &'a [T] {
// Expands the whole shared iterator implementation for `Iter` (const pointers,
// yields `&'a T`, no `mut` token).
3170 iterator!{struct Iter -> *const T, &'a T, const, /* no mut */}
3172 #[stable(feature = "rust1", since = "1.0.0")]
// Hand-written Clone (rather than derive) so `T: Clone` is not required —
// cloning an `Iter` only copies the two pointers and the marker.
3173 impl<T> Clone for Iter<'_, T> {
3174     fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
3177 #[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
3178 impl<T> AsRef<[T]> for Iter<'_, T> {
3179     fn as_ref(&self) -> &[T] {
3184 /// Mutable slice iterator.
3186 /// This struct is created by the [`iter_mut`] method on [slices].
3193 /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3194 /// // struct (&[usize here]):
3195 /// let mut slice = &mut [1, 2, 3];
3197 /// // Then, we iterate over it and increment each element value:
3198 /// for element in slice.iter_mut() {
3202 /// // We now have "[2, 3, 4]":
3203 /// println!("{:?}", slice);
3206 /// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
3207 /// [slices]: ../../std/primitive.slice.html
3208 #[stable(feature = "rust1", since = "1.0.0")]
// Same two-pointer layout as `Iter`, but with mutable pointers and a
// `&'a mut T` marker so borrow-uniqueness is preserved.
3209 pub struct IterMut<'a, T: 'a> {
3211     end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
3212                  // ptr == end is a quick test for the Iterator being empty, that works
3213                  // for both ZST and non-ZST.
3214     _marker: marker::PhantomData<&'a mut T>,
3217 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3218 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
3219     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3220         f.debug_tuple("IterMut")
3221             .field(&self.make_slice())
// Unlike `Iter`, `Send` here requires `T: Send` because `IterMut` yields `&mut T`.
3226 #[stable(feature = "rust1", since = "1.0.0")]
3227 unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
3228 #[stable(feature = "rust1", since = "1.0.0")]
3229 unsafe impl<T: Send> Send for IterMut<'_, T> {}
3231 impl<'a, T> IterMut<'a, T> {
3232     /// View the underlying data as a subslice of the original data.
3234     /// To avoid creating `&mut` references that alias, this is forced
3235     /// to consume the iterator.
3242     /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
3243     /// // struct (&[usize here]):
3244     /// let mut slice = &mut [1, 2, 3];
3247     /// // Then, we get the iterator:
3248     /// let mut iter = slice.iter_mut();
3249     /// // We move to next element:
3251     /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
3252     /// println!("{:?}", iter.into_slice());
3255     /// // Now let's modify a value of the slice:
3257     /// // First we get back the iterator:
3258     /// let mut iter = slice.iter_mut();
3259     /// // We change the value of the first element of the slice returned by the `next` method:
3260     /// *iter.next().unwrap() += 1;
3262     /// // Now slice is "[2, 2, 3]":
3263     /// println!("{:?}", slice);
3265     #[stable(feature = "iter_to_slice", since = "1.4.0")]
// Takes `self` by value (see doc above): consuming the iterator is what makes
// handing out a `&'a mut [T]` to the remaining elements sound.
3266     pub fn into_slice(self) -> &'a mut [T] {
3267         unsafe { from_raw_parts_mut(self.ptr, len!(self)) }
// Expands the shared iterator implementation for `IterMut` (mut pointers,
// yields `&'a mut T`).
3271 iterator!{struct IterMut -> *mut T, &'a mut T, mut, mut}
3273 /// An internal abstraction over the splitting iterators, so that
3274 /// splitn, splitn_mut etc can be implemented once.
// Private trait: `GenericSplitN` below uses `finish` to emit the final
// (untruncated) remainder once the split budget is spent.
3276 trait SplitIter: DoubleEndedIterator {
3277     /// Marks the underlying iterator as complete, extracting the remaining
3278     /// portion of the slice.
3279     fn finish(&mut self) -> Option<Self::Item>;
3282 /// An iterator over subslices separated by elements that match a predicate
3285 /// This struct is created by the [`split`] method on [slices].
3287 /// [`split`]: ../../std/primitive.slice.html#method.split
3288 /// [slices]: ../../std/primitive.slice.html
3289 #[stable(feature = "rust1", since = "1.0.0")]
// Fields (from the impls below): `v` — the unconsumed tail, `pred` — the
// separator predicate, `finished` — set once the final piece has been yielded.
3290 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
3296 #[stable(feature = "core_impl_debug", since = "1.9.0")]
// `pred` is a closure and deliberately omitted from the Debug output.
3297 impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool {
3298     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3299         f.debug_struct("Split")
3300             .field("v", &self.v)
3301             .field("finished", &self.finished)
3306 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
// Manual Clone so only `P: Clone` is needed, not `T: Clone` (v is a shared slice).
3307 #[stable(feature = "rust1", since = "1.0.0")]
3308 impl<T, P> Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool {
3309     fn clone(&self) -> Self {
3312             pred: self.pred.clone(),
3313             finished: self.finished,
3318 #[stable(feature = "rust1", since = "1.0.0")]
3319 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3320     type Item = &'a [T];
// Each step: find the next separator, yield everything before it, and drop the
// separator itself (`idx + 1..`). No separator left → `finish` yields the tail.
3323     fn next(&mut self) -> Option<&'a [T]> {
3324         if self.finished { return None; }
3326         match self.v.iter().position(|x| (self.pred)(x)) {
3327             None => self.finish(),
3329                 let ret = Some(&self.v[..idx]);
3330                 self.v = &self.v[idx + 1..];
3337     fn size_hint(&self) -> (usize, Option<usize>) {
// At least one piece remains; at most one piece per element plus the final one.
3341             (1, Some(self.v.len() + 1))
3346 #[stable(feature = "rust1", since = "1.0.0")]
// Back-to-front mirror of `next`, scanning with `rposition`.
3347 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
3349     fn next_back(&mut self) -> Option<&'a [T]> {
3350         if self.finished { return None; }
3352         match self.v.iter().rposition(|x| (self.pred)(x)) {
3353             None => self.finish(),
3355                 let ret = Some(&self.v[idx + 1..]);
3356                 self.v = &self.v[..idx];
3363 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
// Yields the whole remaining tail exactly once, then reports exhaustion.
3365     fn finish(&mut self) -> Option<&'a [T]> {
3366         if self.finished { None } else { self.finished = true; Some(self.v) }
3370 #[stable(feature = "fused", since = "1.26.0")]
3371 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
3373 /// An iterator over the subslices of the vector which are separated
3374 /// by elements that match `pred`.
3376 /// This struct is created by the [`split_mut`] method on [slices].
3378 /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
3379 /// [slices]: ../../std/primitive.slice.html
3380 #[stable(feature = "rust1", since = "1.0.0")]
// Mutable counterpart of `Split`; no Clone impl because `&mut [T]` is unique.
3381 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3387 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3388 impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3389     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3390         f.debug_struct("SplitMut")
3391             .field("v", &self.v)
3392             .field("finished", &self.finished)
3397 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3399     fn finish(&mut self) -> Option<&'a mut [T]> {
3403             self.finished = true;
// `mem::replace` swaps the field for an empty slice so the full `&'a mut` tail
// can be moved out without aliasing `self`.
3404             Some(mem::replace(&mut self.v, &mut []))
3409 #[stable(feature = "rust1", since = "1.0.0")]
3410 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3411     type Item = &'a mut [T];
3414     fn next(&mut self) -> Option<&'a mut [T]> {
3415         if self.finished { return None; }
3417         let idx_opt = { // work around borrowck limitations
// Reborrow `pred` separately so the search borrow of `self.v` and the closure's
// borrow of `self.pred` don't conflict.
3418             let pred = &mut self.pred;
3419             self.v.iter().position(|x| (*pred)(x))
3422             None => self.finish(),
// Take the whole tail, split at the separator, keep everything after it
// (skipping the separator via `tail[1..]`) and yield the head.
3424                 let tmp = mem::replace(&mut self.v, &mut []);
3425                 let (head, tail) = tmp.split_at_mut(idx);
3426                 self.v = &mut tail[1..];
3433     fn size_hint(&self) -> (usize, Option<usize>) {
3437             // if the predicate doesn't match anything, we yield one slice
3438             // if it matches every element, we yield len+1 empty slices.
3439             (1, Some(self.v.len() + 1))
3444 #[stable(feature = "rust1", since = "1.0.0")]
// Back-to-front mirror of `next`, using `rposition` and yielding the tail piece.
3445 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
3446     P: FnMut(&T) -> bool,
3449     fn next_back(&mut self) -> Option<&'a mut [T]> {
3450         if self.finished { return None; }
3452         let idx_opt = { // work around borrowck limitations
3453             let pred = &mut self.pred;
3454             self.v.iter().rposition(|x| (*pred)(x))
3457             None => self.finish(),
3459                 let tmp = mem::replace(&mut self.v, &mut []);
3460                 let (head, tail) = tmp.split_at_mut(idx);
3462                 Some(&mut tail[1..])
3468 #[stable(feature = "fused", since = "1.26.0")]
3469 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
3471 /// An iterator over subslices separated by elements that match a predicate
3472 /// function, starting from the end of the slice.
3474 /// This struct is created by the [`rsplit`] method on [slices].
3476 /// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
3477 /// [slices]: ../../std/primitive.slice.html
3478 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3479 #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
// Adapter over `Split` that simply swaps the two iteration directions.
3480 pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool {
3481     inner: Split<'a, T, P>
3484 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Debug reaches through to the inner Split's fields so output matches `Split`'s.
3485 impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool {
3486     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3487         f.debug_struct("RSplit")
3488             .field("v", &self.inner.v)
3489             .field("finished", &self.inner.finished)
3494 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Forward iteration of RSplit == backward iteration of the inner Split.
3495 impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3496     type Item = &'a [T];
3499     fn next(&mut self) -> Option<&'a [T]> {
3500         self.inner.next_back()
3504     fn size_hint(&self) -> (usize, Option<usize>) {
3505         self.inner.size_hint()
3509 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3510 impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3512     fn next_back(&mut self) -> Option<&'a [T]> {
3517 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Needed so `rsplitn` can reuse `GenericSplitN`; delegates to the inner finish.
3518 impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool {
3520     fn finish(&mut self) -> Option<&'a [T]> {
3525 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3526 impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
3528 /// An iterator over the subslices of the vector which are separated
3529 /// by elements that match `pred`, starting from the end of the slice.
3531 /// This struct is created by the [`rsplit_mut`] method on [slices].
3533 /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
3534 /// [slices]: ../../std/primitive.slice.html
3535 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Mutable counterpart of `RSplit`: wraps `SplitMut` and swaps directions.
3536 pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
3537     inner: SplitMut<'a, T, P>
3540 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3541 impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {
3542     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3543         f.debug_struct("RSplitMut")
3544             .field("v", &self.inner.v)
3545             .field("finished", &self.inner.finished)
3550 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Delegated `finish` so `rsplitn_mut` can drive this through `GenericSplitN`.
3551 impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3553     fn finish(&mut self) -> Option<&'a mut [T]> {
3558 #[stable(feature = "slice_rsplit", since = "1.27.0")]
// Forward iteration of RSplitMut == backward iteration of the inner SplitMut.
3559 impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {
3560     type Item = &'a mut [T];
3563     fn next(&mut self) -> Option<&'a mut [T]> {
3564         self.inner.next_back()
3568     fn size_hint(&self) -> (usize, Option<usize>) {
3569         self.inner.size_hint()
3573 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3574 impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where
3575     P: FnMut(&T) -> bool,
3578     fn next_back(&mut self) -> Option<&'a mut [T]> {
3583 #[stable(feature = "slice_rsplit", since = "1.27.0")]
3584 impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
3586 /// An private iterator over subslices separated by elements that
3587 /// match a predicate function, splitting at most a fixed number of
// Shared engine for all `splitn`-family iterators: wraps any `SplitIter` plus a
// remaining-splits counter. NOTE(review): the field declarations are elided in
// this excerpt (presumably `iter: I` and `count: usize`).
3590 struct GenericSplitN<I> {
3595 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
// Counter at 1 means the budget is spent: emit the whole untruncated remainder
// via `finish`; otherwise decrement and take a normal split step.
3599     fn next(&mut self) -> Option<T> {
3602             1 => { self.count -= 1; self.iter.finish() }
3603             _ => { self.count -= 1; self.iter.next() }
3608     fn size_hint(&self) -> (usize, Option<usize>) {
// Inner upper bound, capped by the remaining split budget.
3609         let (lower, upper_opt) = self.iter.size_hint();
3610         (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
3614 /// An iterator over subslices separated by elements that match a predicate
3615 /// function, limited to a given number of splits.
3617 /// This struct is created by the [`splitn`] method on [slices].
3619 /// [`splitn`]: ../../std/primitive.slice.html#method.splitn
3620 /// [slices]: ../../std/primitive.slice.html
3621 #[stable(feature = "rust1", since = "1.0.0")]
3622 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3623 inner: GenericSplitN<Split<'a, T, P>>
3626 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3627 impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool {
3628 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3629 f.debug_struct("SplitN")
3630 .field("inner", &self.inner)
3635 /// An iterator over subslices separated by elements that match a
3636 /// predicate function, limited to a given number of splits, starting
3637 /// from the end of the slice.
3639 /// This struct is created by the [`rsplitn`] method on [slices].
3641 /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
3642 /// [slices]: ../../std/primitive.slice.html
3643 #[stable(feature = "rust1", since = "1.0.0")]
3644 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3645 inner: GenericSplitN<RSplit<'a, T, P>>
3648 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3649 impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool {
3650 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3651 f.debug_struct("RSplitN")
3652 .field("inner", &self.inner)
3657 /// An iterator over subslices separated by elements that match a predicate
3658 /// function, limited to a given number of splits.
3660 /// This struct is created by the [`splitn_mut`] method on [slices].
3662 /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
3663 /// [slices]: ../../std/primitive.slice.html
3664 #[stable(feature = "rust1", since = "1.0.0")]
3665 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3666 inner: GenericSplitN<SplitMut<'a, T, P>>
3669 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3670 impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3671 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3672 f.debug_struct("SplitNMut")
3673 .field("inner", &self.inner)
3678 /// An iterator over subslices separated by elements that match a
3679 /// predicate function, limited to a given number of splits, starting
3680 /// from the end of the slice.
3682 /// This struct is created by the [`rsplitn_mut`] method on [slices].
3684 /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
3685 /// [slices]: ../../std/primitive.slice.html
3686 #[stable(feature = "rust1", since = "1.0.0")]
3687 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
3688 inner: GenericSplitN<RSplitMut<'a, T, P>>
3691 #[stable(feature = "core_impl_debug", since = "1.9.0")]
3692 impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool {
3693 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
3694 f.debug_struct("RSplitNMut")
3695 .field("inner", &self.inner)
// Generates the `Iterator`/`FusedIterator` impls for the four `*SplitN*`
// wrappers, all of which just forward to their `GenericSplitN` field.
macro_rules! forward_iterator {
    ($name:ident: $elem:ident, $iter_of:ty) => {
        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
            P: FnMut(&T) -> bool
        {
            type Item = $iter_of;

            #[inline]
            fn next(&mut self) -> Option<$iter_of> {
                self.inner.next()
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                self.inner.size_hint()
            }
        }

        #[stable(feature = "fused", since = "1.26.0")]
        impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
            where P: FnMut(&T) -> bool {}
    }
}
3725 forward_iterator! { SplitN: T, &'a [T] }
3726 forward_iterator! { RSplitN: T, &'a [T] }
3727 forward_iterator! { SplitNMut: T, &'a mut [T] }
3728 forward_iterator! { RSplitNMut: T, &'a mut [T] }
3730 /// An iterator over overlapping subslices of length `size`.
3732 /// This struct is created by the [`windows`] method on [slices].
3734 /// [`windows`]: ../../std/primitive.slice.html#method.windows
3735 /// [slices]: ../../std/primitive.slice.html
3737 #[stable(feature = "rust1", since = "1.0.0")]
3738 pub struct Windows<'a, T:'a> {
3743 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3744 #[stable(feature = "rust1", since = "1.0.0")]
3745 impl<T> Clone for Windows<'_, T> {
3746 fn clone(&self) -> Self {
3754 #[stable(feature = "rust1", since = "1.0.0")]
3755 impl<'a, T> Iterator for Windows<'a, T> {
3756 type Item = &'a [T];
3759 fn next(&mut self) -> Option<&'a [T]> {
3760 if self.size > self.v.len() {
3763 let ret = Some(&self.v[..self.size]);
3764 self.v = &self.v[1..];
3770 fn size_hint(&self) -> (usize, Option<usize>) {
3771 if self.size > self.v.len() {
3774 let size = self.v.len() - self.size + 1;
3780 fn count(self) -> usize {
3785 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3786 let (end, overflow) = self.size.overflowing_add(n);
3787 if end > self.v.len() || overflow {
3791 let nth = &self.v[n..end];
3792 self.v = &self.v[n+1..];
3798 fn last(self) -> Option<Self::Item> {
3799 if self.size > self.v.len() {
3802 let start = self.v.len() - self.size;
3803 Some(&self.v[start..])
3808 #[stable(feature = "rust1", since = "1.0.0")]
3809 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
3811 fn next_back(&mut self) -> Option<&'a [T]> {
3812 if self.size > self.v.len() {
3815 let ret = Some(&self.v[self.v.len()-self.size..]);
3816 self.v = &self.v[..self.v.len()-1];
3822 #[stable(feature = "rust1", since = "1.0.0")]
3823 impl<T> ExactSizeIterator for Windows<'_, T> {}
3825 #[unstable(feature = "trusted_len", issue = "37572")]
3826 unsafe impl<T> TrustedLen for Windows<'_, T> {}
3828 #[stable(feature = "fused", since = "1.26.0")]
3829 impl<T> FusedIterator for Windows<'_, T> {}
3832 unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
3833 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3834 from_raw_parts(self.v.as_ptr().add(i), self.size)
3836 fn may_have_side_effect() -> bool { false }
3839 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
3840 /// time), starting at the beginning of the slice.
3842 /// When the slice len is not evenly divided by the chunk size, the last slice
3843 /// of the iteration will be the remainder.
3845 /// This struct is created by the [`chunks`] method on [slices].
3847 /// [`chunks`]: ../../std/primitive.slice.html#method.chunks
3848 /// [slices]: ../../std/primitive.slice.html
3850 #[stable(feature = "rust1", since = "1.0.0")]
3851 pub struct Chunks<'a, T:'a> {
3856 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
3857 #[stable(feature = "rust1", since = "1.0.0")]
3858 impl<T> Clone for Chunks<'_, T> {
3859 fn clone(&self) -> Self {
3862 chunk_size: self.chunk_size,
3867 #[stable(feature = "rust1", since = "1.0.0")]
3868 impl<'a, T> Iterator for Chunks<'a, T> {
3869 type Item = &'a [T];
3872 fn next(&mut self) -> Option<&'a [T]> {
3873 if self.v.is_empty() {
3876 let chunksz = cmp::min(self.v.len(), self.chunk_size);
3877 let (fst, snd) = self.v.split_at(chunksz);
3884 fn size_hint(&self) -> (usize, Option<usize>) {
3885 if self.v.is_empty() {
3888 let n = self.v.len() / self.chunk_size;
3889 let rem = self.v.len() % self.chunk_size;
3890 let n = if rem > 0 { n+1 } else { n };
3896 fn count(self) -> usize {
3901 fn nth(&mut self, n: usize) -> Option<Self::Item> {
3902 let (start, overflow) = n.overflowing_mul(self.chunk_size);
3903 if start >= self.v.len() || overflow {
3907 let end = match start.checked_add(self.chunk_size) {
3908 Some(sum) => cmp::min(self.v.len(), sum),
3909 None => self.v.len(),
3911 let nth = &self.v[start..end];
3912 self.v = &self.v[end..];
3918 fn last(self) -> Option<Self::Item> {
3919 if self.v.is_empty() {
3922 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
3923 Some(&self.v[start..])
3928 #[stable(feature = "rust1", since = "1.0.0")]
3929 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
3931 fn next_back(&mut self) -> Option<&'a [T]> {
3932 if self.v.is_empty() {
3935 let remainder = self.v.len() % self.chunk_size;
3936 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
3937 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
3944 #[stable(feature = "rust1", since = "1.0.0")]
3945 impl<T> ExactSizeIterator for Chunks<'_, T> {}
3947 #[unstable(feature = "trusted_len", issue = "37572")]
3948 unsafe impl<T> TrustedLen for Chunks<'_, T> {}
3950 #[stable(feature = "fused", since = "1.26.0")]
3951 impl<T> FusedIterator for Chunks<'_, T> {}
3954 unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
3955 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
3956 let start = i * self.chunk_size;
3957 let end = match start.checked_add(self.chunk_size) {
3958 None => self.v.len(),
3959 Some(end) => cmp::min(end, self.v.len()),
3961 from_raw_parts(self.v.as_ptr().add(start), end - start)
3963 fn may_have_side_effect() -> bool { false }
3966 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
3967 /// elements at a time), starting at the beginning of the slice.
3969 /// When the slice len is not evenly divided by the chunk size, the last slice
3970 /// of the iteration will be the remainder.
3972 /// This struct is created by the [`chunks_mut`] method on [slices].
3974 /// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
3975 /// [slices]: ../../std/primitive.slice.html
3977 #[stable(feature = "rust1", since = "1.0.0")]
3978 pub struct ChunksMut<'a, T:'a> {
3983 #[stable(feature = "rust1", since = "1.0.0")]
3984 impl<'a, T> Iterator for ChunksMut<'a, T> {
3985 type Item = &'a mut [T];
3988 fn next(&mut self) -> Option<&'a mut [T]> {
3989 if self.v.is_empty() {
3992 let sz = cmp::min(self.v.len(), self.chunk_size);
3993 let tmp = mem::replace(&mut self.v, &mut []);
3994 let (head, tail) = tmp.split_at_mut(sz);
4001 fn size_hint(&self) -> (usize, Option<usize>) {
4002 if self.v.is_empty() {
4005 let n = self.v.len() / self.chunk_size;
4006 let rem = self.v.len() % self.chunk_size;
4007 let n = if rem > 0 { n + 1 } else { n };
4013 fn count(self) -> usize {
4018 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4019 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4020 if start >= self.v.len() || overflow {
4024 let end = match start.checked_add(self.chunk_size) {
4025 Some(sum) => cmp::min(self.v.len(), sum),
4026 None => self.v.len(),
4028 let tmp = mem::replace(&mut self.v, &mut []);
4029 let (head, tail) = tmp.split_at_mut(end);
4030 let (_, nth) = head.split_at_mut(start);
4037 fn last(self) -> Option<Self::Item> {
4038 if self.v.is_empty() {
4041 let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
4042 Some(&mut self.v[start..])
4047 #[stable(feature = "rust1", since = "1.0.0")]
4048 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
4050 fn next_back(&mut self) -> Option<&'a mut [T]> {
4051 if self.v.is_empty() {
4054 let remainder = self.v.len() % self.chunk_size;
4055 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4056 let tmp = mem::replace(&mut self.v, &mut []);
4057 let tmp_len = tmp.len();
4058 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4065 #[stable(feature = "rust1", since = "1.0.0")]
4066 impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
4068 #[unstable(feature = "trusted_len", issue = "37572")]
4069 unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
4071 #[stable(feature = "fused", since = "1.26.0")]
4072 impl<T> FusedIterator for ChunksMut<'_, T> {}
4075 unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
4076 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4077 let start = i * self.chunk_size;
4078 let end = match start.checked_add(self.chunk_size) {
4079 None => self.v.len(),
4080 Some(end) => cmp::min(end, self.v.len()),
4082 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4084 fn may_have_side_effect() -> bool { false }
4087 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4088 /// time), starting at the beginning of the slice.
4090 /// When the slice len is not evenly divided by the chunk size, the last
4091 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4092 /// the [`remainder`] function from the iterator.
4094 /// This struct is created by the [`chunks_exact`] method on [slices].
4096 /// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
4097 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4098 /// [slices]: ../../std/primitive.slice.html
4100 #[stable(feature = "chunks_exact", since = "1.31.0")]
4101 pub struct ChunksExact<'a, T:'a> {
4107 impl<'a, T> ChunksExact<'a, T> {
4108 /// Return the remainder of the original slice that is not going to be
4109 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4111 #[stable(feature = "chunks_exact", since = "1.31.0")]
4112 pub fn remainder(&self) -> &'a [T] {
4117 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4118 #[stable(feature = "chunks_exact", since = "1.31.0")]
4119 impl<T> Clone for ChunksExact<'_, T> {
4120 fn clone(&self) -> Self {
4124 chunk_size: self.chunk_size,
4129 #[stable(feature = "chunks_exact", since = "1.31.0")]
4130 impl<'a, T> Iterator for ChunksExact<'a, T> {
4131 type Item = &'a [T];
4134 fn next(&mut self) -> Option<&'a [T]> {
4135 if self.v.len() < self.chunk_size {
4138 let (fst, snd) = self.v.split_at(self.chunk_size);
4145 fn size_hint(&self) -> (usize, Option<usize>) {
4146 let n = self.v.len() / self.chunk_size;
4151 fn count(self) -> usize {
4156 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4157 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4158 if start >= self.v.len() || overflow {
4162 let (_, snd) = self.v.split_at(start);
4169 fn last(mut self) -> Option<Self::Item> {
4174 #[stable(feature = "chunks_exact", since = "1.31.0")]
4175 impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
4177 fn next_back(&mut self) -> Option<&'a [T]> {
4178 if self.v.len() < self.chunk_size {
4181 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4188 #[stable(feature = "chunks_exact", since = "1.31.0")]
4189 impl<T> ExactSizeIterator for ChunksExact<'_, T> {
4190 fn is_empty(&self) -> bool {
4195 #[unstable(feature = "trusted_len", issue = "37572")]
4196 unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
4198 #[stable(feature = "chunks_exact", since = "1.31.0")]
4199 impl<T> FusedIterator for ChunksExact<'_, T> {}
4202 #[stable(feature = "chunks_exact", since = "1.31.0")]
4203 unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
4204 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4205 let start = i * self.chunk_size;
4206 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4208 fn may_have_side_effect() -> bool { false }
4211 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4212 /// elements at a time), starting at the beginning of the slice.
4214 /// When the slice len is not evenly divided by the chunk size, the last up to
4215 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4216 /// [`into_remainder`] function from the iterator.
4218 /// This struct is created by the [`chunks_exact_mut`] method on [slices].
4220 /// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
4221 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4222 /// [slices]: ../../std/primitive.slice.html
4224 #[stable(feature = "chunks_exact", since = "1.31.0")]
4225 pub struct ChunksExactMut<'a, T:'a> {
4231 impl<'a, T> ChunksExactMut<'a, T> {
4232 /// Return the remainder of the original slice that is not going to be
4233 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4235 #[stable(feature = "chunks_exact", since = "1.31.0")]
4236 pub fn into_remainder(self) -> &'a mut [T] {
4241 #[stable(feature = "chunks_exact", since = "1.31.0")]
4242 impl<'a, T> Iterator for ChunksExactMut<'a, T> {
4243 type Item = &'a mut [T];
4246 fn next(&mut self) -> Option<&'a mut [T]> {
4247 if self.v.len() < self.chunk_size {
4250 let tmp = mem::replace(&mut self.v, &mut []);
4251 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4258 fn size_hint(&self) -> (usize, Option<usize>) {
4259 let n = self.v.len() / self.chunk_size;
4264 fn count(self) -> usize {
4269 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4270 let (start, overflow) = n.overflowing_mul(self.chunk_size);
4271 if start >= self.v.len() || overflow {
4275 let tmp = mem::replace(&mut self.v, &mut []);
4276 let (_, snd) = tmp.split_at_mut(start);
4283 fn last(mut self) -> Option<Self::Item> {
4288 #[stable(feature = "chunks_exact", since = "1.31.0")]
4289 impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
4291 fn next_back(&mut self) -> Option<&'a mut [T]> {
4292 if self.v.len() < self.chunk_size {
4295 let tmp = mem::replace(&mut self.v, &mut []);
4296 let tmp_len = tmp.len();
4297 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4304 #[stable(feature = "chunks_exact", since = "1.31.0")]
4305 impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
4306 fn is_empty(&self) -> bool {
4311 #[unstable(feature = "trusted_len", issue = "37572")]
4312 unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
4314 #[stable(feature = "chunks_exact", since = "1.31.0")]
4315 impl<T> FusedIterator for ChunksExactMut<'_, T> {}
4318 #[stable(feature = "chunks_exact", since = "1.31.0")]
4319 unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
4320 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4321 let start = i * self.chunk_size;
4322 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4324 fn may_have_side_effect() -> bool { false }
4327 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4328 /// time), starting at the end of the slice.
4330 /// When the slice len is not evenly divided by the chunk size, the last slice
4331 /// of the iteration will be the remainder.
4333 /// This struct is created by the [`rchunks`] method on [slices].
4335 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
4336 /// [slices]: ../../std/primitive.slice.html
4338 #[stable(feature = "rchunks", since = "1.31.0")]
4339 pub struct RChunks<'a, T:'a> {
4344 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4345 #[stable(feature = "rchunks", since = "1.31.0")]
4346 impl<'a, T> Clone for RChunks<'a, T> {
4347 fn clone(&self) -> RChunks<'a, T> {
4350 chunk_size: self.chunk_size,
4355 #[stable(feature = "rchunks", since = "1.31.0")]
4356 impl<'a, T> Iterator for RChunks<'a, T> {
4357 type Item = &'a [T];
4360 fn next(&mut self) -> Option<&'a [T]> {
4361 if self.v.is_empty() {
4364 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4365 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4372 fn size_hint(&self) -> (usize, Option<usize>) {
4373 if self.v.is_empty() {
4376 let n = self.v.len() / self.chunk_size;
4377 let rem = self.v.len() % self.chunk_size;
4378 let n = if rem > 0 { n+1 } else { n };
4384 fn count(self) -> usize {
4389 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4390 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4391 if end >= self.v.len() || overflow {
4395 // Can't underflow because of the check above
4396 let end = self.v.len() - end;
4397 let start = match end.checked_sub(self.chunk_size) {
4401 let nth = &self.v[start..end];
4402 self.v = &self.v[0..start];
4408 fn last(self) -> Option<Self::Item> {
4409 if self.v.is_empty() {
4412 let rem = self.v.len() % self.chunk_size;
4413 let end = if rem == 0 { self.chunk_size } else { rem };
4414 Some(&self.v[0..end])
4419 #[stable(feature = "rchunks", since = "1.31.0")]
4420 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
4422 fn next_back(&mut self) -> Option<&'a [T]> {
4423 if self.v.is_empty() {
4426 let remainder = self.v.len() % self.chunk_size;
4427 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4428 let (fst, snd) = self.v.split_at(chunksz);
4435 #[stable(feature = "rchunks", since = "1.31.0")]
4436 impl<'a, T> ExactSizeIterator for RChunks<'a, T> {}
4438 #[unstable(feature = "trusted_len", issue = "37572")]
4439 unsafe impl<'a, T> TrustedLen for RChunks<'a, T> {}
4441 #[stable(feature = "rchunks", since = "1.31.0")]
4442 impl<'a, T> FusedIterator for RChunks<'a, T> {}
4445 #[stable(feature = "rchunks", since = "1.31.0")]
4446 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
4447 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4448 let end = self.v.len() - i * self.chunk_size;
4449 let start = match end.checked_sub(self.chunk_size) {
4451 Some(start) => start,
4453 from_raw_parts(self.v.as_ptr().add(start), end - start)
4455 fn may_have_side_effect() -> bool { false }
4458 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4459 /// elements at a time), starting at the end of the slice.
4461 /// When the slice len is not evenly divided by the chunk size, the last slice
4462 /// of the iteration will be the remainder.
4464 /// This struct is created by the [`rchunks_mut`] method on [slices].
4466 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
4467 /// [slices]: ../../std/primitive.slice.html
4469 #[stable(feature = "rchunks", since = "1.31.0")]
4470 pub struct RChunksMut<'a, T:'a> {
4475 #[stable(feature = "rchunks", since = "1.31.0")]
4476 impl<'a, T> Iterator for RChunksMut<'a, T> {
4477 type Item = &'a mut [T];
4480 fn next(&mut self) -> Option<&'a mut [T]> {
4481 if self.v.is_empty() {
4484 let sz = cmp::min(self.v.len(), self.chunk_size);
4485 let tmp = mem::replace(&mut self.v, &mut []);
4486 let tmp_len = tmp.len();
4487 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4494 fn size_hint(&self) -> (usize, Option<usize>) {
4495 if self.v.is_empty() {
4498 let n = self.v.len() / self.chunk_size;
4499 let rem = self.v.len() % self.chunk_size;
4500 let n = if rem > 0 { n + 1 } else { n };
4506 fn count(self) -> usize {
4511 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4512 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4513 if end >= self.v.len() || overflow {
4517 // Can't underflow because of the check above
4518 let end = self.v.len() - end;
4519 let start = match end.checked_sub(self.chunk_size) {
4523 let tmp = mem::replace(&mut self.v, &mut []);
4524 let (head, tail) = tmp.split_at_mut(start);
4525 let (nth, _) = tail.split_at_mut(end - start);
4532 fn last(self) -> Option<Self::Item> {
4533 if self.v.is_empty() {
4536 let rem = self.v.len() % self.chunk_size;
4537 let end = if rem == 0 { self.chunk_size } else { rem };
4538 Some(&mut self.v[0..end])
4543 #[stable(feature = "rchunks", since = "1.31.0")]
4544 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
4546 fn next_back(&mut self) -> Option<&'a mut [T]> {
4547 if self.v.is_empty() {
4550 let remainder = self.v.len() % self.chunk_size;
4551 let sz = if remainder != 0 { remainder } else { self.chunk_size };
4552 let tmp = mem::replace(&mut self.v, &mut []);
4553 let (head, tail) = tmp.split_at_mut(sz);
4560 #[stable(feature = "rchunks", since = "1.31.0")]
4561 impl<'a, T> ExactSizeIterator for RChunksMut<'a, T> {}
4563 #[unstable(feature = "trusted_len", issue = "37572")]
4564 unsafe impl<'a, T> TrustedLen for RChunksMut<'a, T> {}
4566 #[stable(feature = "rchunks", since = "1.31.0")]
4567 impl<'a, T> FusedIterator for RChunksMut<'a, T> {}
4570 #[stable(feature = "rchunks", since = "1.31.0")]
4571 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
4572 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4573 let end = self.v.len() - i * self.chunk_size;
4574 let start = match end.checked_sub(self.chunk_size) {
4576 Some(start) => start,
4578 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
4580 fn may_have_side_effect() -> bool { false }
4583 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4584 /// time), starting at the end of the slice.
4586 /// When the slice len is not evenly divided by the chunk size, the last
4587 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
4588 /// the [`remainder`] function from the iterator.
4590 /// This struct is created by the [`rchunks_exact`] method on [slices].
4592 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
4593 /// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
4594 /// [slices]: ../../std/primitive.slice.html
4596 #[stable(feature = "rchunks", since = "1.31.0")]
4597 pub struct RChunksExact<'a, T:'a> {
4603 impl<'a, T> RChunksExact<'a, T> {
4604 /// Return the remainder of the original slice that is not going to be
4605 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4607 #[stable(feature = "rchunks", since = "1.31.0")]
4608 pub fn remainder(&self) -> &'a [T] {
4613 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4614 #[stable(feature = "rchunks", since = "1.31.0")]
4615 impl<'a, T> Clone for RChunksExact<'a, T> {
4616 fn clone(&self) -> RChunksExact<'a, T> {
4620 chunk_size: self.chunk_size,
4625 #[stable(feature = "rchunks", since = "1.31.0")]
4626 impl<'a, T> Iterator for RChunksExact<'a, T> {
4627 type Item = &'a [T];
4630 fn next(&mut self) -> Option<&'a [T]> {
4631 if self.v.len() < self.chunk_size {
4634 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
4641 fn size_hint(&self) -> (usize, Option<usize>) {
4642 let n = self.v.len() / self.chunk_size;
4647 fn count(self) -> usize {
4652 fn nth(&mut self, n: usize) -> Option<Self::Item> {
4653 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4654 if end >= self.v.len() || overflow {
4658 let (fst, _) = self.v.split_at(self.v.len() - end);
4665 fn last(mut self) -> Option<Self::Item> {
4670 #[stable(feature = "rchunks", since = "1.31.0")]
4671 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
4673 fn next_back(&mut self) -> Option<&'a [T]> {
4674 if self.v.len() < self.chunk_size {
4677 let (fst, snd) = self.v.split_at(self.chunk_size);
4684 #[stable(feature = "rchunks", since = "1.31.0")]
4685 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
4686 fn is_empty(&self) -> bool {
4691 #[unstable(feature = "trusted_len", issue = "37572")]
4692 unsafe impl<'a, T> TrustedLen for RChunksExact<'a, T> {}
4694 #[stable(feature = "rchunks", since = "1.31.0")]
4695 impl<'a, T> FusedIterator for RChunksExact<'a, T> {}
4698 #[stable(feature = "rchunks", since = "1.31.0")]
4699 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
4700 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4701 let end = self.v.len() - i * self.chunk_size;
4702 let start = end - self.chunk_size;
4703 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
4705 fn may_have_side_effect() -> bool { false }
4708 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4709 /// elements at a time), starting at the end of the slice.
4711 /// When the slice len is not evenly divided by the chunk size, the last up to
4712 /// `chunk_size-1` elements will be omitted but can be retrieved from the
4713 /// [`into_remainder`] function from the iterator.
4715 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
4717 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
4718 /// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
4719 /// [slices]: ../../std/primitive.slice.html
4721 #[stable(feature = "rchunks", since = "1.31.0")]
4722 pub struct RChunksExactMut<'a, T:'a> {
4728 impl<'a, T> RChunksExactMut<'a, T> {
4729 /// Return the remainder of the original slice that is not going to be
4730 /// returned by the iterator. The returned slice has at most `chunk_size-1`
4732 #[stable(feature = "rchunks", since = "1.31.0")]
4733 pub fn into_remainder(self) -> &'a mut [T] {
4738 #[stable(feature = "rchunks", since = "1.31.0")]
4739 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
4740 type Item = &'a mut [T];
4743 fn next(&mut self) -> Option<&'a mut [T]> {
4744 if self.v.len() < self.chunk_size {
4747 let tmp = mem::replace(&mut self.v, &mut []);
4748 let tmp_len = tmp.len();
4749 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
4756 fn size_hint(&self) -> (usize, Option<usize>) {
4757 let n = self.v.len() / self.chunk_size;
4762 fn count(self) -> usize {
4767 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
4768 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4769 if end >= self.v.len() || overflow {
4773 let tmp = mem::replace(&mut self.v, &mut []);
4774 let tmp_len = tmp.len();
4775 let (fst, _) = tmp.split_at_mut(tmp_len - end);
4782 fn last(mut self) -> Option<Self::Item> {
4787 #[stable(feature = "rchunks", since = "1.31.0")]
4788 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
4790 fn next_back(&mut self) -> Option<&'a mut [T]> {
4791 if self.v.len() < self.chunk_size {
4794 let tmp = mem::replace(&mut self.v, &mut []);
4795 let (head, tail) = tmp.split_at_mut(self.chunk_size);
4802 #[stable(feature = "rchunks", since = "1.31.0")]
4803 impl<'a, T> ExactSizeIterator for RChunksExactMut<'a, T> {
4804 fn is_empty(&self) -> bool {
4809 #[unstable(feature = "trusted_len", issue = "37572")]
4810 unsafe impl<'a, T> TrustedLen for RChunksExactMut<'a, T> {}
4812 #[stable(feature = "rchunks", since = "1.31.0")]
4813 impl<'a, T> FusedIterator for RChunksExactMut<'a, T> {}
4816 #[stable(feature = "rchunks", since = "1.31.0")]
4817 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
4818 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
4819 let end = self.v.len() - i * self.chunk_size;
4820 let start = end - self.chunk_size;
4821 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
4823 fn may_have_side_effect() -> bool { false }
4830 /// Forms a slice from a pointer and a length.
4832 /// The `len` argument is the number of **elements**, not the number of bytes.
4836 /// This function is unsafe as there is no guarantee that the given pointer is
4837 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
4838 /// lifetime for the returned slice.
4840 /// `data` must be non-null and aligned, even for zero-length slices. One
4841 /// reason for this is that enum layout optimizations may rely on references
4842 /// (including slices of any length) being aligned and non-null to distinguish
4843 /// them from other data. You can obtain a pointer that is usable as `data`
4844 /// for zero-length slices using [`NonNull::dangling()`].
4846 /// The total size of the slice must be no larger than `isize::MAX` **bytes**
4847 /// in memory. See the safety documentation of [`pointer::offset`].
4851 /// The lifetime for the returned slice is inferred from its usage. To
4852 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
4853 /// source lifetime is safe in the context, such as by providing a helper
4854 /// function taking the lifetime of a host value for the slice, or by explicit
4862 /// // manifest a slice for a single element
4864 /// let ptr = &x as *const _;
4865 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
4866 /// assert_eq!(slice[0], 42);
4869 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
4870 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
4872 #[stable(feature = "rust1", since = "1.0.0")]
4873 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
// Debug builds sanity-check the documented safety requirements (alignment and
// total size <= isize::MAX bytes); release builds compile these away.
4874 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
4875 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4876 "attempt to create slice covering half the address space");
// Assemble the fat pointer (data, len) and reinterpret it as `&[T]` through
// the `Repr` union.
4877 Repr { raw: FatPtr { data, len } }.rust
4880 /// Performs the same functionality as [`from_raw_parts`], except that a
4881 /// mutable slice is returned.
4883 /// This function is unsafe for the same reasons as [`from_raw_parts`], as well
4884 /// as not being able to provide a non-aliasing guarantee of the returned
4885 /// mutable slice. `data` must be non-null and aligned even for zero-length
4886 /// slices as with [`from_raw_parts`]. The total size of the slice must be no
4887 /// larger than `isize::MAX` **bytes** in memory.
4889 /// See the documentation of [`from_raw_parts`] for more details.
4891 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
4893 #[stable(feature = "rust1", since = "1.0.0")]
4894 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
// Same debug-only checks as `from_raw_parts`: alignment, and total byte size
// bounded by isize::MAX.
4895 debug_assert!(data as usize % mem::align_of::<T>() == 0, "attempt to create unaligned slice");
4896 debug_assert!(mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
4897 "attempt to create slice covering half the address space");
// Build the fat pointer and view it through the union's `&mut [T]` arm.
4898 Repr { raw: FatPtr { data, len } }.rust_mut
4901 /// Converts a reference to T into a slice of length 1 (without copying).
4902 #[stable(feature = "from_ref", since = "1.28.0")]
4903 pub fn from_ref<T>(s: &T) -> &[T] {
// SAFETY: a `&T` is always non-null, aligned, and valid for reads of one `T`,
// which is exactly what `from_raw_parts` requires for `len == 1`.
4905 from_raw_parts(s, 1)
4909 /// Converts a mutable reference to T into a mutable slice of length 1 (without copying).
4910 #[stable(feature = "from_ref", since = "1.28.0")]
4911 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
// SAFETY: a `&mut T` is non-null, aligned, unique, and valid for reads and
// writes of one `T` — the full `from_raw_parts_mut` contract for `len == 1`.
4913 from_raw_parts_mut(s, 1)
4917 // This function is public only because there is no other way to unit test heapsort.
4918 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")]
// Thin test-only wrapper: delegates to the private `sort` module, adapting the
// by-value comparator into the `&mut` form the implementation takes.
4920 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
4921 where F: FnMut(&T, &T) -> bool
4923 sort::heapsort(v, &mut is_less);
4927 // Comparison traits
4931 /// Calls implementation provided memcmp.
4933 /// Interprets the data as u8.
4935 /// Returns 0 for equal, < 0 for less than and > 0 for greater
4936 /// than the other slice. Callers must ensure both pointers are valid for
4937 // FIXME(#32610): Return type should be c_int
4938 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
4941 #[stable(feature = "rust1", since = "1.0.0")]
// Heterogeneous element types (`A: PartialEq<B>`) so `[A] == [B]` works
// whenever the elements themselves compare.
4942 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
4943 fn eq(&self, other: &[B]) -> bool {
// Routed through SlicePartialEq so the bytewise/memcmp specialization
// defined below can apply for eligible element types.
4944 SlicePartialEq::equal(self, other)
4947 fn ne(&self, other: &[B]) -> bool {
4948 SlicePartialEq::not_equal(self, other)
4952 #[stable(feature = "rust1", since = "1.0.0")]
// Marker only: a slice is `Eq` whenever its elements are.
4953 impl<T: Eq> Eq for [T] {}
4955 /// Implements comparison of vectors lexicographically.
4956 #[stable(feature = "rust1", since = "1.0.0")]
4957 impl<T: Ord> Ord for [T] {
4958 fn cmp(&self, other: &[T]) -> Ordering {
// Dispatch via SliceOrd so the `[u8]` memcmp specialization can apply.
4959 SliceOrd::compare(self, other)
4963 /// Implements comparison of vectors lexicographically.
4964 #[stable(feature = "rust1", since = "1.0.0")]
4965 impl<T: PartialOrd> PartialOrd for [T] {
4966 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
// Dispatch via SlicePartialOrd so totally-ordered element types can take
// the specialized (SliceOrd-backed) path.
4967 SlicePartialOrd::partial_compare(self, other)
4972 // intermediate trait for specialization of slice's PartialEq
4973 trait SlicePartialEq<B> {
4974 fn equal(&self, other: &[B]) -> bool;
// Default `ne` in terms of `eq`; specializations only need to supply `equal`.
4976 fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) }
4979 // Generic slice equality
4980 impl<A, B> SlicePartialEq<B> for [A]
4981 where A: PartialEq<B>
// `default fn`: overridable by the bytewise specialization below.
4983 default fn equal(&self, other: &[B]) -> bool {
// Unequal lengths can never be equal — cheap early exit.
4984 if self.len() != other.len() {
// Element-by-element comparison, bailing out at the first mismatch.
4988 for i in 0..self.len() {
4989 if !self[i].eq(&other[i]) {
4998 // Use memcmp for bytewise equality when the types allow
4999 impl<A> SlicePartialEq<A> for [A]
5000 where A: PartialEq<A> + BytewiseEquality
5002 fn equal(&self, other: &[A]) -> bool {
5003 if self.len() != other.len() {
// Fast path: same length and same starting pointer means the two slices
// cover identical memory, so no byte comparison is needed.
5006 if self.as_ptr() == other.as_ptr() {
// `size_of_val` gives the total byte length of the slice, which is what
// memcmp wants.
5010 let size = mem::size_of_val(self);
5011 memcmp(self.as_ptr() as *const u8,
5012 other.as_ptr() as *const u8, size) == 0
5018 // intermediate trait for specialization of slice's PartialOrd
5019 trait SlicePartialOrd<B> {
5020 fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
5023 impl<A> SlicePartialOrd<A> for [A]
// Lexicographic partial order: compare the common prefix element-wise, then
// break ties on length.
5026 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5027 let l = cmp::min(self.len(), other.len());
5029 // Slice to the loop iteration range to enable bound check
5030 // elimination in the compiler
5031 let lhs = &self[..l];
5032 let rhs = &other[..l];
// First non-equal (or incomparable, i.e. None) element decides the result.
5035 match lhs[i].partial_cmp(&rhs[i]) {
5036 Some(Ordering::Equal) => (),
5037 non_eq => return non_eq,
// Entire common prefix was equal: the shorter slice orders first.
5041 self.len().partial_cmp(&other.len())
5045 impl<A> SlicePartialOrd<A> for [A]
// NOTE(review): the specialization bound is not visible in this excerpt —
// presumably `A: Ord`, since a total order lets this defer to SliceOrd and
// always wrap in `Some`. Confirm against the full source.
5048 default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
5049 Some(SliceOrd::compare(self, other))
5054 // intermediate trait for specialization of slice's Ord
5056 fn compare(&self, other: &[B]) -> Ordering;
5059 impl<A> SliceOrd<A> for [A]
// Lexicographic total order: element-wise over the common prefix, then
// length as the tie-breaker. Mirrors the PartialOrd default impl above.
5062 default fn compare(&self, other: &[A]) -> Ordering {
5063 let l = cmp::min(self.len(), other.len());
5065 // Slice to the loop iteration range to enable bound check
5066 // elimination in the compiler
5067 let lhs = &self[..l];
5068 let rhs = &other[..l];
// First differing element decides the ordering.
5071 match lhs[i].cmp(&rhs[i]) {
5072 Ordering::Equal => (),
5073 non_eq => return non_eq,
// Common prefix equal: the shorter slice is Less.
5077 self.len().cmp(&other.len())
5081 // memcmp compares a sequence of unsigned bytes lexicographically.
5082 // this matches the order we want for [u8], but no others (not even [i8]).
5083 impl SliceOrd<u8> for [u8] {
5085 fn compare(&self, other: &[u8]) -> Ordering {
// Compare only the common prefix; memcmp's sign gives the ordering.
5086 let order = unsafe {
5087 memcmp(self.as_ptr(), other.as_ptr(),
5088 cmp::min(self.len(), other.len()))
// order == 0: common prefix equal, so fall back to comparing lengths.
5091 self.len().cmp(&other.len())
5092 } else if order < 0 {
5101 /// Trait implemented for types that can be compared for equality using
5102 /// their bytewise representation
5103 trait BytewiseEquality { }
// Helper macro: emit an empty marker-trait impl for each listed type.
5105 macro_rules! impl_marker_for {
5106 ($traitname:ident, $($ty:ty)*) => {
5108 impl $traitname for $ty { }
// All listed types have the property that `==` is equivalent to comparing
// raw bytes (fixed-size integers, plus `char` and `bool`, whose valid values
// compare equal exactly when their byte representations do).
5113 impl_marker_for!(BytewiseEquality,
5114 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
// Shared-reference slice iterator supports unchecked random access; indexing
// has no side effects.
5117 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
5118 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
5121 fn may_have_side_effect() -> bool { false }
5125 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
// SAFETY(review): caller must keep `i` in bounds per the TrustedRandomAccess
// contract; `ptr.add(i)` is then a valid element address.
5126 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
5127 &mut *self.ptr.add(i)
// Raw pointer arithmetic only — no observable side effects.
5129 fn may_have_side_effect() -> bool { false }
// Specialization hook for `[T]::contains`: lets byte-sized element types use
// memchr instead of a linear `PartialEq` scan.
5132 trait SliceContains: Sized {
5133 fn slice_contains(&self, x: &[Self]) -> bool;
5136 impl<T> SliceContains for T where T: PartialEq {
// Generic fallback: linear scan with `==`; overridable (`default fn`) by the
// u8/i8 memchr specializations below.
5137 default fn slice_contains(&self, x: &[Self]) -> bool {
5138 x.iter().any(|y| *y == *self)
5142 impl SliceContains for u8 {
// Bytes get the optimized memchr search instead of the generic scan.
5143 fn slice_contains(&self, x: &[Self]) -> bool {
5144 memchr::memchr(*self, x).is_some()
5148 impl SliceContains for i8 {
5149 fn slice_contains(&self, x: &[Self]) -> bool {
// An i8 equals another i8 exactly when their bit patterns match, so the
// search can be done on the u8 reinterpretation of the slice.
5150 let byte = *self as u8;
// SAFETY: i8 and u8 have identical size and alignment, so viewing the same
// memory as `&[u8]` of the same length is valid.
5151 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
5152 memchr::memchr(byte, bytes).is_some()