1 // ignore-tidy-filelength
2 // ignore-tidy-undocumented-unsafe
4 //! Slice management and manipulation.
6 //! For more details see [`std::slice`].
8 //! [`std::slice`]: ../../std/slice/index.html
10 #![stable(feature = "rust1", since = "1.0.0")]
12 // How this module is organized.
14 // The library infrastructure for slices is fairly messy. There's
15 // a lot of stuff defined here. Let's keep it clean.
17 // The layout of this file is thus:
19 // * Inherent methods. This is where most of the slice API resides.
20 // * Implementations of a few common traits with important slice ops.
21 // * Definitions of a bunch of iterators.
23 // * The `raw` and `bytes` submodules.
24 // * Boilerplate trait implementations.
27 use crate::cmp::Ordering::{self, Equal, Greater, Less};
29 use crate::intrinsics::{assume, exact_div, is_aligned_and_not_null, unchecked_sub};
32 use crate::marker::{self, Copy, Send, Sized, Sync};
34 use crate::ops::{self, FnMut, Range};
35 use crate::option::Option;
36 use crate::option::Option::{None, Some};
37 use crate::ptr::{self, NonNull};
38 use crate::result::Result;
39 use crate::result::Result::{Err, Ok};
42 feature = "slice_internals",
44 reason = "exposed from core to be reused in std; use the memchr crate"
46 /// Pure rust memchr implementation, taken from rust-memchr
// NOTE(review): fragment — interior lines missing in this extract (embedded source numbering jumps).
59 /// Returns the number of elements in the slice.
64 /// let a = [1, 2, 3];
65 /// assert_eq!(a.len(), 3);
67 #[stable(feature = "rust1", since = "1.0.0")]
68 #[rustc_const_stable(feature = "const_slice_len", since = "1.32.0")]
70 // SAFETY: const sound because we transmute out the length field as a usize (which it must be)
71 #[allow(unused_attributes)]
72 #[allow_internal_unstable(const_fn_union)]
73 pub const fn len(&self) -> usize {
// Reads the length word out of the fat pointer via a union, so it works in const context.
74 unsafe { crate::ptr::Repr { rust: self }.raw.len }
// NOTE(review): fragment — the method body line is among those dropped by extraction.
77 /// Returns `true` if the slice has a length of 0.
82 /// let a = [1, 2, 3];
83 /// assert!(!a.is_empty());
85 #[stable(feature = "rust1", since = "1.0.0")]
86 #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.32.0")]
88 pub const fn is_empty(&self) -> bool {
// NOTE(review): fragment — the method body line is among those dropped by extraction.
92 /// Returns the first element of the slice, or `None` if it is empty.
97 /// let v = [10, 40, 30];
98 /// assert_eq!(Some(&10), v.first());
100 /// let w: &[i32] = &[];
101 /// assert_eq!(None, w.first());
103 #[stable(feature = "rust1", since = "1.0.0")]
105 pub fn first(&self) -> Option<&T> {
// NOTE(review): fragment — the method body line is among those dropped by extraction.
// Doc fix: the return type is a mutable *reference*, not a raw pointer.
109 /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
114 /// let x = &mut [0, 1, 2];
116 /// if let Some(first) = x.first_mut() {
119 /// assert_eq!(x, &[5, 1, 2]);
121 #[stable(feature = "rust1", since = "1.0.0")]
123 pub fn first_mut(&mut self) -> Option<&mut T> {
// NOTE(review): fragment — closing lines missing in this extract.
127 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
132 /// let x = &[0, 1, 2];
134 /// if let Some((first, elements)) = x.split_first() {
135 /// assert_eq!(first, &0);
136 /// assert_eq!(elements, &[1, 2]);
139 #[stable(feature = "slice_splits", since = "1.5.0")]
141 pub fn split_first(&self) -> Option<(&T, &[T])> {
// Emptiness is checked first, so the `[0]` and `[1..]` indexing below cannot panic.
142 if self.is_empty() { None } else { Some((&self[0], &self[1..])) }
// NOTE(review): fragment — the empty-slice guard (original lines ~162-164) was dropped by
// extraction; without it `split_at_mut(1)` would panic on an empty slice. Restore upstream.
145 /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
150 /// let x = &mut [0, 1, 2];
152 /// if let Some((first, elements)) = x.split_first_mut() {
157 /// assert_eq!(x, &[3, 4, 5]);
159 #[stable(feature = "slice_splits", since = "1.5.0")]
161 pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
// Uses split_at_mut to obtain two disjoint mutable borrows without unsafe code.
165 let split = self.split_at_mut(1);
166 Some((&mut split.0[0], split.1))
// NOTE(review): fragment — closing lines missing in this extract.
170 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
175 /// let x = &[0, 1, 2];
177 /// if let Some((last, elements)) = x.split_last() {
178 /// assert_eq!(last, &2);
179 /// assert_eq!(elements, &[0, 1]);
182 #[stable(feature = "slice_splits", since = "1.5.0")]
184 pub fn split_last(&self) -> Option<(&T, &[T])> {
185 let len = self.len();
// `len == 0` is ruled out first, so `len - 1` below cannot underflow.
186 if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) }
// NOTE(review): fragment — the empty-slice guard (original lines ~207-209) was dropped by
// extraction; without it `len - 1` underflows for an empty slice. Restore from upstream.
189 /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
194 /// let x = &mut [0, 1, 2];
196 /// if let Some((last, elements)) = x.split_last_mut() {
201 /// assert_eq!(x, &[4, 5, 3]);
203 #[stable(feature = "slice_splits", since = "1.5.0")]
205 pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
206 let len = self.len();
210 let split = self.split_at_mut(len - 1);
// split.1 is the one-element tail; split.0 is everything before it.
211 Some((&mut split.1[0], split.0))
// NOTE(review): fragment — the final `self.get(last_idx)` line and closing brace are missing.
215 /// Returns the last element of the slice, or `None` if it is empty.
220 /// let v = [10, 40, 30];
221 /// assert_eq!(Some(&30), v.last());
223 /// let w: &[i32] = &[];
224 /// assert_eq!(None, w.last());
226 #[stable(feature = "rust1", since = "1.0.0")]
228 pub fn last(&self) -> Option<&T> {
// checked_sub + `?` returns None for the empty slice instead of underflowing.
229 let last_idx = self.len().checked_sub(1)?;
// NOTE(review): fragment — closing lines missing in this extract.
// Doc fix: the return type is a mutable *reference*, not a raw pointer; also document None.
233 /// Returns a mutable reference to the last item in the slice, or `None` if it is empty.
238 /// let x = &mut [0, 1, 2];
240 /// if let Some(last) = x.last_mut() {
243 /// assert_eq!(x, &[0, 1, 10]);
245 #[stable(feature = "rust1", since = "1.0.0")]
247 pub fn last_mut(&mut self) -> Option<&mut T> {
// checked_sub + `?` returns None for the empty slice instead of underflowing.
248 let last_idx = self.len().checked_sub(1)?;
249 self.get_mut(last_idx)
// NOTE(review): fragment — the `where` clause and body (`index.get(self)`) were dropped.
252 /// Returns a reference to an element or subslice depending on the type of
255 /// - If given a position, returns a reference to the element at that
256 /// position or `None` if out of bounds.
257 /// - If given a range, returns the subslice corresponding to that range,
258 /// or `None` if out of bounds.
263 /// let v = [10, 40, 30];
264 /// assert_eq!(Some(&40), v.get(1));
265 /// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
266 /// assert_eq!(None, v.get(3));
267 /// assert_eq!(None, v.get(0..4));
269 #[stable(feature = "rust1", since = "1.0.0")]
271 pub fn get<I>(&self, index: I) -> Option<&I::Output>
// NOTE(review): fragment — the `where` clause and body were dropped by extraction.
278 /// Returns a mutable reference to an element or subslice depending on the
279 /// type of index (see [`get`]) or `None` if the index is out of bounds.
281 /// [`get`]: #method.get
286 /// let x = &mut [0, 1, 2];
288 /// if let Some(elem) = x.get_mut(1) {
291 /// assert_eq!(x, &[0, 42, 2]);
293 #[stable(feature = "rust1", since = "1.0.0")]
295 pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
// NOTE(review): fragment — the `where I: SliceIndex<Self>` bound line was dropped.
302 /// Returns a reference to an element or subslice, without doing bounds
305 /// This is generally not recommended, use with caution!
306 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
307 /// even if the resulting reference is not used.
308 /// For a safe alternative see [`get`].
310 /// [`get`]: #method.get
311 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
316 /// let x = &[1, 2, 4];
319 /// assert_eq!(x.get_unchecked(1), &2);
322 #[stable(feature = "rust1", since = "1.0.0")]
324 pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
// Delegates to the SliceIndex implementation; the caller guarantees in-bounds.
328 index.get_unchecked(self)
// NOTE(review): fragment — the `where I: SliceIndex<Self>` bound line was dropped.
331 /// Returns a mutable reference to an element or subslice, without doing
334 /// This is generally not recommended, use with caution!
335 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
336 /// even if the resulting reference is not used.
337 /// For a safe alternative see [`get_mut`].
339 /// [`get_mut`]: #method.get_mut
340 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
345 /// let x = &mut [1, 2, 4];
348 /// let elem = x.get_unchecked_mut(1);
351 /// assert_eq!(x, &[1, 13, 4]);
353 #[stable(feature = "rust1", since = "1.0.0")]
355 pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
// Delegates to the SliceIndex implementation; the caller guarantees in-bounds.
359 index.get_unchecked_mut(self)
// NOTE(review): fragment — closing lines missing in this extract.
362 /// Returns a raw pointer to the slice's buffer.
364 /// The caller must ensure that the slice outlives the pointer this
365 /// function returns, or else it will end up pointing to garbage.
367 /// The caller must also ensure that the memory the pointer (non-transitively) points to
368 /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
369 /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
371 /// Modifying the container referenced by this slice may cause its buffer
372 /// to be reallocated, which would also make any pointers to it invalid.
377 /// let x = &[1, 2, 4];
378 /// let x_ptr = x.as_ptr();
381 /// for i in 0..x.len() {
382 /// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
387 /// [`as_mut_ptr`]: #method.as_mut_ptr
388 #[stable(feature = "rust1", since = "1.0.0")]
389 #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")]
391 pub const fn as_ptr(&self) -> *const T {
// Fat-to-thin pointer cast: discards the length metadata, keeping the data address.
392 self as *const [T] as *const T
// NOTE(review): fragment — closing lines missing in this extract.
395 /// Returns an unsafe mutable pointer to the slice's buffer.
397 /// The caller must ensure that the slice outlives the pointer this
398 /// function returns, or else it will end up pointing to garbage.
400 /// Modifying the container referenced by this slice may cause its buffer
401 /// to be reallocated, which would also make any pointers to it invalid.
406 /// let x = &mut [1, 2, 4];
407 /// let x_ptr = x.as_mut_ptr();
410 /// for i in 0..x.len() {
411 /// *x_ptr.add(i) += 2;
414 /// assert_eq!(x, &[3, 4, 6]);
416 #[stable(feature = "rust1", since = "1.0.0")]
418 pub fn as_mut_ptr(&mut self) -> *mut T {
// Fat-to-thin pointer cast: discards the length metadata, keeping the data address.
419 self as *mut [T] as *mut T
// NOTE(review): fragment — the final `start..end` expression and closing brace were dropped.
// Doc fix below: "the size of the size" -> "the size of the slice".
422 /// Returns the two raw pointers spanning the slice.
424 /// The returned range is half-open, which means that the end pointer
425 /// points *one past* the last element of the slice. This way, an empty
426 /// slice is represented by two equal pointers, and the difference between
427 /// the two pointers represents the size of the slice.
429 /// See [`as_ptr`] for warnings on using these pointers. The end pointer
430 /// requires extra caution, as it does not point to a valid element in the
433 /// This function is useful for interacting with foreign interfaces which
434 /// use two pointers to refer to a range of elements in memory, as is
437 /// It can also be useful to check if a pointer to an element refers to an
438 /// element of this slice:
441 /// #![feature(slice_ptr_range)]
443 /// let a = [1, 2, 3];
444 /// let x = &a[1] as *const _;
445 /// let y = &5 as *const _;
447 /// assert!(a.as_ptr_range().contains(&x));
448 /// assert!(!a.as_ptr_range().contains(&y));
451 /// [`as_ptr`]: #method.as_ptr
452 #[unstable(feature = "slice_ptr_range", issue = "65807")]
454 pub fn as_ptr_range(&self) -> Range<*const T> {
455 // The `add` here is safe, because:
457 // - Both pointers are part of the same object, as pointing directly
458 // past the object also counts.
460 // - The size of the slice is never larger than isize::MAX bytes, as
462 // - https://github.com/rust-lang/unsafe-code-guidelines/issues/102#issuecomment-473340447
463 // - https://doc.rust-lang.org/reference/behavior-considered-undefined.html
464 // - https://doc.rust-lang.org/core/slice/fn.from_raw_parts.html#safety
465 // (This doesn't seem normative yet, but the very same assumption is
466 // made in many places, including the Index implementation of slices.)
468 // - There is no wrapping around involved, as slices do not wrap past
469 // the end of the address space.
471 // See the documentation of pointer::add.
472 let start = self.as_ptr();
473 let end = unsafe { start.add(self.len()) };
// NOTE(review): fragment — the final `start..end` expression and closing brace were dropped.
// Doc fix below: "the size of the size" -> "the size of the slice".
477 /// Returns the two unsafe mutable pointers spanning the slice.
479 /// The returned range is half-open, which means that the end pointer
480 /// points *one past* the last element of the slice. This way, an empty
481 /// slice is represented by two equal pointers, and the difference between
482 /// the two pointers represents the size of the slice.
484 /// See [`as_mut_ptr`] for warnings on using these pointers. The end
485 /// pointer requires extra caution, as it does not point to a valid element
488 /// This function is useful for interacting with foreign interfaces which
489 /// use two pointers to refer to a range of elements in memory, as is
492 /// [`as_mut_ptr`]: #method.as_mut_ptr
493 #[unstable(feature = "slice_ptr_range", issue = "65807")]
495 pub fn as_mut_ptr_range(&mut self) -> Range<*mut T> {
496 // See as_ptr_range() above for why `add` here is safe.
497 let start = self.as_mut_ptr();
498 let end = unsafe { start.add(self.len()) };
// NOTE(review): fragment — the unsafe `ptr::swap(pa, pb)` call and closing brace were dropped.
502 /// Swaps two elements in the slice.
506 /// * a - The index of the first element
507 /// * b - The index of the second element
511 /// Panics if `a` or `b` are out of bounds.
516 /// let mut v = ["a", "b", "c", "d"];
518 /// assert!(v == ["a", "d", "c", "b"]);
520 #[stable(feature = "rust1", since = "1.0.0")]
522 pub fn swap(&mut self, a: usize, b: usize) {
524 // Can't take two mutable loans from one vector, so instead just cast
525 // them to their raw pointers to do the swap
// Bounds checks happen here via the safe indexing, before any raw-pointer work.
526 let pa: *mut T = &mut self[a];
527 let pb: *mut T = &mut self[b];
// NOTE(review): fragment — extraction dropped the `let ln = self.len();` binding, the
// surrounding `unsafe` blocks, the `i += chunk;` loop increments, early `return`s after the
// fast paths, and the final scalar swap loop. Restore from upstream before building.
532 /// Reverses the order of elements in the slice, in place.
537 /// let mut v = [1, 2, 3];
539 /// assert!(v == [3, 2, 1]);
541 #[stable(feature = "rust1", since = "1.0.0")]
543 pub fn reverse(&mut self) {
544 let mut i: usize = 0;
547 // For very small types, all the individual reads in the normal
548 // path perform poorly. We can do better, given efficient unaligned
549 // load/store, by loading a larger chunk and reversing a register.
551 // Ideally LLVM would do this for us, as it knows better than we do
552 // whether unaligned reads are efficient (since that changes between
553 // different ARM versions, for example) and what the best chunk size
554 // would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
555 // the loop, so we need to do this ourselves. (Hypothesis: reverse
556 // is troublesome because the sides can be aligned differently --
557 // will be, when the length is odd -- so there's no way of emitting
558 // pre- and postludes to use fully-aligned SIMD in the middle.)
560 let fast_unaligned = cfg!(any(target_arch = "x86", target_arch = "x86_64"));
562 if fast_unaligned && mem::size_of::<T>() == 1 {
563 // Use the llvm.bswap intrinsic to reverse u8s in a usize
564 let chunk = mem::size_of::<usize>();
565 while i + chunk - 1 < ln / 2 {
567 let pa: *mut T = self.get_unchecked_mut(i);
568 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
569 let va = ptr::read_unaligned(pa as *mut usize);
570 let vb = ptr::read_unaligned(pb as *mut usize);
571 ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
572 ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
578 if fast_unaligned && mem::size_of::<T>() == 2 {
579 // Use rotate-by-16 to reverse u16s in a u32
580 let chunk = mem::size_of::<u32>() / 2;
581 while i + chunk - 1 < ln / 2 {
583 let pa: *mut T = self.get_unchecked_mut(i);
584 let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
585 let va = ptr::read_unaligned(pa as *mut u32);
586 let vb = ptr::read_unaligned(pb as *mut u32);
587 ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
588 ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
595 // Unsafe swap to avoid the bounds check in safe swap.
597 let pa: *mut T = self.get_unchecked_mut(i);
598 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
// NOTE(review): fragment — the wrapping `unsafe` block and the non-ZST `else` branch
// (`ptr.add(self.len())`) were dropped by extraction.
605 /// Returns an iterator over the slice.
610 /// let x = &[1, 2, 4];
611 /// let mut iterator = x.iter();
613 /// assert_eq!(iterator.next(), Some(&1));
614 /// assert_eq!(iterator.next(), Some(&2));
615 /// assert_eq!(iterator.next(), Some(&4));
616 /// assert_eq!(iterator.next(), None);
618 #[stable(feature = "rust1", since = "1.0.0")]
620 pub fn iter(&self) -> Iter<'_, T> {
622 let ptr = self.as_ptr();
// Slice data pointers are never null, so the optimizer may rely on it.
623 assume(!ptr.is_null());
// For zero-sized types the end marker counts elements, not bytes, hence the u8 cast.
625 let end = if mem::size_of::<T>() == 0 {
626 (ptr as *const u8).wrapping_add(self.len()) as *const T
631 Iter { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: marker::PhantomData }
// NOTE(review): fragment — the wrapping `unsafe` block and the non-ZST `else` branch
// were dropped by extraction; mirrors `iter` above.
635 /// Returns an iterator that allows modifying each value.
640 /// let x = &mut [1, 2, 4];
641 /// for elem in x.iter_mut() {
644 /// assert_eq!(x, &[3, 4, 6]);
646 #[stable(feature = "rust1", since = "1.0.0")]
648 pub fn iter_mut(&mut self) -> IterMut<'_, T> {
650 let ptr = self.as_mut_ptr();
651 assume(!ptr.is_null());
// For zero-sized types the end marker counts elements, not bytes, hence the u8 cast.
653 let end = if mem::size_of::<T>() == 0 {
654 (ptr as *mut u8).wrapping_add(self.len()) as *mut T
659 IterMut { ptr: NonNull::new_unchecked(ptr), end, _marker: marker::PhantomData }
// NOTE(review): fragment — the documented `assert!(size != 0)` (original line ~692) is among
// the dropped lines; without it the Panics promise below is unbacked. Restore from upstream.
663 /// Returns an iterator over all contiguous windows of length
664 /// `size`. The windows overlap. If the slice is shorter than
665 /// `size`, the iterator returns no values.
669 /// Panics if `size` is 0.
674 /// let slice = ['r', 'u', 's', 't'];
675 /// let mut iter = slice.windows(2);
676 /// assert_eq!(iter.next().unwrap(), &['r', 'u']);
677 /// assert_eq!(iter.next().unwrap(), &['u', 's']);
678 /// assert_eq!(iter.next().unwrap(), &['s', 't']);
679 /// assert!(iter.next().is_none());
682 /// If the slice is shorter than `size`:
685 /// let slice = ['f', 'o', 'o'];
686 /// let mut iter = slice.windows(4);
687 /// assert!(iter.next().is_none());
689 #[stable(feature = "rust1", since = "1.0.0")]
691 pub fn windows(&self, size: usize) -> Windows<'_, T> {
693 Windows { v: self, size }
// NOTE(review): fragment — closing brace missing in this extract.
696 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
697 /// beginning of the slice.
699 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
700 /// slice, then the last chunk will not have length `chunk_size`.
702 /// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
703 /// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
708 /// Panics if `chunk_size` is 0.
713 /// let slice = ['l', 'o', 'r', 'e', 'm'];
714 /// let mut iter = slice.chunks(2);
715 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
716 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
717 /// assert_eq!(iter.next().unwrap(), &['m']);
718 /// assert!(iter.next().is_none());
721 /// [`chunks_exact`]: #method.chunks_exact
722 /// [`rchunks`]: #method.rchunks
723 #[stable(feature = "rust1", since = "1.0.0")]
725 pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
// Zero chunk size would loop forever in the iterator; reject it eagerly.
726 assert!(chunk_size != 0);
727 Chunks { v: self, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
730 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
731 /// beginning of the slice.
733 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
734 /// length of the slice, then the last chunk will not have length `chunk_size`.
736 /// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
737 /// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
738 /// the end of the slice.
742 /// Panics if `chunk_size` is 0.
747 /// let v = &mut [0, 0, 0, 0, 0];
748 /// let mut count = 1;
750 /// for chunk in v.chunks_mut(2) {
751 /// for elem in chunk.iter_mut() {
756 /// assert_eq!(v, &[1, 1, 2, 2, 3]);
759 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
760 /// [`rchunks_mut`]: #method.rchunks_mut
761 #[stable(feature = "rust1", since = "1.0.0")]
763 pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> {
// Zero chunk size would loop forever in the iterator; reject it eagerly.
764 assert!(chunk_size != 0);
765 ChunksMut { v: self, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
768 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
769 /// beginning of the slice.
771 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
772 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
773 /// from the `remainder` function of the iterator.
775 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
776 /// resulting code better than in the case of [`chunks`].
778 /// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
779 /// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
783 /// Panics if `chunk_size` is 0.
788 /// let slice = ['l', 'o', 'r', 'e', 'm'];
789 /// let mut iter = slice.chunks_exact(2);
790 /// assert_eq!(iter.next().unwrap(), &['l', 'o']);
791 /// assert_eq!(iter.next().unwrap(), &['r', 'e']);
792 /// assert!(iter.next().is_none());
793 /// assert_eq!(iter.remainder(), &['m']);
796 /// [`chunks`]: #method.chunks
797 /// [`rchunks_exact`]: #method.rchunks_exact
798 #[stable(feature = "chunks_exact", since = "1.31.0")]
800 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
801 assert!(chunk_size != 0);
// Split off the tail that doesn't fill a whole chunk; it becomes `remainder()`.
802 let rem = self.len() % chunk_size;
803 let len = self.len() - rem;
804 let (fst, snd) = self.split_at(len);
805 ChunksExact { v: fst, rem: snd, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
808 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
809 /// beginning of the slice.
811 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
812 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
813 /// retrieved from the `into_remainder` function of the iterator.
815 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
816 /// resulting code better than in the case of [`chunks_mut`].
818 /// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
819 /// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
824 /// Panics if `chunk_size` is 0.
829 /// let v = &mut [0, 0, 0, 0, 0];
830 /// let mut count = 1;
832 /// for chunk in v.chunks_exact_mut(2) {
833 /// for elem in chunk.iter_mut() {
838 /// assert_eq!(v, &[1, 1, 2, 2, 0]);
841 /// [`chunks_mut`]: #method.chunks_mut
842 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
843 #[stable(feature = "chunks_exact", since = "1.31.0")]
845 pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> {
846 assert!(chunk_size != 0);
// Split off the tail that doesn't fill a whole chunk; it becomes `into_remainder()`.
847 let rem = self.len() % chunk_size;
848 let len = self.len() - rem;
849 let (fst, snd) = self.split_at_mut(len);
850 ChunksExactMut { v: fst, rem: snd, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
853 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
856 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
857 /// slice, then the last chunk will not have length `chunk_size`.
859 /// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
860 /// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
865 /// Panics if `chunk_size` is 0.
870 /// let slice = ['l', 'o', 'r', 'e', 'm'];
871 /// let mut iter = slice.rchunks(2);
872 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
873 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
874 /// assert_eq!(iter.next().unwrap(), &['l']);
875 /// assert!(iter.next().is_none());
878 /// [`rchunks_exact`]: #method.rchunks_exact
879 /// [`chunks`]: #method.chunks
880 #[stable(feature = "rchunks", since = "1.31.0")]
882 pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
883 assert!(chunk_size != 0);
884 RChunks { v: self, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
887 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
890 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
891 /// length of the slice, then the last chunk will not have length `chunk_size`.
893 /// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
894 /// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
895 /// beginning of the slice.
899 /// Panics if `chunk_size` is 0.
904 /// let v = &mut [0, 0, 0, 0, 0];
905 /// let mut count = 1;
907 /// for chunk in v.rchunks_mut(2) {
908 /// for elem in chunk.iter_mut() {
913 /// assert_eq!(v, &[3, 2, 2, 1, 1]);
916 /// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
917 /// [`chunks_mut`]: #method.chunks_mut
918 #[stable(feature = "rchunks", since = "1.31.0")]
920 pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> {
921 assert!(chunk_size != 0);
922 RChunksMut { v: self, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
925 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
926 /// end of the slice.
928 /// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
929 /// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
930 /// from the `remainder` function of the iterator.
932 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
933 /// resulting code better than in the case of [`chunks`].
935 /// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
936 /// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
941 /// Panics if `chunk_size` is 0.
946 /// let slice = ['l', 'o', 'r', 'e', 'm'];
947 /// let mut iter = slice.rchunks_exact(2);
948 /// assert_eq!(iter.next().unwrap(), &['e', 'm']);
949 /// assert_eq!(iter.next().unwrap(), &['o', 'r']);
950 /// assert!(iter.next().is_none());
951 /// assert_eq!(iter.remainder(), &['l']);
954 /// [`chunks`]: #method.chunks
955 /// [`rchunks`]: #method.rchunks
956 /// [`chunks_exact`]: #method.chunks_exact
957 #[stable(feature = "rchunks", since = "1.31.0")]
959 pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> {
960 assert!(chunk_size != 0);
// Working from the back, so the remainder is the *front* `rem` elements.
961 let rem = self.len() % chunk_size;
962 let (fst, snd) = self.split_at(rem);
963 RChunksExact { v: snd, rem: fst, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
966 /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
969 /// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
970 /// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
971 /// retrieved from the `into_remainder` function of the iterator.
973 /// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
974 /// resulting code better than in the case of [`chunks_mut`].
976 /// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
977 /// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
982 /// Panics if `chunk_size` is 0.
987 /// let v = &mut [0, 0, 0, 0, 0];
988 /// let mut count = 1;
990 /// for chunk in v.rchunks_exact_mut(2) {
991 /// for elem in chunk.iter_mut() {
996 /// assert_eq!(v, &[0, 2, 2, 1, 1]);
999 /// [`chunks_mut`]: #method.chunks_mut
1000 /// [`rchunks_mut`]: #method.rchunks_mut
1001 /// [`chunks_exact_mut`]: #method.chunks_exact_mut
1002 #[stable(feature = "rchunks", since = "1.31.0")]
1004 pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> {
1005 assert!(chunk_size != 0);
// Working from the back, so the remainder is the *front* `rem` elements.
1006 let rem = self.len() % chunk_size;
1007 let (fst, snd) = self.split_at_mut(rem);
1008 RChunksExactMut { v: snd, rem: fst, chunk_size }
// NOTE(review): fragment — closing brace missing in this extract.
1011 /// Divides one slice into two at an index.
1013 /// The first will contain all indices from `[0, mid)` (excluding
1014 /// the index `mid` itself) and the second will contain all
1015 /// indices from `[mid, len)` (excluding the index `len` itself).
1019 /// Panics if `mid > len`.
1024 /// let v = [1, 2, 3, 4, 5, 6];
1027 /// let (left, right) = v.split_at(0);
1028 /// assert!(left == []);
1029 /// assert!(right == [1, 2, 3, 4, 5, 6]);
1033 /// let (left, right) = v.split_at(2);
1034 /// assert!(left == [1, 2]);
1035 /// assert!(right == [3, 4, 5, 6]);
1039 /// let (left, right) = v.split_at(6);
1040 /// assert!(left == [1, 2, 3, 4, 5, 6]);
1041 /// assert!(right == []);
1044 #[stable(feature = "rust1", since = "1.0.0")]
1046 pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
// Safe range indexing performs the `mid <= len` check (panicking otherwise).
1047 (&self[..mid], &self[mid..])
// NOTE(review): fragment — the `unsafe {` opener before the from_raw_parts_mut call and the
// closing braces are among the dropped lines.
1050 /// Divides one mutable slice into two at an index.
1052 /// The first will contain all indices from `[0, mid)` (excluding
1053 /// the index `mid` itself) and the second will contain all
1054 /// indices from `[mid, len)` (excluding the index `len` itself).
1058 /// Panics if `mid > len`.
1063 /// let mut v = [1, 0, 3, 0, 5, 6];
1064 /// // scoped to restrict the lifetime of the borrows
1066 /// let (left, right) = v.split_at_mut(2);
1067 /// assert!(left == [1, 0]);
1068 /// assert!(right == [3, 0, 5, 6]);
1072 /// assert!(v == [1, 2, 3, 4, 5, 6]);
1074 #[stable(feature = "rust1", since = "1.0.0")]
1076 pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
1077 let len = self.len();
1078 let ptr = self.as_mut_ptr();
// The assert guarantees the two raw-parts slices below are in-bounds and disjoint.
1081 assert!(mid <= len);
1083 (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
// NOTE(review): fragment — the `where` line appears at L557 but intermediate lines and the
// closing brace were dropped by extraction.
1087 /// Returns an iterator over subslices separated by elements that match
1088 /// `pred`. The matched element is not contained in the subslices.
1093 /// let slice = [10, 40, 33, 20];
1094 /// let mut iter = slice.split(|num| num % 3 == 0);
1096 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1097 /// assert_eq!(iter.next().unwrap(), &[20]);
1098 /// assert!(iter.next().is_none());
1101 /// If the first element is matched, an empty slice will be the first item
1102 /// returned by the iterator. Similarly, if the last element in the slice
1103 /// is matched, an empty slice will be the last item returned by the
1107 /// let slice = [10, 40, 33];
1108 /// let mut iter = slice.split(|num| num % 3 == 0);
1110 /// assert_eq!(iter.next().unwrap(), &[10, 40]);
1111 /// assert_eq!(iter.next().unwrap(), &[]);
1112 /// assert!(iter.next().is_none());
1115 /// If two matched elements are directly adjacent, an empty slice will be
1116 /// present between them:
1119 /// let slice = [10, 6, 33, 20];
1120 /// let mut iter = slice.split(|num| num % 3 == 0);
1122 /// assert_eq!(iter.next().unwrap(), &[10]);
1123 /// assert_eq!(iter.next().unwrap(), &[]);
1124 /// assert_eq!(iter.next().unwrap(), &[20]);
1125 /// assert!(iter.next().is_none());
1127 #[stable(feature = "rust1", since = "1.0.0")]
1129 pub fn split<F>(&self, pred: F) -> Split<'_, T, F>
1131 F: FnMut(&T) -> bool,
1133 Split { v: self, pred, finished: false }
// NOTE(review): fragment — `where` keyword line and closing braces dropped by extraction.
1136 /// Returns an iterator over mutable subslices separated by elements that
1137 /// match `pred`. The matched element is not contained in the subslices.
1142 /// let mut v = [10, 40, 30, 20, 60, 50];
1144 /// for group in v.split_mut(|num| *num % 3 == 0) {
1147 /// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
1149 #[stable(feature = "rust1", since = "1.0.0")]
1151 pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<'_, T, F>
1153 F: FnMut(&T) -> bool,
1155 SplitMut { v: self, pred, finished: false }
// NOTE(review): fragment — `where` keyword line and closing braces dropped by extraction.
1158 /// Returns an iterator over subslices separated by elements that match
1159 /// `pred`, starting at the end of the slice and working backwards.
1160 /// The matched element is not contained in the subslices.
1165 /// let slice = [11, 22, 33, 0, 44, 55];
1166 /// let mut iter = slice.rsplit(|num| *num == 0);
1168 /// assert_eq!(iter.next().unwrap(), &[44, 55]);
1169 /// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
1170 /// assert_eq!(iter.next(), None);
1173 /// As with `split()`, if the first or last element is matched, an empty
1174 /// slice will be the first (or last) item returned by the iterator.
1177 /// let v = &[0, 1, 1, 2, 3, 5, 8];
1178 /// let mut it = v.rsplit(|n| *n % 2 == 0);
1179 /// assert_eq!(it.next().unwrap(), &[]);
1180 /// assert_eq!(it.next().unwrap(), &[3, 5]);
1181 /// assert_eq!(it.next().unwrap(), &[1, 1]);
1182 /// assert_eq!(it.next().unwrap(), &[]);
1183 /// assert_eq!(it.next(), None);
1185 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1187 pub fn rsplit<F>(&self, pred: F) -> RSplit<'_, T, F>
1189 F: FnMut(&T) -> bool,
// Thin wrapper: a reversed view over the forward `split` iterator.
1191 RSplit { inner: self.split(pred) }
// NOTE(review): fragment — `where` keyword line and closing braces dropped by extraction.
1194 /// Returns an iterator over mutable subslices separated by elements that
1195 /// match `pred`, starting at the end of the slice and working
1196 /// backwards. The matched element is not contained in the subslices.
1201 /// let mut v = [100, 400, 300, 200, 600, 500];
1203 /// let mut count = 0;
1204 /// for group in v.rsplit_mut(|num| *num % 3 == 0) {
1206 /// group[0] = count;
1208 /// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
1211 #[stable(feature = "slice_rsplit", since = "1.27.0")]
1213 pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<'_, T, F>
1215 F: FnMut(&T) -> bool,
// Thin wrapper: a reversed view over the forward `split_mut` iterator.
1217 RSplitMut { inner: self.split_mut(pred) }
1220 /// Returns an iterator over subslices separated by elements that match
1221 /// `pred`, limited to returning at most `n` items. The matched element is
1222 /// not contained in the subslices.
1224 /// The last element returned, if any, will contain the remainder of the
1229 /// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
1230 /// `[20, 60, 50]`):
1233 /// let v = [10, 40, 30, 20, 60, 50];
1235 /// for group in v.splitn(2, |num| *num % 3 == 0) {
1236 /// println!("{:?}", group);
1239 #[stable(feature = "rust1", since = "1.0.0")]
1241 pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<'_, T, F>
1243 F: FnMut(&T) -> bool,
// `GenericSplitN` caps the wrapped splitter at `n` yielded subslices.
1245 SplitN { inner: GenericSplitN { iter: self.split(pred), count: n } }
1248 /// Returns an iterator over subslices separated by elements that match
1249 /// `pred`, limited to returning at most `n` items. The matched element is
1250 /// not contained in the subslices.
1252 /// The last element returned, if any, will contain the remainder of the
1258 /// let mut v = [10, 40, 30, 20, 60, 50];
1260 /// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
1263 /// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
1265 #[stable(feature = "rust1", since = "1.0.0")]
1267 pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<'_, T, F>
1269 F: FnMut(&T) -> bool,
1271 SplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), count: n } }
1274 /// Returns an iterator over subslices separated by elements that match
1275 /// `pred` limited to returning at most `n` items. This starts at the end of
1276 /// the slice and works backwards. The matched element is not contained in
1279 /// The last element returned, if any, will contain the remainder of the
1284 /// Print the slice split once, starting from the end, by numbers divisible
1285 /// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
1288 /// let v = [10, 40, 30, 20, 60, 50];
1290 /// for group in v.rsplitn(2, |num| *num % 3 == 0) {
1291 /// println!("{:?}", group);
1294 #[stable(feature = "rust1", since = "1.0.0")]
1296 pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<'_, T, F>
1298 F: FnMut(&T) -> bool,
// Same capping adapter, but over the reverse splitter.
1300 RSplitN { inner: GenericSplitN { iter: self.rsplit(pred), count: n } }
1303 /// Returns an iterator over subslices separated by elements that match
1304 /// `pred` limited to returning at most `n` items. This starts at the end of
1305 /// the slice and works backwards. The matched element is not contained in
1308 /// The last element returned, if any, will contain the remainder of the
1314 /// let mut s = [10, 40, 30, 20, 60, 50];
1316 /// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
1319 /// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
1321 #[stable(feature = "rust1", since = "1.0.0")]
1323 pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<'_, T, F>
1325 F: FnMut(&T) -> bool,
1327 RSplitNMut { inner: GenericSplitN { iter: self.rsplit_mut(pred), count: n } }
1330 /// Returns `true` if the slice contains an element with the given value.
1335 /// let v = [10, 40, 30];
1336 /// assert!(v.contains(&30));
1337 /// assert!(!v.contains(&50));
1340 /// If you do not have an `&T`, but just an `&U` such that `T: Borrow<U>`
1341 /// (e.g. `String: Borrow<str>`), you can use `iter().any`:
1344 /// let v = [String::from("hello"), String::from("world")]; // slice of `String`
1345 /// assert!(v.iter().any(|e| e == "hello")); // search with `&str`
1346 /// assert!(!v.iter().any(|e| e == "hi"));
1348 #[stable(feature = "rust1", since = "1.0.0")]
1349 pub fn contains(&self, x: &T) -> bool
// NOTE(review): delegates to an internal `slice_contains` helper defined elsewhere
// in this file — presumably so the byte case can be specialized; confirm there.
1353 x.slice_contains(self)
1356 /// Returns `true` if `needle` is a prefix of the slice.
1361 /// let v = [10, 40, 30];
1362 /// assert!(v.starts_with(&[10]));
1363 /// assert!(v.starts_with(&[10, 40]));
1364 /// assert!(!v.starts_with(&[50]));
1365 /// assert!(!v.starts_with(&[10, 50]));
1368 /// Always returns `true` if `needle` is an empty slice:
1371 /// let v = &[10, 40, 30];
1372 /// assert!(v.starts_with(&[]));
1373 /// let v: &[u8] = &[];
1374 /// assert!(v.starts_with(&[]));
1376 #[stable(feature = "rust1", since = "1.0.0")]
1377 pub fn starts_with(&self, needle: &[T]) -> bool
1381 let n = needle.len();
// The length check short-circuits, so the `..n` index below cannot panic.
1382 self.len() >= n && needle == &self[..n]
1385 /// Returns `true` if `needle` is a suffix of the slice.
1390 /// let v = [10, 40, 30];
1391 /// assert!(v.ends_with(&[30]));
1392 /// assert!(v.ends_with(&[40, 30]));
1393 /// assert!(!v.ends_with(&[50]));
1394 /// assert!(!v.ends_with(&[50, 30]));
1397 /// Always returns `true` if `needle` is an empty slice:
1400 /// let v = &[10, 40, 30];
1401 /// assert!(v.ends_with(&[]));
1402 /// let v: &[u8] = &[];
1403 /// assert!(v.ends_with(&[]));
1405 #[stable(feature = "rust1", since = "1.0.0")]
1406 pub fn ends_with(&self, needle: &[T]) -> bool
1410 let (m, n) = (self.len(), needle.len());
// `m >= n` short-circuits, guarding both the `m - n` subtraction and the index.
1411 m >= n && needle == &self[m - n..]
1414 /// Binary searches this sorted slice for a given element.
1416 /// If the value is found then [`Result::Ok`] is returned, containing the
1417 /// index of the matching element. If there are multiple matches, then any
1418 /// one of the matches could be returned. If the value is not found then
1419 /// [`Result::Err`] is returned, containing the index where a matching
1420 /// element could be inserted while maintaining sorted order.
1424 /// Looks up a series of four elements. The first is found, with a
1425 /// uniquely determined position; the second and third are not
1426 /// found; the fourth could match any position in `[1, 4]`.
1429 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1431 /// assert_eq!(s.binary_search(&13), Ok(9));
1432 /// assert_eq!(s.binary_search(&4), Err(7));
1433 /// assert_eq!(s.binary_search(&100), Err(13));
1434 /// let r = s.binary_search(&1);
1435 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1438 /// If you want to insert an item to a sorted vector, while maintaining
1442 /// let mut s = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1444 /// let idx = s.binary_search(&num).unwrap_or_else(|x| x);
1445 /// s.insert(idx, num);
1446 /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
1448 #[stable(feature = "rust1", since = "1.0.0")]
1449 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
// The total order comes from `Ord`; delegate to the comparator form.
1453 self.binary_search_by(|p| p.cmp(x))
1456 /// Binary searches this sorted slice with a comparator function.
1458 /// The comparator function should implement an order consistent
1459 /// with the sort order of the underlying slice, returning an
1460 /// order code that indicates whether its argument is `Less`,
1461 /// `Equal` or `Greater` than the desired target.
1463 /// If the value is found then [`Result::Ok`] is returned, containing the
1464 /// index of the matching element. If there are multiple matches, then any
1465 /// one of the matches could be returned. If the value is not found then
1466 /// [`Result::Err`] is returned, containing the index where a matching
1467 /// element could be inserted while maintaining sorted order.
1471 /// Looks up a series of four elements. The first is found, with a
1472 /// uniquely determined position; the second and third are not
1473 /// found; the fourth could match any position in `[1, 4]`.
1476 /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
1479 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
1481 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
1483 /// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
1485 /// let r = s.binary_search_by(|probe| probe.cmp(&seek));
1486 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1488 #[stable(feature = "rust1", since = "1.0.0")]
1490 pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
1492 F: FnMut(&'a T) -> Ordering,
1495 let mut size = s.len();
1499 let mut base = 0usize;
1501 let half = size / 2;
1502 let mid = base + half;
1503 // mid is always in [0, size), that means mid is >= 0 and < size.
1504 // mid >= 0: by definition
1505 // mid < size: mid = size / 2 + size / 4 + size / 8 ...
// SAFETY: per the argument above, `mid` is a valid index into `s`.
1506 let cmp = f(unsafe { s.get_unchecked(mid) });
// Keep the half that can still contain the target (the loop's elided tail shrinks `size`).
1507 base = if cmp == Greater { base } else { mid };
1510 // base is always in [0, size) because base <= mid.
// SAFETY: `base` is in bounds, per the comment above.
1511 let cmp = f(unsafe { s.get_unchecked(base) });
// On a miss the insertion point is one past `base` when the final probe compared Less.
1512 if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
1515 /// Binary searches this sorted slice with a key extraction function.
1517 /// Assumes that the slice is sorted by the key, for instance with
1518 /// [`sort_by_key`] using the same key extraction function.
1520 /// If the value is found then [`Result::Ok`] is returned, containing the
1521 /// index of the matching element. If there are multiple matches, then any
1522 /// one of the matches could be returned. If the value is not found then
1523 /// [`Result::Err`] is returned, containing the index where a matching
1524 /// element could be inserted while maintaining sorted order.
1526 /// [`sort_by_key`]: #method.sort_by_key
1530 /// Looks up a series of four elements in a slice of pairs sorted by
1531 /// their second elements. The first is found, with a uniquely
1532 /// determined position; the second and third are not found; the
1533 /// fourth could match any position in `[1, 4]`.
1536 /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
1537 /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
1538 /// (1, 21), (2, 34), (4, 55)];
1540 /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
1541 /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
1542 /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
1543 /// let r = s.binary_search_by_key(&1, |&(a,b)| b);
1544 /// assert!(match r { Ok(1..=4) => true, _ => false, });
1546 #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
1548 pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
1550 F: FnMut(&'a T) -> B,
// Reduce to the comparator form by comparing extracted keys against `b`.
1553 self.binary_search_by(|k| f(k).cmp(b))
1556 /// Sorts the slice, but may not preserve the order of equal elements.
1558 /// This sort is unstable (i.e., may reorder equal elements), in-place
1559 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1561 /// # Current implementation
1563 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1564 /// which combines the fast average case of randomized quicksort with the fast worst case of
1565 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1566 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1567 /// deterministic behavior.
1569 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1570 /// slice consists of several concatenated sorted sequences.
1575 /// let mut v = [-5, 4, 1, -3, 2];
1577 /// v.sort_unstable();
1578 /// assert!(v == [-5, -3, 1, 2, 4]);
1581 /// [pdqsort]: https://github.com/orlp/pdqsort
1582 #[stable(feature = "sort_unstable", since = "1.20.0")]
1584 pub fn sort_unstable(&mut self)
// `Ord::lt` supplies the strict "is less" predicate the quicksort expects.
1588 sort::quicksort(self, |a, b| a.lt(b));
1591 /// Sorts the slice with a comparator function, but may not preserve the order of equal
1594 /// This sort is unstable (i.e., may reorder equal elements), in-place
1595 /// (i.e., does not allocate), and `O(n log n)` worst-case.
1597 /// The comparator function must define a total ordering for the elements in the slice. If
1598 /// the ordering is not total, the order of the elements is unspecified. An order is a
1599 /// total order if it is (for all a, b and c):
1601 /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
1602 /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
1604 /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
1605 /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
1608 /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
1609 /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
1610 /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
1613 /// # Current implementation
1615 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1616 /// which combines the fast average case of randomized quicksort with the fast worst case of
1617 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1618 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1619 /// deterministic behavior.
1621 /// It is typically faster than stable sorting, except in a few special cases, e.g., when the
1622 /// slice consists of several concatenated sorted sequences.
1627 /// let mut v = [5, 4, 1, 3, 2];
1628 /// v.sort_unstable_by(|a, b| a.cmp(b));
1629 /// assert!(v == [1, 2, 3, 4, 5]);
1631 /// // reverse sorting
1632 /// v.sort_unstable_by(|a, b| b.cmp(a));
1633 /// assert!(v == [5, 4, 3, 2, 1]);
1636 /// [pdqsort]: https://github.com/orlp/pdqsort
1637 #[stable(feature = "sort_unstable", since = "1.20.0")]
1639 pub fn sort_unstable_by<F>(&mut self, mut compare: F)
1641 F: FnMut(&T, &T) -> Ordering,
// Adapt the three-way comparator to the boolean "is less" form the sort uses.
1643 sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
1646 /// Sorts the slice with a key extraction function, but may not preserve the order of equal
1649 /// This sort is unstable (i.e., may reorder equal elements), in-place
1650 /// (i.e., does not allocate), and `O(m n log(m n))` worst-case, where the key function is
1653 /// # Current implementation
1655 /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
1656 /// which combines the fast average case of randomized quicksort with the fast worst case of
1657 /// heapsort, while achieving linear time on slices with certain patterns. It uses some
1658 /// randomization to avoid degenerate cases, but with a fixed seed to always provide
1659 /// deterministic behavior.
1661 /// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key)
1662 /// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in
1663 /// cases where the key function is expensive.
1668 /// let mut v = [-5i32, 4, 1, -3, 2];
1670 /// v.sort_unstable_by_key(|k| k.abs());
1671 /// assert!(v == [1, 2, -3, 4, -5]);
1674 /// [pdqsort]: https://github.com/orlp/pdqsort
1675 #[stable(feature = "sort_unstable", since = "1.20.0")]
1677 pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
// `f` runs on both operands of every comparison — hence the `m` factor in the cost above.
1682 sort::quicksort(self, |a, b| f(a).lt(&f(b)));
1685 /// Reorder the slice such that the element at `index` is at its final sorted position.
1687 /// This reordering has the additional property that any value at position `i < index` will be
1688 /// less than or equal to any value at a position `j > index`. Additionally, this reordering is
1689 /// unstable (i.e. any number of equal elements may end up at position `index`), in-place
1690 /// (i.e. does not allocate), and `O(n)` worst-case. This function is also known as "kth
1691 /// element" in other libraries. It returns a triplet of the following values: all elements less
1692 /// than the one at the given index, the value at the given index, and all elements greater than
1693 /// the one at the given index.
1695 /// # Current implementation
1697 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1698 /// used for [`sort_unstable`].
1700 /// [`sort_unstable`]: #method.sort_unstable
1704 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1709 /// #![feature(slice_partition_at_index)]
1711 /// let mut v = [-5i32, 4, 1, -3, 2];
1713 /// // Find the median
1714 /// v.partition_at_index(2);
1716 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1717 /// // about the specified index.
1718 /// assert!(v == [-3, -5, 1, 2, 4] ||
1719 /// v == [-5, -3, 1, 2, 4] ||
1720 /// v == [-3, -5, 1, 4, 2] ||
1721 /// v == [-5, -3, 1, 4, 2]);
1723 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1725 pub fn partition_at_index(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T])
// Quickselect with `Ord::lt` as the "is less" predicate.
1729 let mut f = |a: &T, b: &T| a.lt(b);
1730 sort::partition_at_index(self, index, &mut f)
1733 /// Reorder the slice with a comparator function such that the element at `index` is at its
1734 /// final sorted position.
1736 /// This reordering has the additional property that any value at position `i < index` will be
1737 /// less than or equal to any value at a position `j > index` using the comparator function.
1738 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1739 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1740 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1741 /// values: all elements less than the one at the given index, the value at the given index,
1742 /// and all elements greater than the one at the given index, using the provided comparator
1745 /// # Current implementation
1747 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1748 /// used for [`sort_unstable`].
1750 /// [`sort_unstable`]: #method.sort_unstable
1754 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1759 /// #![feature(slice_partition_at_index)]
1761 /// let mut v = [-5i32, 4, 1, -3, 2];
1763 /// // Find the median as if the slice were sorted in descending order.
1764 /// v.partition_at_index_by(2, |a, b| b.cmp(a));
1766 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1767 /// // about the specified index.
1768 /// assert!(v == [2, 4, 1, -5, -3] ||
1769 /// v == [2, 4, 1, -3, -5] ||
1770 /// v == [4, 2, 1, -5, -3] ||
1771 /// v == [4, 2, 1, -3, -5]);
1773 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1775 pub fn partition_at_index_by<F>(
1779 ) -> (&mut [T], &mut T, &mut [T])
1781 F: FnMut(&T, &T) -> Ordering,
// Adapt the three-way comparator to the boolean "is less" form quickselect uses.
1783 let mut f = |a: &T, b: &T| compare(a, b) == Less;
1784 sort::partition_at_index(self, index, &mut f)
1787 /// Reorder the slice with a key extraction function such that the element at `index` is at its
1788 /// final sorted position.
1790 /// This reordering has the additional property that any value at position `i < index` will be
1791 /// less than or equal to any value at a position `j > index` using the key extraction function.
1792 /// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
1793 /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
1794 /// is also known as "kth element" in other libraries. It returns a triplet of the following
1795 /// values: all elements less than the one at the given index, the value at the given index, and
1796 /// all elements greater than the one at the given index, using the provided key extraction
1799 /// # Current implementation
1801 /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
1802 /// used for [`sort_unstable`].
1804 /// [`sort_unstable`]: #method.sort_unstable
1808 /// Panics when `index >= len()`, meaning it always panics on empty slices.
1813 /// #![feature(slice_partition_at_index)]
1815 /// let mut v = [-5i32, 4, 1, -3, 2];
1817 /// // Return the median as if the array were sorted according to absolute value.
1818 /// v.partition_at_index_by_key(2, |a| a.abs());
1820 /// // We are only guaranteed the slice will be one of the following, based on the way we sort
1821 /// // about the specified index.
1822 /// assert!(v == [1, 2, -3, 4, -5] ||
1823 /// v == [1, 2, -3, -5, 4] ||
1824 /// v == [2, 1, -3, 4, -5] ||
1825 /// v == [2, 1, -3, -5, 4]);
1827 #[unstable(feature = "slice_partition_at_index", issue = "55300")]
1829 pub fn partition_at_index_by_key<K, F>(
1833 ) -> (&mut [T], &mut T, &mut [T])
// The key function runs on both operands of every comparison.
1838 let mut g = |a: &T, b: &T| f(a).lt(&f(b));
1839 sort::partition_at_index(self, index, &mut g)
1842 /// Moves all consecutive repeated elements to the end of the slice according to the
1843 /// [`PartialEq`] trait implementation.
1845 /// Returns two slices. The first contains no consecutive repeated elements.
1846 /// The second contains all the duplicates in no specified order.
1848 /// If the slice is sorted, the first returned slice contains no duplicates.
1853 /// #![feature(slice_partition_dedup)]
1855 /// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
1857 /// let (dedup, duplicates) = slice.partition_dedup();
1859 /// assert_eq!(dedup, [1, 2, 3, 2, 1]);
1860 /// assert_eq!(duplicates, [2, 3, 1]);
1862 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1864 pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
// `PartialEq` supplies the equality relation for the general `_by` form.
1868 self.partition_dedup_by(|a, b| a == b)
1871 /// Moves all but the first of consecutive elements to the end of the slice satisfying
1872 /// a given equality relation.
1874 /// Returns two slices. The first contains no consecutive repeated elements.
1875 /// The second contains all the duplicates in no specified order.
1877 /// The `same_bucket` function is passed references to two elements from the slice and
1878 /// must determine if the elements compare equal. The elements are passed in opposite order
1879 /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
1880 /// at the end of the slice.
1882 /// If the slice is sorted, the first returned slice contains no duplicates.
1887 /// #![feature(slice_partition_dedup)]
1889 /// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
1891 /// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
1893 /// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
1894 /// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
1896 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
1898 pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
1900 F: FnMut(&mut T, &mut T) -> bool,
1902 // Although we have a mutable reference to `self`, we cannot make
1903 // *arbitrary* changes. The `same_bucket` calls could panic, so we
1904 // must ensure that the slice is in a valid state at all times.
1906 // The way that we handle this is by using swaps; we iterate
1907 // over all the elements, swapping as we go so that at the end
1908 // the elements we wish to keep are in the front, and those we
1909 // wish to reject are at the back. We can then split the slice.
1910 // This operation is still O(n).
1912 // Example: We start in this state, where `r` represents "next
1913 // read" and `w` represents "next_write".
1916 // +---+---+---+---+---+---+
1917 // | 0 | 1 | 1 | 2 | 3 | 3 |
1918 // +---+---+---+---+---+---+
1921 // Comparing self[r] against self[w-1], this is not a duplicate, so
1922 // we swap self[r] and self[w] (no effect as r==w) and then increment both
1923 // r and w, leaving us with:
1926 // +---+---+---+---+---+---+
1927 // | 0 | 1 | 1 | 2 | 3 | 3 |
1928 // +---+---+---+---+---+---+
1931 // Comparing self[r] against self[w-1], this value is a duplicate,
1932 // so we increment `r` but leave everything else unchanged:
1935 // +---+---+---+---+---+---+
1936 // | 0 | 1 | 1 | 2 | 3 | 3 |
1937 // +---+---+---+---+---+---+
1940 // Comparing self[r] against self[w-1], this is not a duplicate,
1941 // so swap self[r] and self[w] and advance r and w:
1944 // +---+---+---+---+---+---+
1945 // | 0 | 1 | 2 | 1 | 3 | 3 |
1946 // +---+---+---+---+---+---+
1949 // Not a duplicate, repeat:
1952 // +---+---+---+---+---+---+
1953 // | 0 | 1 | 2 | 3 | 1 | 3 |
1954 // +---+---+---+---+---+---+
1957 // Duplicate, advance r. End of slice. Split at w.
1959 let len = self.len();
1961 return (self, &mut []);
1964 let ptr = self.as_mut_ptr();
1965 let mut next_read: usize = 1;
1966 let mut next_write: usize = 1;
1969 // Avoid bounds checks by using raw pointers.
// SAFETY: `1 <= next_write <= next_read < len` holds throughout the loop, so both
// `ptr.add(next_read)` and `ptr.add(next_write - 1)` stay inside the slice.
1970 while next_read < len {
1971 let ptr_read = ptr.add(next_read);
1972 let prev_ptr_write = ptr.add(next_write - 1);
1973 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
1974 if next_read != next_write {
1975 let ptr_write = prev_ptr_write.offset(1);
1976 mem::swap(&mut *ptr_read, &mut *ptr_write);
// The unique prefix is `..next_write`; everything after is the duplicates bucket.
1984 self.split_at_mut(next_write)
1987 /// Moves all but the first of consecutive elements to the end of the slice that resolve
1988 /// to the same key.
1990 /// Returns two slices. The first contains no consecutive repeated elements.
1991 /// The second contains all the duplicates in no specified order.
1993 /// If the slice is sorted, the first returned slice contains no duplicates.
1998 /// #![feature(slice_partition_dedup)]
2000 /// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
2002 /// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
2004 /// assert_eq!(dedup, [10, 20, 30, 20, 11]);
2005 /// assert_eq!(duplicates, [21, 30, 13]);
2007 #[unstable(feature = "slice_partition_dedup", issue = "54279")]
2009 pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
2011 F: FnMut(&mut T) -> K,
// Two elements are "the same" when their extracted keys compare equal.
2014 self.partition_dedup_by(|a, b| key(a) == key(b))
2017 /// Rotates the slice in-place such that the first `mid` elements of the
2018 /// slice move to the end while the last `self.len() - mid` elements move to
2019 /// the front. After calling `rotate_left`, the element previously at index
2020 /// `mid` will become the first element in the slice.
2024 /// This function will panic if `mid` is greater than the length of the
2025 /// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
2030 /// Takes linear (in `self.len()`) time.
2035 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2036 /// a.rotate_left(2);
2037 /// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
2040 /// Rotating a subslice:
2043 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2044 /// a[1..5].rotate_left(1);
2045 /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
2047 #[stable(feature = "slice_rotate", since = "1.26.0")]
2048 pub fn rotate_left(&mut self, mid: usize) {
2049 assert!(mid <= self.len());
2050 let k = self.len() - mid;
2053 let p = self.as_mut_ptr();
// SAFETY: the assert guarantees `mid <= len`, so `p.add(mid)` is in bounds, with
// `mid` valid elements before it and `k = len - mid` valid elements after it.
2054 rotate::ptr_rotate(mid, p.add(mid), k);
2058 /// Rotates the slice in-place such that the first `self.len() - k`
2059 /// elements of the slice move to the end while the last `k` elements move
2060 /// to the front. After calling `rotate_right`, the element previously at
2061 /// index `self.len() - k` will become the first element in the slice.
2065 /// This function will panic if `k` is greater than the length of the
2066 /// slice. Note that `k == self.len()` does _not_ panic and is a no-op
2071 /// Takes linear (in `self.len()`) time.
2076 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2077 /// a.rotate_right(2);
2078 /// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
2081 /// Rotate a subslice:
2084 /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
2085 /// a[1..5].rotate_right(1);
2086 /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
2088 #[stable(feature = "slice_rotate", since = "1.26.0")]
2089 pub fn rotate_right(&mut self, k: usize) {
2090 assert!(k <= self.len());
2091 let mid = self.len() - k;
2094 let p = self.as_mut_ptr();
// SAFETY: the assert guarantees `k <= len`, so `mid = len - k` cannot underflow and
// `p.add(mid)` is in bounds; a right-rotation by `k` is a left-rotation by `mid`.
2095 rotate::ptr_rotate(mid, p.add(mid), k);
2099 /// Copies the elements from `src` into `self`.
2101 /// The length of `src` must be the same as `self`.
2103 /// If `src` implements `Copy`, it can be more performant to use
2104 /// [`copy_from_slice`].
2108 /// This function will panic if the two slices have different lengths.
2112 /// Cloning two elements from a slice into another:
2115 /// let src = [1, 2, 3, 4];
2116 /// let mut dst = [0, 0];
2118 /// // Because the slices have to be the same length,
2119 /// // we slice the source slice from four elements
2120 /// // to two. It will panic if we don't do this.
2121 /// dst.clone_from_slice(&src[2..]);
2123 /// assert_eq!(src, [1, 2, 3, 4]);
2124 /// assert_eq!(dst, [3, 4]);
2127 /// Rust enforces that there can only be one mutable reference with no
2128 /// immutable references to a particular piece of data in a particular
2129 /// scope. Because of this, attempting to use `clone_from_slice` on a
2130 /// single slice will result in a compile failure:
2133 /// let mut slice = [1, 2, 3, 4, 5];
2135 /// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
2138 /// To work around this, we can use [`split_at_mut`] to create two distinct
2139 /// sub-slices from a slice:
2142 /// let mut slice = [1, 2, 3, 4, 5];
2145 /// let (left, right) = slice.split_at_mut(2);
2146 /// left.clone_from_slice(&right[1..]);
2149 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2152 /// [`copy_from_slice`]: #method.copy_from_slice
2153 /// [`split_at_mut`]: #method.split_at_mut
2154 #[stable(feature = "clone_from_slice", since = "1.7.0")]
2155 pub fn clone_from_slice(&mut self, src: &[T])
2159 assert!(self.len() == src.len(), "destination and source slices have different lengths");
2160 // NOTE: We need to explicitly slice them to the same length
2161 // for bounds checking to be elided, and the optimizer will
2162 // generate memcpy for simple cases (for example T = u8).
2163 let len = self.len();
2164 let src = &src[..len];
// `clone_from` lets `T` reuse resources already held by `self[i]` where possible.
2166 self[i].clone_from(&src[i]);
2170 /// Copies all elements from `src` into `self`, using a memcpy.
2172 /// The length of `src` must be the same as `self`.
2174 /// If `src` does not implement `Copy`, use [`clone_from_slice`].
2178 /// This function will panic if the two slices have different lengths.
2182 /// Copying two elements from a slice into another:
2185 /// let src = [1, 2, 3, 4];
2186 /// let mut dst = [0, 0];
2188 /// // Because the slices have to be the same length,
2189 /// // we slice the source slice from four elements
2190 /// // to two. It will panic if we don't do this.
2191 /// dst.copy_from_slice(&src[2..]);
2193 /// assert_eq!(src, [1, 2, 3, 4]);
2194 /// assert_eq!(dst, [3, 4]);
2197 /// Rust enforces that there can only be one mutable reference with no
2198 /// immutable references to a particular piece of data in a particular
2199 /// scope. Because of this, attempting to use `copy_from_slice` on a
2200 /// single slice will result in a compile failure:
2203 /// let mut slice = [1, 2, 3, 4, 5];
2205 /// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
2208 /// To work around this, we can use [`split_at_mut`] to create two distinct
2209 /// sub-slices from a slice:
2212 /// let mut slice = [1, 2, 3, 4, 5];
2215 /// let (left, right) = slice.split_at_mut(2);
2216 /// left.copy_from_slice(&right[1..]);
2219 /// assert_eq!(slice, [4, 5, 3, 4, 5]);
2222 /// [`clone_from_slice`]: #method.clone_from_slice
2223 /// [`split_at_mut`]: #method.split_at_mut
2224 #[stable(feature = "copy_from_slice", since = "1.9.0")]
2225 pub fn copy_from_slice(&mut self, src: &[T])
2229 assert_eq!(self.len(), src.len(), "destination and source slices have different lengths");
// SAFETY: the assert guarantees equal lengths, and `&mut self` / `&src` are distinct
// borrows, so the regions cannot overlap — `copy_nonoverlapping` is sound here.
2231 ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len());
2235 /// Copies elements from one part of the slice to another part of itself,
2236 /// using a memmove.
2238 /// `src` is the range within `self` to copy from. `dest` is the starting
2239 /// index of the range within `self` to copy to, which will have the same
2240 /// length as `src`. The two ranges may overlap. The ends of the two ranges
2241 /// must be less than or equal to `self.len()`.
2245 /// This function will panic if either range exceeds the end of the slice,
2246 /// or if the end of `src` is before the start.
2250 /// Copying four bytes within a slice:
2253 /// let mut bytes = *b"Hello, World!";
2255 /// bytes.copy_within(1..5, 8);
2257 /// assert_eq!(&bytes, b"Hello, Wello!");
2259 #[stable(feature = "copy_within", since = "1.37.0")]
2260 pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
// Resolve the generic range bounds into a concrete half-open `[src_start, src_end)`.
2264 let src_start = match src.start_bound() {
2265 ops::Bound::Included(&n) => n,
2266 ops::Bound::Excluded(&n) => {
// An excluded start of `usize::MAX` would overflow; report it as an index overflow.
2267 n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
2269 ops::Bound::Unbounded => 0,
2271 let src_end = match src.end_bound() {
2272 ops::Bound::Included(&n) => {
2273 n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
2275 ops::Bound::Excluded(&n) => n,
2276 ops::Bound::Unbounded => self.len(),
2278 assert!(src_start <= src_end, "src end is before src start");
2279 assert!(src_end <= self.len(), "src is out of bounds");
2280 let count = src_end - src_start;
2281 assert!(dest <= self.len() - count, "dest is out of bounds");
// SAFETY: the asserts above place both `src_start..src_end` and `dest..dest + count`
// within the slice; `ptr::copy` (memmove) permits the two ranges to overlap.
2283 ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count);
2287 /// Swaps all elements in `self` with those in `other`.
2289 /// The length of `other` must be the same as `self`.
2293 /// This function will panic if the two slices have different lengths.
2297 /// Swapping two elements across slices:
2300 /// let mut slice1 = [0, 0];
2301 /// let mut slice2 = [1, 2, 3, 4];
2303 /// slice1.swap_with_slice(&mut slice2[2..]);
2305 /// assert_eq!(slice1, [3, 4]);
2306 /// assert_eq!(slice2, [1, 2, 0, 0]);
2309 /// Rust enforces that there can only be one mutable reference to a
2310 /// particular piece of data in a particular scope. Because of this,
2311 /// attempting to use `swap_with_slice` on a single slice will result in
2312 /// a compile failure:
2315 /// let mut slice = [1, 2, 3, 4, 5];
2316 /// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
2319 /// To work around this, we can use [`split_at_mut`] to create two distinct
2320 /// mutable sub-slices from a slice:
2323 /// let mut slice = [1, 2, 3, 4, 5];
2326 /// let (left, right) = slice.split_at_mut(2);
2327 /// left.swap_with_slice(&mut right[1..]);
2330 /// assert_eq!(slice, [4, 5, 3, 1, 2]);
2333 /// [`split_at_mut`]: #method.split_at_mut
2334 #[stable(feature = "swap_with_slice", since = "1.27.0")]
2335 pub fn swap_with_slice(&mut self, other: &mut [T]) {
2336 assert!(self.len() == other.len(), "destination and source slices have different lengths");
2338 ptr::swap_nonoverlapping(self.as_mut_ptr(), other.as_mut_ptr(), self.len());
2342 /// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
2343 fn align_to_offsets<U>(&self) -> (usize, usize) {
2344 // What we gonna do about `rest` is figure out what multiple of `U`s we can put in a
2345 // lowest number of `T`s. And how many `T`s we need for each such "multiple".
2347 // Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
2348 // for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
2349 // place of every 3 Ts in the `rest` slice. A bit more complicated.
2351 // Formula to calculate this is:
2353 // Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
2354 // Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
2356 // Expanded and simplified:
2358 // Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
2359 // Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
2361 // Luckily since all this is constant-evaluated... performance here matters not!
2363 fn gcd(a: usize, b: usize) -> usize {
2364 use crate::intrinsics;
2365 // iterative stein’s algorithm
2366 // We should still make this `const fn` (and revert to recursive algorithm if we do)
2367 // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
2368 let (ctz_a, mut ctz_b) = unsafe {
2375 (intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b))
2377 let k = ctz_a.min(ctz_b);
2378 let mut a = a >> ctz_a;
2381 // remove all factors of 2 from b
2384 mem::swap(&mut a, &mut b);
2391 ctz_b = intrinsics::cttz_nonzero(b);
2396 let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
2397 let ts: usize = mem::size_of::<U>() / gcd;
2398 let us: usize = mem::size_of::<T>() / gcd;
2400 // Armed with this knowledge, we can find how many `U`s we can fit!
2401 let us_len = self.len() / ts * us;
2402 // And how many `T`s will be in the trailing slice!
2403 let ts_len = self.len() % ts;
2407 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2410 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2411 /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
2412 /// length possible for a given type and input slice, but only your algorithm's performance
2413 /// should depend on that, not its correctness. It is permissible for all of the input data to
2414 /// be returned as the prefix or suffix slice.
2416 /// This method has no purpose when either input element `T` or output element `U` are
2417 /// zero-sized and will return the original slice without splitting anything.
2421 /// This method is essentially a `transmute` with respect to the elements in the returned
2422 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2430 /// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2431 /// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
2432 /// // less_efficient_algorithm_for_bytes(prefix);
2433 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2434 /// // less_efficient_algorithm_for_bytes(suffix);
2437 #[stable(feature = "slice_align_to", since = "1.30.0")]
2438 pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
2439 // Note that most of this function will be constant-evaluated,
2440 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2441 // handle ZSTs specially, which is – don't handle them at all.
2442 return (self, &[], &[]);
2445 // First, find at what point do we split between the first and 2nd slice. Easy with
2446 // ptr.align_offset.
2447 let ptr = self.as_ptr();
2448 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
2449 if offset > self.len() {
2452 let (left, rest) = self.split_at(offset);
2453 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2454 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2457 from_raw_parts(rest.as_ptr() as *const U, us_len),
2458 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len),
2463 /// Transmute the slice to a slice of another type, ensuring alignment of the types is
2466 /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
2467 /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
2468 /// length possible for a given type and input slice, but only your algorithm's performance
2469 /// should depend on that, not its correctness. It is permissible for all of the input data to
2470 /// be returned as the prefix or suffix slice.
2472 /// This method has no purpose when either input element `T` or output element `U` are
2473 /// zero-sized and will return the original slice without splitting anything.
2477 /// This method is essentially a `transmute` with respect to the elements in the returned
2478 /// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
2486 /// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
2487 /// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
2488 /// // less_efficient_algorithm_for_bytes(prefix);
2489 /// // more_efficient_algorithm_for_aligned_shorts(shorts);
2490 /// // less_efficient_algorithm_for_bytes(suffix);
2493 #[stable(feature = "slice_align_to", since = "1.30.0")]
2494 pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
2495 // Note that most of this function will be constant-evaluated,
2496 if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
2497 // handle ZSTs specially, which is – don't handle them at all.
2498 return (self, &mut [], &mut []);
2501 // First, find at what point do we split between the first and 2nd slice. Easy with
2502 // ptr.align_offset.
2503 let ptr = self.as_ptr();
2504 let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
2505 if offset > self.len() {
2506 (self, &mut [], &mut [])
2508 let (left, rest) = self.split_at_mut(offset);
2509 // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
2510 let (us_len, ts_len) = rest.align_to_offsets::<U>();
2511 let mut_ptr = rest.as_mut_ptr();
2514 from_raw_parts_mut(mut_ptr as *mut U, us_len),
2515 from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len),
2520 /// Checks if the elements of this slice are sorted.
2522 /// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
2523 /// slice yields exactly zero or one element, `true` is returned.
2525 /// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
2526 /// implies that this function returns `false` if any two consecutive items are not
2532 /// #![feature(is_sorted)]
2533 /// let empty: [i32; 0] = [];
2535 /// assert!([1, 2, 2, 9].is_sorted());
2536 /// assert!(![1, 3, 2, 4].is_sorted());
2537 /// assert!([0].is_sorted());
2538 /// assert!(empty.is_sorted());
2539 /// assert!(![0.0, 1.0, std::f32::NAN].is_sorted());
2542 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2543 pub fn is_sorted(&self) -> bool
2547 self.is_sorted_by(|a, b| a.partial_cmp(b))
2550 /// Checks if the elements of this slice are sorted using the given comparator function.
2552 /// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
2553 /// function to determine the ordering of two elements. Apart from that, it's equivalent to
2554 /// [`is_sorted`]; see its documentation for more information.
2556 /// [`is_sorted`]: #method.is_sorted
2557 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2558 pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
2560 F: FnMut(&T, &T) -> Option<Ordering>,
2562 self.iter().is_sorted_by(|a, b| compare(*a, *b))
2565 /// Checks if the elements of this slice are sorted using the given key extraction function.
2567 /// Instead of comparing the slice's elements directly, this function compares the keys of the
2568 /// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
2569 /// documentation for more information.
2571 /// [`is_sorted`]: #method.is_sorted
2576 /// #![feature(is_sorted)]
2578 /// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
2579 /// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
2582 #[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
2583 pub fn is_sorted_by_key<F, K>(&self, f: F) -> bool
2588 self.iter().is_sorted_by_key(f)
2592 #[lang = "slice_u8"]
2595 /// Checks if all bytes in this slice are within the ASCII range.
2596 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2598 pub fn is_ascii(&self) -> bool {
2599 self.iter().all(|b| b.is_ascii())
2602 /// Checks that two slices are an ASCII case-insensitive match.
2604 /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
2605 /// but without allocating and copying temporaries.
2606 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2608 pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
2609 self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a.eq_ignore_ascii_case(b))
2612 /// Converts this slice to its ASCII upper case equivalent in-place.
2614 /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
2615 /// but non-ASCII letters are unchanged.
2617 /// To return a new uppercased value without modifying the existing one, use
2618 /// [`to_ascii_uppercase`].
2620 /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
2621 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2623 pub fn make_ascii_uppercase(&mut self) {
2625 byte.make_ascii_uppercase();
2629 /// Converts this slice to its ASCII lower case equivalent in-place.
2631 /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
2632 /// but non-ASCII letters are unchanged.
2634 /// To return a new lowercased value without modifying the existing one, use
2635 /// [`to_ascii_lowercase`].
2637 /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
2638 #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
2640 pub fn make_ascii_lowercase(&mut self) {
2642 byte.make_ascii_lowercase();
2647 #[stable(feature = "rust1", since = "1.0.0")]
2648 impl<T, I> ops::Index<I> for [T]
2652 type Output = I::Output;
2655 fn index(&self, index: I) -> &I::Output {
2660 #[stable(feature = "rust1", since = "1.0.0")]
2661 impl<T, I> ops::IndexMut<I> for [T]
2666 fn index_mut(&mut self, index: I) -> &mut I::Output {
2667 index.index_mut(self)
// Out-of-line cold path shared by the indexing impls below: index past the end.
#[inline(never)]
#[cold]
fn slice_index_len_fail(index: usize, len: usize) -> ! {
    panic!("index {} out of range for slice of length {}", index, len);
}
// Out-of-line cold path shared by the indexing impls below: start > end.
#[inline(never)]
#[cold]
fn slice_index_order_fail(index: usize, end: usize) -> ! {
    panic!("slice index starts at {} but ends at {}", index, end);
}
// Out-of-line cold path: an inclusive upper bound of `usize::MAX` cannot be
// converted to an exclusive bound without overflow.
#[inline(never)]
#[cold]
fn slice_index_overflow_fail() -> ! {
    panic!("attempted to index slice up to maximum usize");
}
2689 mod private_slice_index {
2691 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2694 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2695 impl Sealed for usize {}
2696 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2697 impl Sealed for ops::Range<usize> {}
2698 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2699 impl Sealed for ops::RangeTo<usize> {}
2700 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2701 impl Sealed for ops::RangeFrom<usize> {}
2702 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2703 impl Sealed for ops::RangeFull {}
2704 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2705 impl Sealed for ops::RangeInclusive<usize> {}
2706 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2707 impl Sealed for ops::RangeToInclusive<usize> {}
2710 /// A helper trait used for indexing operations.
2711 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2712 #[rustc_on_unimplemented(
2713 on(T = "str", label = "string indices are ranges of `usize`",),
2715 all(any(T = "str", T = "&str", T = "std::string::String"), _Self = "{integer}"),
2716 note = "you can use `.chars().nth()` or `.bytes().nth()`
2717 see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
2719 message = "the type `{T}` cannot be indexed by `{Self}`",
2720 label = "slice indices are of type `usize` or ranges of `usize`"
2722 pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
2723 /// The output type returned by methods.
2724 #[stable(feature = "slice_get_slice", since = "1.28.0")]
2725 type Output: ?Sized;
2727 /// Returns a shared reference to the output at this location, if in
2729 #[unstable(feature = "slice_index_methods", issue = "none")]
2730 fn get(self, slice: &T) -> Option<&Self::Output>;
2732 /// Returns a mutable reference to the output at this location, if in
2734 #[unstable(feature = "slice_index_methods", issue = "none")]
2735 fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
2737 /// Returns a shared reference to the output at this location, without
2738 /// performing any bounds checking.
2739 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
2740 /// even if the resulting reference is not used.
2741 /// [undefined behavior]: ../../reference/behavior-considered-undefined.html
2742 #[unstable(feature = "slice_index_methods", issue = "none")]
2743 unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
2745 /// Returns a mutable reference to the output at this location, without
2746 /// performing any bounds checking.
2747 /// Calling this method with an out-of-bounds index is *[undefined behavior]*
2748 /// even if the resulting reference is not used.
2749 /// [undefined behavior]: ../../reference/behavior-considered-undefined.html
2750 #[unstable(feature = "slice_index_methods", issue = "none")]
2751 unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
2753 /// Returns a shared reference to the output at this location, panicking
2754 /// if out of bounds.
2755 #[unstable(feature = "slice_index_methods", issue = "none")]
2756 fn index(self, slice: &T) -> &Self::Output;
2758 /// Returns a mutable reference to the output at this location, panicking
2759 /// if out of bounds.
2760 #[unstable(feature = "slice_index_methods", issue = "none")]
2761 fn index_mut(self, slice: &mut T) -> &mut Self::Output;
2764 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2765 impl<T> SliceIndex<[T]> for usize {
2769 fn get(self, slice: &[T]) -> Option<&T> {
2770 if self < slice.len() { unsafe { Some(self.get_unchecked(slice)) } } else { None }
2774 fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
2775 if self < slice.len() { unsafe { Some(self.get_unchecked_mut(slice)) } } else { None }
2779 unsafe fn get_unchecked(self, slice: &[T]) -> &T {
2780 &*slice.as_ptr().add(self)
2784 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
2785 &mut *slice.as_mut_ptr().add(self)
2789 fn index(self, slice: &[T]) -> &T {
2790 // N.B., use intrinsic indexing
2795 fn index_mut(self, slice: &mut [T]) -> &mut T {
2796 // N.B., use intrinsic indexing
2801 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2802 impl<T> SliceIndex<[T]> for ops::Range<usize> {
2806 fn get(self, slice: &[T]) -> Option<&[T]> {
2807 if self.start > self.end || self.end > slice.len() {
2810 unsafe { Some(self.get_unchecked(slice)) }
2815 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2816 if self.start > self.end || self.end > slice.len() {
2819 unsafe { Some(self.get_unchecked_mut(slice)) }
2824 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2825 from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
2829 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2830 from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
2834 fn index(self, slice: &[T]) -> &[T] {
2835 if self.start > self.end {
2836 slice_index_order_fail(self.start, self.end);
2837 } else if self.end > slice.len() {
2838 slice_index_len_fail(self.end, slice.len());
2840 unsafe { self.get_unchecked(slice) }
2844 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2845 if self.start > self.end {
2846 slice_index_order_fail(self.start, self.end);
2847 } else if self.end > slice.len() {
2848 slice_index_len_fail(self.end, slice.len());
2850 unsafe { self.get_unchecked_mut(slice) }
2854 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2855 impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
2859 fn get(self, slice: &[T]) -> Option<&[T]> {
2860 (0..self.end).get(slice)
2864 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2865 (0..self.end).get_mut(slice)
2869 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2870 (0..self.end).get_unchecked(slice)
2874 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2875 (0..self.end).get_unchecked_mut(slice)
2879 fn index(self, slice: &[T]) -> &[T] {
2880 (0..self.end).index(slice)
2884 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2885 (0..self.end).index_mut(slice)
2889 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2890 impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
2894 fn get(self, slice: &[T]) -> Option<&[T]> {
2895 (self.start..slice.len()).get(slice)
2899 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2900 (self.start..slice.len()).get_mut(slice)
2904 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2905 (self.start..slice.len()).get_unchecked(slice)
2909 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2910 (self.start..slice.len()).get_unchecked_mut(slice)
2914 fn index(self, slice: &[T]) -> &[T] {
2915 (self.start..slice.len()).index(slice)
2919 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2920 (self.start..slice.len()).index_mut(slice)
2924 #[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
2925 impl<T> SliceIndex<[T]> for ops::RangeFull {
2929 fn get(self, slice: &[T]) -> Option<&[T]> {
2934 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2939 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2944 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2949 fn index(self, slice: &[T]) -> &[T] {
2954 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
2959 #[stable(feature = "inclusive_range", since = "1.26.0")]
2960 impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
2964 fn get(self, slice: &[T]) -> Option<&[T]> {
2965 if *self.end() == usize::max_value() {
2968 (*self.start()..self.end() + 1).get(slice)
2973 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
2974 if *self.end() == usize::max_value() {
2977 (*self.start()..self.end() + 1).get_mut(slice)
2982 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
2983 (*self.start()..self.end() + 1).get_unchecked(slice)
2987 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
2988 (*self.start()..self.end() + 1).get_unchecked_mut(slice)
2992 fn index(self, slice: &[T]) -> &[T] {
2993 if *self.end() == usize::max_value() {
2994 slice_index_overflow_fail();
2996 (*self.start()..self.end() + 1).index(slice)
3000 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3001 if *self.end() == usize::max_value() {
3002 slice_index_overflow_fail();
3004 (*self.start()..self.end() + 1).index_mut(slice)
3008 #[stable(feature = "inclusive_range", since = "1.26.0")]
3009 impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
3013 fn get(self, slice: &[T]) -> Option<&[T]> {
3014 (0..=self.end).get(slice)
3018 fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
3019 (0..=self.end).get_mut(slice)
3023 unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
3024 (0..=self.end).get_unchecked(slice)
3028 unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
3029 (0..=self.end).get_unchecked_mut(slice)
3033 fn index(self, slice: &[T]) -> &[T] {
3034 (0..=self.end).index(slice)
3038 fn index_mut(self, slice: &mut [T]) -> &mut [T] {
3039 (0..=self.end).index_mut(slice)
3043 ////////////////////////////////////////////////////////////////////////////////
3045 ////////////////////////////////////////////////////////////////////////////////
3047 #[stable(feature = "rust1", since = "1.0.0")]
3048 impl<T> Default for &[T] {
3049 /// Creates an empty slice.
3050 fn default() -> Self {
3055 #[stable(feature = "mut_slice_default", since = "1.5.0")]
3056 impl<T> Default for &mut [T] {
3057 /// Creates a mutable empty slice.
3058 fn default() -> Self {
3067 #[stable(feature = "rust1", since = "1.0.0")]
3068 impl<'a, T> IntoIterator for &'a [T] {
3070 type IntoIter = Iter<'a, T>;
3072 fn into_iter(self) -> Iter<'a, T> {
3077 #[stable(feature = "rust1", since = "1.0.0")]
3078 impl<'a, T> IntoIterator for &'a mut [T] {
3079 type Item = &'a mut T;
3080 type IntoIter = IterMut<'a, T>;
3082 fn into_iter(self) -> IterMut<'a, T> {
3087 // Macro helper functions
3089 fn size_from_ptr<T>(_: *const T) -> usize {
// Inlining is_empty and len makes a huge performance difference
macro_rules! is_empty {
    // The way we encode the length of a ZST iterator, this works both for ZST
    // and non-ZST.
    ($self: ident) => {
        $self.ptr.as_ptr() as *const T == $self.end
    };
}
// To get rid of some bounds checks (see `position`), we compute the length in a somewhat
// unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
macro_rules! len {
    ($self: ident) => {{
        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block

        let start = $self.ptr;
        let size = size_from_ptr(start.as_ptr());
        if size == 0 {
            // This _cannot_ use `unchecked_sub` because we depend on wrapping
            // to represent the length of long ZST slice iterators.
            ($self.end as usize).wrapping_sub(start.as_ptr() as usize)
        } else {
            // We know that `start <= end`, so can do better than `offset_from`,
            // which needs to deal in signed. By setting appropriate flags here
            // we can tell LLVM this, which helps it remove bounds checks.
            // SAFETY: By the type invariant, `start <= end`
            let diff = unsafe { unchecked_sub($self.end as usize, start.as_ptr() as usize) };
            // By also telling LLVM that the pointers are apart by an exact
            // multiple of the type size, it can optimize `len() == 0` down to
            // `start == end` instead of `(end - start) < size`.
            // SAFETY: By the type invariant, the pointers are aligned so the
            // distance between them must be a multiple of pointee size
            unsafe { exact_div(diff, size) }
        }
    }};
}
// The shared definition of the `Iter` and `IterMut` iterators
macro_rules! iterator {
    (
        struct $name:ident -> $ptr:ty,
        $elem:ty,
        $raw_mut:tt,
        {$( $mut_:tt )?},
        {$($extra:tt)*}
    ) => {
        // Returns the first element and moves the start of the iterator forwards by 1.
        // Greatly improves performance compared to an inlined function. The iterator
        // must not be empty.
        macro_rules! next_unchecked {
            ($self: ident) => {& $( $mut_ )* *$self.post_inc_start(1)}
        }

        // Returns the last element and moves the end of the iterator backwards by 1.
        // Greatly improves performance compared to an inlined function. The iterator
        // must not be empty.
        macro_rules! next_back_unchecked {
            ($self: ident) => {& $( $mut_ )* *$self.pre_dec_end(1)}
        }

        // Shrinks the iterator when T is a ZST, by moving the end of the iterator
        // backwards by `n`. `n` must not exceed `self.len()`.
        macro_rules! zst_shrink {
            ($self: ident, $n: ident) => {
                $self.end = ($self.end as * $raw_mut u8).wrapping_offset(-$n) as * $raw_mut T;
            }
        }

        impl<'a, T> $name<'a, T> {
            // Helper function for creating a slice from the iterator.
            #[inline(always)]
            fn make_slice(&self) -> &'a [T] {
                unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) }
            }

            // Helper function for moving the start of the iterator forwards by `offset` elements,
            // returning the old start.
            // Unsafe because the offset must not exceed `self.len()`.
            #[inline(always)]
            unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
                if mem::size_of::<T>() == 0 {
                    // For ZSTs the length lives in `end`, so shrink from the end
                    // and hand back the (never-dereferenced-for-data) start pointer.
                    zst_shrink!(self, offset);
                    self.ptr.as_ptr()
                } else {
                    let old = self.ptr.as_ptr();
                    self.ptr = NonNull::new_unchecked(self.ptr.as_ptr().offset(offset));
                    old
                }
            }

            // Helper function for moving the end of the iterator backwards by `offset` elements,
            // returning the new end.
            // Unsafe because the offset must not exceed `self.len()`.
            #[inline(always)]
            unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
                if mem::size_of::<T>() == 0 {
                    zst_shrink!(self, offset);
                    self.ptr.as_ptr()
                } else {
                    self.end = self.end.offset(-offset);
                    self.end
                }
            }
        }

        #[stable(feature = "rust1", since = "1.0.0")]
        impl<T> ExactSizeIterator for $name<'_, T> {
            #[inline(always)]
            fn len(&self) -> usize {
                len!(self)
            }

            #[inline(always)]
            fn is_empty(&self) -> bool {
                is_empty!(self)
            }
        }

        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, T> Iterator for $name<'a, T> {
            type Item = $elem;

            #[inline]
            fn next(&mut self) -> Option<$elem> {
                // could be implemented with slices, but this avoids bounds checks
                unsafe {
                    assume(!self.ptr.as_ptr().is_null());
                    if mem::size_of::<T>() != 0 {
                        assume(!self.end.is_null());
                    }
                    if is_empty!(self) {
                        None
                    } else {
                        Some(next_unchecked!(self))
                    }
                }
            }

            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                let exact = len!(self);
                (exact, Some(exact))
            }

            #[inline]
            fn count(self) -> usize {
                len!(self)
            }

            #[inline]
            fn nth(&mut self, n: usize) -> Option<$elem> {
                if n >= len!(self) {
                    // This iterator is now empty.
                    if mem::size_of::<T>() == 0 {
                        // We have to do it this way as `ptr` may never be 0, but `end`
                        // could be (due to wrapping).
                        self.end = self.ptr.as_ptr();
                    } else {
                        unsafe {
                            // End can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
                            self.ptr = NonNull::new_unchecked(self.end as *mut T);
                        }
                    }
                    return None;
                }
                // We are in bounds. `post_inc_start` does the right thing even for ZSTs.
                unsafe {
                    self.post_inc_start(n as isize);
                    Some(next_unchecked!(self))
                }
            }

            #[inline]
            fn last(mut self) -> Option<$elem> {
                self.next_back()
            }

            #[inline]
            #[rustc_inherit_overflow_checks]
            fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
                Self: Sized,
                P: FnMut(Self::Item) -> bool,
            {
                // The addition might panic on overflow.
                let n = len!(self);
                self.try_fold(0, move |i, x| {
                    if predicate(x) { Err(i) }
                    else { Ok(i + 1) }
                }).err()
                    .map(|i| {
                        // Hint that the matched index is in range so callers'
                        // bounds checks can be elided.
                        unsafe { assume(i < n) };
                        i
                    })
            }

            #[inline]
            fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
                P: FnMut(Self::Item) -> bool,
                Self: Sized + ExactSizeIterator + DoubleEndedIterator
            {
                // No need for an overflow check here, because `ExactSizeIterator`
                // implies that the number of elements fits into a `usize`.
                let n = len!(self);
                self.try_rfold(n, move |i, x| {
                    let i = i - 1;
                    if predicate(x) { Err(i) }
                    else { Ok(i) }
                }).err()
                    .map(|i| {
                        unsafe { assume(i < n) };
                        i
                    })
            }

            $($extra)*
        }

        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, T> DoubleEndedIterator for $name<'a, T> {
            #[inline]
            fn next_back(&mut self) -> Option<$elem> {
                // could be implemented with slices, but this avoids bounds checks
                unsafe {
                    assume(!self.ptr.as_ptr().is_null());
                    if mem::size_of::<T>() != 0 {
                        assume(!self.end.is_null());
                    }
                    if is_empty!(self) {
                        None
                    } else {
                        Some(next_back_unchecked!(self))
                    }
                }
            }

            #[inline]
            fn nth_back(&mut self, n: usize) -> Option<$elem> {
                if n >= len!(self) {
                    // This iterator is now empty.
                    self.end = self.ptr.as_ptr();
                    return None;
                }
                // We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
                unsafe {
                    self.pre_dec_end(n as isize);
                    Some(next_back_unchecked!(self))
                }
            }
        }

        #[stable(feature = "fused", since = "1.26.0")]
        impl<T> FusedIterator for $name<'_, T> {}

        #[unstable(feature = "trusted_len", issue = "37572")]
        unsafe impl<T> TrustedLen for $name<'_, T> {}
    }
}
3349 /// Immutable slice iterator
3351 /// This struct is created by the [`iter`] method on [slices].
3358 /// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]):
3359 /// let slice = &[1, 2, 3];
3361 /// // Then, we iterate over it:
3362 /// for element in slice.iter() {
3363 /// println!("{}", element);
/// [`iter`]: ../../std/primitive.slice.html#method.iter
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
    // ptr == end is a quick test for the Iterator being empty, that works
    // for both ZST and non-ZST.
    // Behaves like `&'a T` for variance and auto-trait purposes without
    // actually storing a reference.
    _marker: marker::PhantomData<&'a T>,

// Debug shows the elements not yet yielded; only `T: Debug` is required.
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Iter").field(&self.as_slice()).finish()

// SAFETY: `Iter` only ever hands out shared `&T` references (its marker is
// `PhantomData<&'a T>`), so sharing it between threads is sound exactly when
// `T: Sync`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for Iter<'_, T> {}
// SAFETY: sending an `Iter` amounts to sending the `&T`s it will yield, and
// `&T` is `Send` precisely when `T: Sync`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Send for Iter<'_, T> {}

impl<'a, T> Iter<'a, T> {
    /// Views the underlying data as a subslice of the original data.
    /// This has the same lifetime as the original slice, and so the
    /// iterator can continue to be used while this exists.
    /// // First, we declare a type which has the `iter` method to get the `Iter`
    /// // struct (&[usize here]):
    /// let slice = &[1, 2, 3];
    /// // Then, we get the iterator:
    /// let mut iter = slice.iter();
    /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
    /// println!("{:?}", iter.as_slice());
    /// // Next, we move to the second element of the slice:
    /// // Now `as_slice` returns "[2, 3]":
    /// println!("{:?}", iter.as_slice());
    #[stable(feature = "iter_to_slice", since = "1.4.0")]
    pub fn as_slice(&self) -> &'a [T] {

// The `Iterator`/`DoubleEndedIterator`/etc. machinery for `Iter` is generated
// by the `iterator!` macro; only `is_sorted_by` is overridden here.
iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, {
    fn is_sorted_by<F>(self, mut compare: F) -> bool
        F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
        // Sorted iff every adjacent pair compares `<=`; an incomparable pair
        // (`compare` returning `None`) makes the whole sequence count as
        // unsorted via `unwrap_or(false)`.
        self.as_slice().windows(2).all(|w| {
            compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)

// Hand-written (rather than derived) so that cloning the iterator does not
// require `T: Clone` — only the raw pointers and the marker are copied.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self {
        Iter { ptr: self.ptr, end: self.end, _marker: self._marker }

#[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
impl<T> AsRef<[T]> for Iter<'_, T> {
    fn as_ref(&self) -> &[T] {
/// Mutable slice iterator.
/// This struct is created by the [`iter_mut`] method on [slices].
/// // First, we declare a type which has `iter_mut` method to get the `IterMut`
/// // struct (&[usize here]):
/// let mut slice = &mut [1, 2, 3];
/// // Then, we iterate over it and increment each element value:
/// for element in slice.iter_mut() {
/// // We now have "[2, 3, 4]":
/// println!("{:?}", slice);
/// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
    end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
    // ptr == end is a quick test for the Iterator being empty, that works
    // for both ZST and non-ZST.
    // Behaves like `&'a mut T` for variance and auto-trait purposes.
    _marker: marker::PhantomData<&'a mut T>,

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IterMut").field(&self.make_slice()).finish()

// SAFETY: a shared `&IterMut` only grants shared access to the elements
// (e.g. via `as_slice`, which returns `&[T]`), so `T: Sync` suffices.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
// SAFETY: `IterMut` embodies exclusive access to the elements, like
// `&mut [T]`; moving it to another thread moves that exclusive access, which
// is sound when `T: Send`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}

impl<'a, T> IterMut<'a, T> {
    /// Views the underlying data as a subslice of the original data.
    /// To avoid creating `&mut` references that alias, this is forced
    /// to consume the iterator.
    /// // First, we declare a type which has `iter_mut` method to get the `IterMut`
    /// // struct (&[usize here]):
    /// let mut slice = &mut [1, 2, 3];
    /// // Then, we get the iterator:
    /// let mut iter = slice.iter_mut();
    /// // We move to next element:
    /// // So if we print what `into_slice` method returns here, we have "[2, 3]":
    /// println!("{:?}", iter.into_slice());
    /// // Now let's modify a value of the slice:
    /// // First we get back the iterator:
    /// let mut iter = slice.iter_mut();
    /// // We change the value of the first element of the slice returned by the `next` method:
    /// *iter.next().unwrap() += 1;
    /// // Now slice is "[2, 2, 3]":
    /// println!("{:?}", slice);
    #[stable(feature = "iter_to_slice", since = "1.4.0")]
    pub fn into_slice(self) -> &'a mut [T] {
        // SAFETY: `ptr` and `len!(self)` describe the iterator's remaining
        // elements, a subslice of the original `&'a mut [T]` borrow; taking
        // `self` by value guarantees no aliasing `&mut` remains (see the doc
        // comment above).
        unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) }

    /// Views the underlying data as a subslice of the original data.
    /// To avoid creating `&mut [T]` references that alias, the returned slice
    /// borrows its lifetime from the iterator the method is applied on.
    /// # #![feature(slice_iter_mut_as_slice)]
    /// let mut slice: &mut [usize] = &mut [1, 2, 3];
    /// // First, we get the iterator:
    /// let mut iter = slice.iter_mut();
    /// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
    /// assert_eq!(iter.as_slice(), &[1, 2, 3]);
    /// // Next, we move to the second element of the slice:
    /// // Now `as_slice` returns "[2, 3]":
    /// assert_eq!(iter.as_slice(), &[2, 3]);
    #[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")]
    pub fn as_slice(&self) -> &[T] {

// The `Iterator`/`DoubleEndedIterator`/etc. impls for `IterMut` are generated
// by the `iterator!` macro, in its mutable configuration.
iterator! {struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
/// An internal abstraction over the splitting iterators, so that
/// splitn, splitn_mut etc can be implemented once.
trait SplitIter: DoubleEndedIterator {
    /// Marks the underlying iterator as complete, extracting the remaining
    /// portion of the slice.
    fn finish(&mut self) -> Option<Self::Item>;
/// An iterator over subslices separated by elements that match a predicate
/// This struct is created by the [`split`] method on [slices].
/// [`split`]: ../../std/primitive.slice.html#method.split
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Split<'a, T: 'a, P>
    P: FnMut(&T) -> bool,

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only `v` and `finished` are shown; the predicate carries no `Debug` bound.
        f.debug_struct("Split").field("v", &self.v).field("finished", &self.finished).finish()

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, P> Clone for Split<'_, T, P>
    P: Clone + FnMut(&T) -> bool,
    fn clone(&self) -> Self {
        Split { v: self.v, pred: self.pred.clone(), finished: self.finished }

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for Split<'a, T, P>
    P: FnMut(&T) -> bool,
    type Item = &'a [T];

    fn next(&mut self) -> Option<&'a [T]> {
        // Find the next matching element: everything before it is the next
        // subslice, and the matching element itself is skipped (`idx + 1`).
        match self.v.iter().position(|x| (self.pred)(x)) {
            None => self.finish(),
                let ret = Some(&self.v[..idx]);
                self.v = &self.v[idx + 1..];

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Until finished, at least one subslice remains; at most `len + 1`
        // (the case where every element matches the predicate).
        if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P>
    P: FnMut(&T) -> bool,

    fn next_back(&mut self) -> Option<&'a [T]> {
        // Mirror of `next`: scan for the last matching element instead.
        match self.v.iter().rposition(|x| (self.pred)(x)) {
            None => self.finish(),
                let ret = Some(&self.v[idx + 1..]);
                self.v = &self.v[..idx];

impl<'a, T, P> SplitIter for Split<'a, T, P>
    P: FnMut(&T) -> bool,

    fn finish(&mut self) -> Option<&'a [T]> {
        self.finished = true;

#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`.
/// This struct is created by the [`split_mut`] method on [slices].
/// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitMut<'a, T: 'a, P>
    P: FnMut(&T) -> bool,

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only `v` and `finished` are shown; the predicate carries no `Debug` bound.
        f.debug_struct("SplitMut").field("v", &self.v).field("finished", &self.finished).finish()

impl<'a, T, P> SplitIter for SplitMut<'a, T, P>
    P: FnMut(&T) -> bool,

    fn finish(&mut self) -> Option<&'a mut [T]> {
        self.finished = true;
        // `mem::replace` moves the remaining subslice out while leaving an
        // empty slice behind, so `self` stays valid without cloning.
        Some(mem::replace(&mut self.v, &mut []))

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for SplitMut<'a, T, P>
    P: FnMut(&T) -> bool,
    type Item = &'a mut [T];

    fn next(&mut self) -> Option<&'a mut [T]> {
        // work around borrowck limitations
        let pred = &mut self.pred;
        self.v.iter().position(|x| (*pred)(x))
            None => self.finish(),
                // Take `v` out of `self` so it can be split into two disjoint
                // `&mut` halves; the matched element itself is skipped (`1..`).
                let tmp = mem::replace(&mut self.v, &mut []);
                let (head, tail) = tmp.split_at_mut(idx);
                self.v = &mut tail[1..];

    fn size_hint(&self) -> (usize, Option<usize>) {
        // if the predicate doesn't match anything, we yield one slice
        // if it matches every element, we yield len+1 empty slices.
        (1, Some(self.v.len() + 1))

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P>
    P: FnMut(&T) -> bool,

    fn next_back(&mut self) -> Option<&'a mut [T]> {
        // work around borrowck limitations
        let pred = &mut self.pred;
        self.v.iter().rposition(|x| (*pred)(x))
            None => self.finish(),
                // As in `next`: split off the tail after the last match,
                // skipping the matched element itself.
                let tmp = mem::replace(&mut self.v, &mut []);
                let (head, tail) = tmp.split_at_mut(idx);
                Some(&mut tail[1..])

#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over subslices separated by elements that match a predicate
/// function, starting from the end of the slice.
/// This struct is created by the [`rsplit`] method on [slices].
/// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "slice_rsplit", since = "1.27.0")]
#[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
pub struct RSplit<'a, T: 'a, P>
    P: FnMut(&T) -> bool,
    // `RSplit` is simply `Split` driven from the opposite end.
    inner: Split<'a, T, P>,

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RSplit")
            .field("v", &self.inner.v)
            .field("finished", &self.inner.finished)

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplit<'a, T, P>
    P: FnMut(&T) -> bool,
    type Item = &'a [T];

    // The front of `RSplit` is the back of the wrapped `Split`, and vice versa.
    fn next(&mut self) -> Option<&'a [T]> {
        self.inner.next_back()

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P>
    P: FnMut(&T) -> bool,

    fn next_back(&mut self) -> Option<&'a [T]> {

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplit<'a, T, P>
    P: FnMut(&T) -> bool,

    fn finish(&mut self) -> Option<&'a [T]> {

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`, starting from the end of the slice.
/// This struct is created by the [`rsplit_mut`] method on [slices].
/// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "slice_rsplit", since = "1.27.0")]
pub struct RSplitMut<'a, T: 'a, P>
    P: FnMut(&T) -> bool,
    // `RSplitMut` is simply `SplitMut` driven from the opposite end.
    inner: SplitMut<'a, T, P>,

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RSplitMut")
            .field("v", &self.inner.v)
            .field("finished", &self.inner.finished)

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplitMut<'a, T, P>
    P: FnMut(&T) -> bool,

    fn finish(&mut self) -> Option<&'a mut [T]> {

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplitMut<'a, T, P>
    P: FnMut(&T) -> bool,
    type Item = &'a mut [T];

    // The front of `RSplitMut` is the back of the wrapped `SplitMut`.
    fn next(&mut self) -> Option<&'a mut [T]> {
        self.inner.next_back()

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P>
    P: FnMut(&T) -> bool,

    fn next_back(&mut self) -> Option<&'a mut [T]> {

#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// A private iterator over subslices separated by elements that
/// match a predicate function, splitting at most a fixed number of
struct GenericSplitN<I> {

impl<T, I: SplitIter<Item = T>> Iterator for GenericSplitN<I> {

    fn next(&mut self) -> Option<T> {

    fn size_hint(&self) -> (usize, Option<usize>) {
        // The remaining split budget (`count`) caps the inner upper bound.
        let (lower, upper_opt) = self.iter.size_hint();
        (lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
/// This struct is created by the [`splitn`] method on [slices].
/// [`splitn`]: ../../std/primitive.slice.html#method.splitn
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitN<'a, T: 'a, P>
    P: FnMut(&T) -> bool,
    // The unlimited splitter plus a remaining-splits budget.
    inner: GenericSplitN<Split<'a, T, P>>,

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SplitN").field("inner", &self.inner).finish()

/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
/// This struct is created by the [`rsplitn`] method on [slices].
/// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitN<'a, T: 'a, P>
    P: FnMut(&T) -> bool,
    inner: GenericSplitN<RSplit<'a, T, P>>,

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RSplitN").field("inner", &self.inner).finish()

/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
/// This struct is created by the [`splitn_mut`] method on [slices].
/// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitNMut<'a, T: 'a, P>
    P: FnMut(&T) -> bool,
    inner: GenericSplitN<SplitMut<'a, T, P>>,

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SplitNMut").field("inner", &self.inner).finish()

/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
/// This struct is created by the [`rsplitn_mut`] method on [slices].
/// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitNMut<'a, T: 'a, P>
    P: FnMut(&T) -> bool,
    inner: GenericSplitN<RSplitMut<'a, T, P>>,

#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P>
    P: FnMut(&T) -> bool,
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RSplitNMut").field("inner", &self.inner).finish()
// Generates the `Iterator` (and `FusedIterator`) impls for the four `*SplitN*`
// wrappers above by delegating every call to the wrapped `GenericSplitN`.
macro_rules! forward_iterator {
    ($name:ident: $elem:ident, $iter_of:ty) => {
        #[stable(feature = "rust1", since = "1.0.0")]
        impl<'a, $elem, P> Iterator for $name<'a, $elem, P>
            P: FnMut(&T) -> bool,
            type Item = $iter_of;

            fn next(&mut self) -> Option<$iter_of> {

            fn size_hint(&self) -> (usize, Option<usize>) {
                self.inner.size_hint()

        #[stable(feature = "fused", since = "1.26.0")]
        impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool {}

forward_iterator! { SplitN: T, &'a [T] }
forward_iterator! { RSplitN: T, &'a [T] }
forward_iterator! { SplitNMut: T, &'a mut [T] }
forward_iterator! { RSplitNMut: T, &'a mut [T] }
/// An iterator over overlapping subslices of length `size`.
/// This struct is created by the [`windows`] method on [slices].
/// [`windows`]: ../../std/primitive.slice.html#method.windows
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Windows<'a, T: 'a> {

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Windows<'_, T> {
    fn clone(&self) -> Self {
        Windows { v: self.v, size: self.size }

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Windows<'a, T> {
    type Item = &'a [T];

    fn next(&mut self) -> Option<&'a [T]> {
        if self.size > self.v.len() {
        // Yield the leading `size` elements, then slide forward by one.
        let ret = Some(&self.v[..self.size]);
        self.v = &self.v[1..];

    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.size > self.v.len() {
        // Exactly `len - size + 1` windows remain, so the hint is exact.
        let size = self.v.len() - self.size + 1;

    fn count(self) -> usize {

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // The n-th window spans `[n, n + size)`; `overflowing_add` guards
        // against a huge `n` wrapping around.
        let (end, overflow) = self.size.overflowing_add(n);
        if end > self.v.len() || overflow {
        let nth = &self.v[n..end];
        self.v = &self.v[n + 1..];

    fn last(self) -> Option<Self::Item> {
        if self.size > self.v.len() {
        // The final window occupies the last `size` elements.
        let start = self.v.len() - self.size;
        Some(&self.v[start..])

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Windows<'a, T> {

    fn next_back(&mut self) -> Option<&'a [T]> {
        if self.size > self.v.len() {
        // Yield the trailing window, then shrink the slice by one from the back.
        let ret = Some(&self.v[self.v.len() - self.size..]);
        self.v = &self.v[..self.v.len() - 1];

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        // The n-th window from the back ends at index `len - n`;
        // `overflowing_sub` guards against `n > len`.
        let (end, overflow) = self.v.len().overflowing_sub(n);
        if end < self.size || overflow {
        let ret = &self.v[end - self.size..end];
        self.v = &self.v[..end - 1];

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Windows<'_, T> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Windows<'_, T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Windows<'_, T> {}

unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
    // Safety contract: the caller must guarantee `i` addresses a valid
    // window, i.e. `i + self.size <= self.v.len()`.
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
        from_raw_parts(self.v.as_ptr().add(i), self.size)
    fn may_have_side_effect() -> bool {
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the beginning of the slice.
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
/// This struct is created by the [`chunks`] method on [slices].
/// [`chunks`]: ../../std/primitive.slice.html#method.chunks
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chunks<'a, T: 'a> {

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Chunks<'_, T> {
    fn clone(&self) -> Self {
        Chunks { v: self.v, chunk_size: self.chunk_size }

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Chunks<'a, T> {
    type Item = &'a [T];

    fn next(&mut self) -> Option<&'a [T]> {
        if self.v.is_empty() {
        // `min` makes the final chunk shorter when the slice length is not a
        // multiple of `chunk_size`.
        let chunksz = cmp::min(self.v.len(), self.chunk_size);
        let (fst, snd) = self.v.split_at(chunksz);

    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.v.is_empty() {
        // Ceiling division: a non-zero remainder adds one (short) final chunk.
        let n = self.v.len() / self.chunk_size;
        let rem = self.v.len() % self.chunk_size;
        let n = if rem > 0 { n + 1 } else { n };

    fn count(self) -> usize {

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // Skip `n` chunks in O(1); `overflowing_mul` guards huge `n`.
        let (start, overflow) = n.overflowing_mul(self.chunk_size);
        if start >= self.v.len() || overflow {
        // Clamp the chunk end to the slice length (final chunk may be short).
        let end = match start.checked_add(self.chunk_size) {
            Some(sum) => cmp::min(self.v.len(), sum),
            None => self.v.len(),
        let nth = &self.v[start..end];
        self.v = &self.v[end..];

    fn last(self) -> Option<Self::Item> {
        if self.v.is_empty() {
        // Round `len - 1` down to a chunk boundary to find where the final
        // chunk starts.
        let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
        Some(&self.v[start..])

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {

    fn next_back(&mut self) -> Option<&'a [T]> {
        if self.v.is_empty() {
        // The chunk at the back is the remainder when the length is not a
        // multiple of `chunk_size`; otherwise it is a full chunk.
        let remainder = self.v.len() % self.chunk_size;
        let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
        let (fst, snd) = self.v.split_at(self.v.len() - chunksz);

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        let len = self.len();
        // Index the (len - 1 - n)-th chunk from the front.
        let start = (len - 1 - n) * self.chunk_size;
        let end = match start.checked_add(self.chunk_size) {
            Some(res) => cmp::min(res, self.v.len()),
            None => self.v.len(),
        let nth_back = &self.v[start..end];
        self.v = &self.v[..start];

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Chunks<'_, T> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Chunks<'_, T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Chunks<'_, T> {}

unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
    // Safety contract: the caller must guarantee chunk index `i` is in-bounds
    // for the remaining slice.
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
        let start = i * self.chunk_size;
        let end = match start.checked_add(self.chunk_size) {
            None => self.v.len(),
            Some(end) => cmp::min(end, self.v.len()),
        from_raw_parts(self.v.as_ptr().add(start), end - start)
    fn may_have_side_effect() -> bool {
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the beginning of the slice.
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
/// This struct is created by the [`chunks_mut`] method on [slices].
/// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ChunksMut<'a, T: 'a> {

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for ChunksMut<'a, T> {
    type Item = &'a mut [T];

    fn next(&mut self) -> Option<&'a mut [T]> {
        if self.v.is_empty() {
        // `min` makes the final chunk shorter when the slice length is not a
        // multiple of `chunk_size`.
        let sz = cmp::min(self.v.len(), self.chunk_size);
        // Take `v` out of `self` so it can be split into disjoint `&mut` halves.
        let tmp = mem::replace(&mut self.v, &mut []);
        let (head, tail) = tmp.split_at_mut(sz);

    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.v.is_empty() {
        // Ceiling division: a non-zero remainder adds one (short) final chunk.
        let n = self.v.len() / self.chunk_size;
        let rem = self.v.len() % self.chunk_size;
        let n = if rem > 0 { n + 1 } else { n };

    fn count(self) -> usize {

    fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
        // Skip `n` chunks in O(1); `overflowing_mul` guards huge `n`.
        let (start, overflow) = n.overflowing_mul(self.chunk_size);
        if start >= self.v.len() || overflow {
        // Clamp the chunk end to the slice length (final chunk may be short).
        let end = match start.checked_add(self.chunk_size) {
            Some(sum) => cmp::min(self.v.len(), sum),
            None => self.v.len(),
        let tmp = mem::replace(&mut self.v, &mut []);
        let (head, tail) = tmp.split_at_mut(end);
        let (_, nth) = head.split_at_mut(start);

    fn last(self) -> Option<Self::Item> {
        if self.v.is_empty() {
        // Round `len - 1` down to a chunk boundary to find the final chunk.
        let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
        Some(&mut self.v[start..])

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {

    fn next_back(&mut self) -> Option<&'a mut [T]> {
        if self.v.is_empty() {
        // The chunk at the back is the remainder when the length is not a
        // multiple of `chunk_size`; otherwise it is a full chunk.
        let remainder = self.v.len() % self.chunk_size;
        let sz = if remainder != 0 { remainder } else { self.chunk_size };
        let tmp = mem::replace(&mut self.v, &mut []);
        let tmp_len = tmp.len();
        let (head, tail) = tmp.split_at_mut(tmp_len - sz);

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        let len = self.len();
        // Index the (len - 1 - n)-th chunk from the front.
        let start = (len - 1 - n) * self.chunk_size;
        let end = match start.checked_add(self.chunk_size) {
            Some(res) => cmp::min(res, self.v.len()),
            None => self.v.len(),
        let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
        let (head, nth_back) = temp.split_at_mut(start);

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for ChunksMut<'_, T> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for ChunksMut<'_, T> {}

unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
    // Safety contract: the caller must guarantee chunk index `i` is in-bounds
    // and that no two live `&mut` chunks overlap.
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
        let start = i * self.chunk_size;
        let end = match start.checked_add(self.chunk_size) {
            None => self.v.len(),
            Some(end) => cmp::min(end, self.v.len()),
        from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
    fn may_have_side_effect() -> bool {
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the beginning of the slice.
/// When the slice len is not evenly divided by the chunk size, the last
/// up to `chunk_size-1` elements will be omitted but can be retrieved from
/// the [`remainder`] function from the iterator.
/// This struct is created by the [`chunks_exact`] method on [slices].
/// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
/// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub struct ChunksExact<'a, T: 'a> {

impl<'a, T> ChunksExact<'a, T> {
    /// Returns the remainder of the original slice that is not going to be
    /// returned by the iterator. The returned slice has at most `chunk_size-1`
    #[stable(feature = "chunks_exact", since = "1.31.0")]
    pub fn remainder(&self) -> &'a [T] {

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> Clone for ChunksExact<'_, T> {
    fn clone(&self) -> Self {
        ChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> Iterator for ChunksExact<'a, T> {
    type Item = &'a [T];

    fn next(&mut self) -> Option<&'a [T]> {
        // Unlike `Chunks`, stop as soon as a full chunk no longer fits:
        // every yielded chunk is exactly `chunk_size` long.
        if self.v.len() < self.chunk_size {
        let (fst, snd) = self.v.split_at(self.chunk_size);

    fn size_hint(&self) -> (usize, Option<usize>) {
        // No rounding up here — partial chunks are never yielded.
        let n = self.v.len() / self.chunk_size;

    fn count(self) -> usize {

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // Skip `n` chunks in O(1); `overflowing_mul` guards huge `n`.
        let (start, overflow) = n.overflowing_mul(self.chunk_size);
        if start >= self.v.len() || overflow {
        let (_, snd) = self.v.split_at(start);

    fn last(mut self) -> Option<Self::Item> {

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {

    fn next_back(&mut self) -> Option<&'a [T]> {
        if self.v.len() < self.chunk_size {
        let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        let len = self.len();
        // Every chunk is full-sized, so `end` needs no clamping.
        let start = (len - 1 - n) * self.chunk_size;
        let end = start + self.chunk_size;
        let nth_back = &self.v[start..end];
        self.v = &self.v[..start];

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> ExactSizeIterator for ChunksExact<'_, T> {
    fn is_empty(&self) -> bool {

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> FusedIterator for ChunksExact<'_, T> {}

#[stable(feature = "chunks_exact", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
    // Safety contract: the caller must guarantee chunk index `i` is in-bounds;
    // chunks are always exactly `chunk_size` long, so no end clamping is needed.
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
        let start = i * self.chunk_size;
        from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
    fn may_have_side_effect() -> bool {
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the beginning of the slice.
/// When the slice len is not evenly divided by the chunk size, the last up to
/// `chunk_size-1` elements will be omitted but can be retrieved from the
/// [`into_remainder`] function from the iterator.
/// This struct is created by the [`chunks_exact_mut`] method on [slices].
/// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
/// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub struct ChunksExactMut<'a, T: 'a> {

impl<'a, T> ChunksExactMut<'a, T> {
    /// Returns the remainder of the original slice that is not going to be
    /// returned by the iterator. The returned slice has at most `chunk_size-1`
    #[stable(feature = "chunks_exact", since = "1.31.0")]
    pub fn into_remainder(self) -> &'a mut [T] {

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> Iterator for ChunksExactMut<'a, T> {
    type Item = &'a mut [T];

    fn next(&mut self) -> Option<&'a mut [T]> {
        // Stop as soon as a full chunk no longer fits: every yielded chunk is
        // exactly `chunk_size` long.
        if self.v.len() < self.chunk_size {
        // Take `v` out of `self` so it can be split into disjoint `&mut` halves.
        let tmp = mem::replace(&mut self.v, &mut []);
        let (head, tail) = tmp.split_at_mut(self.chunk_size);

    fn size_hint(&self) -> (usize, Option<usize>) {
        // No rounding up here — partial chunks are never yielded.
        let n = self.v.len() / self.chunk_size;

    fn count(self) -> usize {

    fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
        // Skip `n` chunks in O(1); `overflowing_mul` guards huge `n`.
        let (start, overflow) = n.overflowing_mul(self.chunk_size);
        if start >= self.v.len() || overflow {
        let tmp = mem::replace(&mut self.v, &mut []);
        let (_, snd) = tmp.split_at_mut(start);

    fn last(mut self) -> Option<Self::Item> {

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {

    fn next_back(&mut self) -> Option<&'a mut [T]> {
        if self.v.len() < self.chunk_size {
        let tmp = mem::replace(&mut self.v, &mut []);
        let tmp_len = tmp.len();
        let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        let len = self.len();
        // Every chunk is full-sized, so `end` needs no clamping.
        let start = (len - 1 - n) * self.chunk_size;
        let end = start + self.chunk_size;
        let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
        let (head, nth_back) = temp.split_at_mut(start);

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
    fn is_empty(&self) -> bool {

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}

#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> FusedIterator for ChunksExactMut<'_, T> {}

#[stable(feature = "chunks_exact", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
    // Safety contract: the caller must guarantee chunk index `i` is in-bounds
    // and that no two live `&mut` chunks overlap.
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
        let start = i * self.chunk_size;
        from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
    fn may_have_side_effect() -> bool {
4779 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
4780 /// time), starting at the end of the slice.
4782 /// When the slice len is not evenly divided by the chunk size, the last slice
4783 /// of the iteration will be the remainder.
4785 /// This struct is created by the [`rchunks`] method on [slices].
4787 /// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
4788 /// [slices]: ../../std/primitive.slice.html
4790 #[stable(feature = "rchunks", since = "1.31.0")]
// Fields (elided in this view) are presumably the remaining slice `v` and
// `chunk_size`, matching the sibling Chunks* iterators — the impls below
// use exactly those two.
4791 pub struct RChunks<'a, T: 'a> {
4796 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
4797 #[stable(feature = "rchunks", since = "1.31.0")]
// Manual impl so `RChunks` is `Clone` even when `T` is not: only the shared
// reference and the chunk size are copied.
4798 impl<T> Clone for RChunks<'_, T> {
4799 fn clone(&self) -> Self {
4800 RChunks { v: self.v, chunk_size: self.chunk_size }
4804 #[stable(feature = "rchunks", since = "1.31.0")]
// Forward iteration yields chunks from the BACK of the slice; the final
// (possibly short) chunk is whatever is left at the front.
4805 impl<'a, T> Iterator for RChunks<'a, T> {
4806 type Item = &'a [T];
4809 fn next(&mut self) -> Option<&'a [T]> {
4810 if self.v.is_empty() {
// Clamp so the very last chunk can be shorter than `chunk_size`.
4813 let chunksz = cmp::min(self.v.len(), self.chunk_size);
4814 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
4821 fn size_hint(&self) -> (usize, Option<usize>) {
4822 if self.v.is_empty() {
// Round up: a non-empty remainder counts as one more chunk.
4825 let n = self.v.len() / self.chunk_size;
4826 let rem = self.v.len() % self.chunk_size;
4827 let n = if rem > 0 { n + 1 } else { n };
4833 fn count(self) -> usize {
4838 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// `overflowing_mul` catches absurdly large `n`; overflow means the
// requested chunk is certainly out of range.
4839 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4840 if end >= self.v.len() || overflow {
4844 // Can't underflow because of the check above
4845 let end = self.v.len() - end;
// The nth chunk may be the short front remainder, hence `checked_sub`.
4846 let start = match end.checked_sub(self.chunk_size) {
4850 let nth = &self.v[start..end];
4851 self.v = &self.v[0..start];
4857 fn last(self) -> Option<Self::Item> {
4858 if self.v.is_empty() {
// The last chunk yielded is the front remainder, or a full chunk when
// the length divides evenly.
4861 let rem = self.v.len() % self.chunk_size;
4862 let end = if rem == 0 { self.chunk_size } else { rem };
4863 Some(&self.v[0..end])
4868 #[stable(feature = "rchunks", since = "1.31.0")]
4869 impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
4871 fn next_back(&mut self) -> Option<&'a [T]> {
4872 if self.v.is_empty() {
// The back of the iteration is the FRONT of the slice, which holds the
// (possibly short) remainder chunk.
4875 let remainder = self.v.len() % self.chunk_size;
4876 let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
4877 let (fst, snd) = self.v.split_at(chunksz);
4884 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
4885 let len = self.len();
4890 // can't underflow because `n < len`
4891 let offset_from_end = (len - 1 - n) * self.chunk_size;
4892 let end = self.v.len() - offset_from_end;
// `saturating_sub` handles the short front-remainder chunk.
4893 let start = end.saturating_sub(self.chunk_size);
4894 let nth_back = &self.v[start..end];
4895 self.v = &self.v[end..];
4901 #[stable(feature = "rchunks", since = "1.31.0")]
// `size_hint` is exact, so the default `ExactSizeIterator::len` suffices.
4902 impl<T> ExactSizeIterator for RChunks<'_, T> {}
4904 #[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: relies on the exact `size_hint` above.
4905 unsafe impl<T> TrustedLen for RChunks<'_, T> {}
4907 #[stable(feature = "rchunks", since = "1.31.0")]
4908 impl<T> FusedIterator for RChunks<'_, T> {}
4911 #[stable(feature = "rchunks", since = "1.31.0")]
4912 unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
// Chunk `i` is counted from the back of the slice; the front-most chunk may
// be short, so `start` is clamped via `checked_sub` (`None` arm elided here).
4913 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
4914 let end = self.v.len() - i * self.chunk_size;
4915 let start = match end.checked_sub(self.chunk_size) {
4917 Some(start) => start,
4919 from_raw_parts(self.v.as_ptr().add(start), end - start)
// Pure slice access: no observable side effects when elements are skipped.
4921 fn may_have_side_effect() -> bool {
4926 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
4927 /// elements at a time), starting at the end of the slice.
4929 /// When the slice len is not evenly divided by the chunk size, the last slice
4930 /// of the iteration will be the remainder.
4932 /// This struct is created by the [`rchunks_mut`] method on [slices].
4934 /// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
4935 /// [slices]: ../../std/primitive.slice.html
4937 #[stable(feature = "rchunks", since = "1.31.0")]
// No `Clone` impl: the wrapped `&mut [T]` is unique and cannot be duplicated.
4938 pub struct RChunksMut<'a, T: 'a> {
4943 #[stable(feature = "rchunks", since = "1.31.0")]
// Mirror of `Iterator for RChunks`, but splitting with `split_at_mut` and
// the `mem::replace` trick to move the `&mut` slice out of the field.
4944 impl<'a, T> Iterator for RChunksMut<'a, T> {
4945 type Item = &'a mut [T];
4948 fn next(&mut self) -> Option<&'a mut [T]> {
4949 if self.v.is_empty() {
// Clamp so the last (front-most) chunk may be shorter than `chunk_size`.
4952 let sz = cmp::min(self.v.len(), self.chunk_size);
4953 let tmp = mem::replace(&mut self.v, &mut []);
4954 let tmp_len = tmp.len();
4955 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
4962 fn size_hint(&self) -> (usize, Option<usize>) {
4963 if self.v.is_empty() {
// Round up: a non-empty remainder counts as one more chunk.
4966 let n = self.v.len() / self.chunk_size;
4967 let rem = self.v.len() % self.chunk_size;
4968 let n = if rem > 0 { n + 1 } else { n };
4974 fn count(self) -> usize {
4979 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
// Overflow in `n * chunk_size` means the chunk is certainly out of range.
4980 let (end, overflow) = n.overflowing_mul(self.chunk_size);
4981 if end >= self.v.len() || overflow {
4985 // Can't underflow because of the check above
4986 let end = self.v.len() - end;
// The nth chunk may be the short front remainder, hence `checked_sub`.
4987 let start = match end.checked_sub(self.chunk_size) {
4991 let tmp = mem::replace(&mut self.v, &mut []);
4992 let (head, tail) = tmp.split_at_mut(start);
4993 let (nth, _) = tail.split_at_mut(end - start);
5000 fn last(self) -> Option<Self::Item> {
5001 if self.v.is_empty() {
// The last chunk yielded is the front remainder, or a full chunk when
// the length divides evenly. `self` is consumed, so indexing is fine here.
5004 let rem = self.v.len() % self.chunk_size;
5005 let end = if rem == 0 { self.chunk_size } else { rem };
5006 Some(&mut self.v[0..end])
5011 #[stable(feature = "rchunks", since = "1.31.0")]
5012 impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
5014 fn next_back(&mut self) -> Option<&'a mut [T]> {
5015 if self.v.is_empty() {
// The back of the iteration is the FRONT of the slice: the (possibly
// short) remainder chunk comes out first when iterating backwards.
5018 let remainder = self.v.len() % self.chunk_size;
5019 let sz = if remainder != 0 { remainder } else { self.chunk_size };
// Move the slice out of `self.v` to split it without overlapping borrows.
5020 let tmp = mem::replace(&mut self.v, &mut []);
5021 let (head, tail) = tmp.split_at_mut(sz);
5028 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5029 let len = self.len();
5034 // can't underflow because `n < len`
5035 let offset_from_end = (len - 1 - n) * self.chunk_size;
5036 let end = self.v.len() - offset_from_end;
// `saturating_sub` handles the short front-remainder chunk.
5037 let start = end.saturating_sub(self.chunk_size);
5038 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5039 let (_, nth_back) = tmp.split_at_mut(start);
5046 #[stable(feature = "rchunks", since = "1.31.0")]
// Exact `size_hint` makes the default `len` implementation correct.
5047 impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
5049 #[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: relies on the exact `size_hint`.
5050 unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
5052 #[stable(feature = "rchunks", since = "1.31.0")]
5053 impl<T> FusedIterator for RChunksMut<'_, T> {}
5056 #[stable(feature = "rchunks", since = "1.31.0")]
5057 unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
// Chunk `i` counted from the back; front-most chunk may be short, so
// `start` is clamped via `checked_sub` (`None` arm elided in this view).
5058 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5059 let end = self.v.len() - i * self.chunk_size;
5060 let start = match end.checked_sub(self.chunk_size) {
5062 Some(start) => start,
5064 from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
// Producing a chunk has no side effects, so elements may be skipped freely.
5066 fn may_have_side_effect() -> bool {
5071 /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
5072 /// time), starting at the end of the slice.
5074 /// When the slice len is not evenly divided by the chunk size, the last
5075 /// up to `chunk_size-1` elements will be omitted but can be retrieved from
5076 /// the [`remainder`] function from the iterator.
5078 /// This struct is created by the [`rchunks_exact`] method on [slices].
5080 /// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
5081 /// [`remainder`]: ../../std/slice/struct.RChunksExact.html#method.remainder
5082 /// [slices]: ../../std/primitive.slice.html
// Fields (elided in this view) are presumably `v`, the separately stored
// remainder `rem`, and `chunk_size` — the `Clone` impl below uses all three.
5084 #[stable(feature = "rchunks", since = "1.31.0")]
5085 pub struct RChunksExact<'a, T: 'a> {
5091 impl<'a, T> RChunksExact<'a, T> {
5092 /// Returns the remainder of the original slice that is not going to be
5093 /// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
5095 #[stable(feature = "rchunks", since = "1.31.0")]
5096 pub fn remainder(&self) -> &'a [T] {
5101 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
5102 #[stable(feature = "rchunks", since = "1.31.0")]
// Manual impl so the iterator is `Clone` even when `T` is not: only shared
// references and the chunk size are copied.
5103 impl<'a, T> Clone for RChunksExact<'a, T> {
5104 fn clone(&self) -> RChunksExact<'a, T> {
5105 RChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
5109 #[stable(feature = "rchunks", since = "1.31.0")]
// Exact variant: every chunk is exactly `chunk_size` long; the short
// remainder is kept out of `self.v` and exposed via `remainder()`.
5110 impl<'a, T> Iterator for RChunksExact<'a, T> {
5111 type Item = &'a [T];
5114 fn next(&mut self) -> Option<&'a [T]> {
5115 if self.v.len() < self.chunk_size {
// Split a full chunk off the BACK of the slice.
5118 let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
5125 fn size_hint(&self) -> (usize, Option<usize>) {
// Exact count: no rounding up, partial chunks are never yielded.
5126 let n = self.v.len() / self.chunk_size;
5131 fn count(self) -> usize {
5136 fn nth(&mut self, n: usize) -> Option<Self::Item> {
// Overflow in `n * chunk_size` means the chunk is certainly out of range.
5137 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5138 if end >= self.v.len() || overflow {
// Drop the `n` rearmost chunks, then continue from the shortened slice.
5142 let (fst, _) = self.v.split_at(self.v.len() - end);
5149 fn last(mut self) -> Option<Self::Item> {
5154 #[stable(feature = "rchunks", since = "1.31.0")]
5155 impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
5157 fn next_back(&mut self) -> Option<&'a [T]> {
5158 if self.v.len() < self.chunk_size {
// Back of the iteration = front of the slice; split a full chunk off there.
5161 let (fst, snd) = self.v.split_at(self.chunk_size);
5168 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5169 let len = self.len();
5174 // now that we know that `n` corresponds to a chunk,
5175 // none of these operations can underflow/overflow
5176 let offset = (len - n) * self.chunk_size;
5177 let start = self.v.len() - offset;
5178 let end = start + self.chunk_size;
5179 let nth_back = &self.v[start..end];
// Keep only the part behind the chunk just yielded.
5180 self.v = &self.v[end..];
5186 #[stable(feature = "rchunks", since = "1.31.0")]
5187 impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
// Overrides `is_empty` directly (body elided in this view).
5188 fn is_empty(&self) -> bool {
5193 #[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: relies on the exact `size_hint` of this iterator.
5194 unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
5196 #[stable(feature = "rchunks", since = "1.31.0")]
5197 impl<T> FusedIterator for RChunksExact<'_, T> {}
5200 #[stable(feature = "rchunks", since = "1.31.0")]
5201 unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
// All chunks are full-size here, so plain subtraction suffices — no
// `checked_sub` clamp as in the non-exact RChunks impl.
5202 unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
5203 let end = self.v.len() - i * self.chunk_size;
5204 let start = end - self.chunk_size;
5205 from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
// Pure slice access: no side effects when elements are skipped.
5207 fn may_have_side_effect() -> bool {
5212 /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
5213 /// elements at a time), starting at the end of the slice.
5215 /// When the slice len is not evenly divided by the chunk size, the last up to
5216 /// `chunk_size-1` elements will be omitted but can be retrieved from the
5217 /// [`into_remainder`] function from the iterator.
5219 /// This struct is created by the [`rchunks_exact_mut`] method on [slices].
5221 /// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
5222 /// [`into_remainder`]: ../../std/slice/struct.RChunksExactMut.html#method.into_remainder
5223 /// [slices]: ../../std/primitive.slice.html
5225 #[stable(feature = "rchunks", since = "1.31.0")]
// No `Clone`: holds unique `&mut` borrows of the slice and its remainder.
5226 pub struct RChunksExactMut<'a, T: 'a> {
5232 impl<'a, T> RChunksExactMut<'a, T> {
5233 /// Returns the remainder of the original slice that is not going to be
5234 /// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements. Consumes `self` because the remainder is a unique `&mut` borrow.
5236 #[stable(feature = "rchunks", since = "1.31.0")]
5237 pub fn into_remainder(self) -> &'a mut [T] {
5242 #[stable(feature = "rchunks", since = "1.31.0")]
// Mutable exact variant: full-size chunks from the back, using the
// `mem::replace` trick to split without overlapping `&mut` borrows.
5243 impl<'a, T> Iterator for RChunksExactMut<'a, T> {
5244 type Item = &'a mut [T];
5247 fn next(&mut self) -> Option<&'a mut [T]> {
5248 if self.v.len() < self.chunk_size {
5251 let tmp = mem::replace(&mut self.v, &mut []);
5252 let tmp_len = tmp.len();
5253 let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
5260 fn size_hint(&self) -> (usize, Option<usize>) {
// Exact count: partial chunks are never yielded.
5261 let n = self.v.len() / self.chunk_size;
5266 fn count(self) -> usize {
5271 fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
// Overflow in `n * chunk_size` means the chunk is certainly out of range.
5272 let (end, overflow) = n.overflowing_mul(self.chunk_size);
5273 if end >= self.v.len() || overflow {
// Drop the `n` rearmost chunks, then continue from the shortened slice.
5277 let tmp = mem::replace(&mut self.v, &mut []);
5278 let tmp_len = tmp.len();
5279 let (fst, _) = tmp.split_at_mut(tmp_len - end);
5286 fn last(mut self) -> Option<Self::Item> {
5291 #[stable(feature = "rchunks", since = "1.31.0")]
5292 impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
5294 fn next_back(&mut self) -> Option<&'a mut [T]> {
5295 if self.v.len() < self.chunk_size {
// Back of the iteration = front of the slice; split a full chunk off there.
5298 let tmp = mem::replace(&mut self.v, &mut []);
5299 let (head, tail) = tmp.split_at_mut(self.chunk_size);
5306 fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
5307 let len = self.len();
5312 // now that we know that `n` corresponds to a chunk,
5313 // none of these operations can underflow/overflow
5314 let offset = (len - n) * self.chunk_size;
5315 let start = self.v.len() - offset;
5316 let end = start + self.chunk_size;
5317 let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
5318 let (_, nth_back) = tmp.split_at_mut(start);
5325 #[stable(feature = "rchunks", since = "1.31.0")]
5326 impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
// Overrides `is_empty` directly (body elided in this view).
5327 fn is_empty(&self) -> bool {
5332 #[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: relies on the exact `size_hint` of this iterator.
5333 unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
5335 #[stable(feature = "rchunks", since = "1.31.0")]
5336 impl<T> FusedIterator for RChunksExactMut<'_, T> {}
5339 #[stable(feature = "rchunks", since = "1.31.0")]
5340 unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
// All chunks are full-size, so plain subtraction computes the bounds.
5341 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
5342 let end = self.v.len() - i * self.chunk_size;
5343 let start = end - self.chunk_size;
5344 from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
// Producing a chunk has no side effects, so elements may be skipped freely.
5346 fn may_have_side_effect() -> bool {
5355 /// Forms a slice from a pointer and a length.
5357 /// The `len` argument is the number of **elements**, not the number of bytes.
///
/// # Safety
5361 /// Behavior is undefined if any of the following conditions are violated:
5363 /// * `data` must be [valid] for reads for `len * mem::size_of::<T>()` many bytes,
5364 /// and it must be properly aligned. This means in particular:
5366 /// * The entire memory range of this slice must be contained within a single allocated object!
5367 /// Slices can never span across multiple allocated objects.
5368 /// * `data` must be non-null and aligned even for zero-length slices. One
5369 /// reason for this is that enum layout optimizations may rely on references
5370 /// (including slices of any length) being aligned and non-null to distinguish
5371 /// them from other data. You can obtain a pointer that is usable as `data`
5372 /// for zero-length slices using [`NonNull::dangling()`].
5374 /// * The memory referenced by the returned slice must not be mutated for the duration
5375 /// of lifetime `'a`, except inside an `UnsafeCell`.
5377 /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
5378 /// See the safety documentation of [`pointer::offset`].
5382 /// The lifetime for the returned slice is inferred from its usage. To
5383 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
5384 /// source lifetime is safe in the context, such as by providing a helper
5385 /// function taking the lifetime of a host value for the slice, or by explicit
/// annotation.
5393 /// // manifest a slice for a single element
5395 /// let ptr = &x as *const _;
5396 /// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
5397 /// assert_eq!(slice[0], 42);
5400 /// [valid]: ../../std/ptr/index.html#safety
5401 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
5402 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
5404 #[stable(feature = "rust1", since = "1.0.0")]
5405 pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
// Debug-only sanity checks; upholding the full contract remains the
// caller's responsibility in release builds.
5406 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
// `saturating_mul` avoids overflow in the check itself.
5408 mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5409 "attempt to create slice covering at least half the address space"
5411 &*ptr::slice_from_raw_parts(data, len)
5414 /// Performs the same functionality as [`from_raw_parts`], except that a
5415 /// mutable slice is returned.
///
/// # Safety
5419 /// Behavior is undefined if any of the following conditions are violated:
5421 /// * `data` must be [valid] for writes for `len * mem::size_of::<T>()` many bytes,
5422 /// and it must be properly aligned. This means in particular:
5424 /// * The entire memory range of this slice must be contained within a single allocated object!
5425 /// Slices can never span across multiple allocated objects.
5426 /// * `data` must be non-null and aligned even for zero-length slices. One
5427 /// reason for this is that enum layout optimizations may rely on references
5428 /// (including slices of any length) being aligned and non-null to distinguish
5429 /// them from other data. You can obtain a pointer that is usable as `data`
5430 /// for zero-length slices using [`NonNull::dangling()`].
5432 /// * The memory referenced by the returned slice must not be accessed through any other pointer
5433 /// (not derived from the return value) for the duration of lifetime `'a`.
5434 /// Both read and write accesses are forbidden.
5436 /// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
5437 /// See the safety documentation of [`pointer::offset`].
5439 /// [valid]: ../../std/ptr/index.html#safety
5440 /// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
5441 /// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
5442 /// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
5444 #[stable(feature = "rust1", since = "1.0.0")]
5445 pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
// Debug-only sanity checks mirroring `from_raw_parts`; the real contract
// is on the caller.
5446 debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
5448 mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
5449 "attempt to create slice covering at least half the address space"
5451 &mut *ptr::slice_from_raw_parts_mut(data, len)
5454 /// Converts a reference to T into a slice of length 1 (without copying).
5455 #[stable(feature = "from_ref", since = "1.28.0")]
5456 pub fn from_ref<T>(s: &T) -> &[T] {
// SAFETY: a valid `&T` is a valid, aligned, non-null pointer to exactly one T.
5457 unsafe { from_raw_parts(s, 1) }
5460 /// Converts a mutable reference to T into a mutable slice of length 1 (without copying).
5461 #[stable(feature = "from_ref", since = "1.28.0")]
5462 pub fn from_mut<T>(s: &mut T) -> &mut [T] {
// SAFETY: a valid `&mut T` is a valid, aligned, non-null, unique pointer to one T.
5463 unsafe { from_raw_parts_mut(s, 1) }
5466 // This function is public only because there is no other way to unit test heapsort.
5467 #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "none")]
// Thin wrapper delegating to the private `sort::heapsort`; `is_less` is the
// comparison predicate (returns true when the first argument orders first).
5469 pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
5471 F: FnMut(&T, &T) -> bool,
5473 sort::heapsort(v, &mut is_less);
5477 // Comparison traits
5481 /// Calls implementation provided memcmp.
5483 /// Interprets the data as u8.
5485 /// Returns 0 for equal, < 0 for less than and > 0 for greater
5487 // FIXME(#32610): Return type should be c_int
// Declaration of the C `memcmp` used by the byte-wise equality/ordering
// specializations below (enclosing `extern` block not visible in this view).
5488 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
5491 #[stable(feature = "rust1", since = "1.0.0")]
// Heterogeneous slice equality; the real work is dispatched through the
// `SlicePartialEq` specialization machinery defined below.
5492 impl<A, B> PartialEq<[B]> for [A]
5496 fn eq(&self, other: &[B]) -> bool {
5497 SlicePartialEq::equal(self, other)
5500 fn ne(&self, other: &[B]) -> bool {
5501 SlicePartialEq::not_equal(self, other)
5505 #[stable(feature = "rust1", since = "1.0.0")]
// Slices of `Eq` elements form a full equivalence relation.
5506 impl<T: Eq> Eq for [T] {}
5508 /// Implements comparison of vectors lexicographically.
5509 #[stable(feature = "rust1", since = "1.0.0")]
// Dispatches through `SliceOrd` so `[u8]` can use the memcmp fast path.
5510 impl<T: Ord> Ord for [T] {
5511 fn cmp(&self, other: &[T]) -> Ordering {
5512 SliceOrd::compare(self, other)
5516 /// Implements comparison of vectors lexicographically.
5517 #[stable(feature = "rust1", since = "1.0.0")]
// Dispatches through `SlicePartialOrd` so totally-ordered element types can
// reuse the (faster) total-order comparison.
5518 impl<T: PartialOrd> PartialOrd for [T] {
5519 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
5520 SlicePartialOrd::partial_compare(self, other)
5525 // intermediate trait for specialization of slice's PartialEq
5526 trait SlicePartialEq<B> {
5527 fn equal(&self, other: &[B]) -> bool;
// Default `not_equal` (body elided here) presumably negates `equal`.
5529 fn not_equal(&self, other: &[B]) -> bool {
5534 // Generic slice equality
5535 impl<A, B> SlicePartialEq<B> for [A]
// Fallback: element-by-element comparison after a cheap length check.
5539 default fn equal(&self, other: &[B]) -> bool {
5540 if self.len() != other.len() {
5544 self.iter().zip(other.iter()).all(|(x, y)| x == y)
5548 // Use an equal-pointer optimization when types are `Eq`
5549 impl<A> SlicePartialEq<A> for [A]
5551 A: PartialEq<A> + Eq,
// `Eq` rules out NaN-like values, so equal pointer + equal length implies
// element-wise equality; this shortcut would be wrong for mere `PartialEq`.
5553 default fn equal(&self, other: &[A]) -> bool {
5554 if self.len() != other.len() {
5558 if self.as_ptr() == other.as_ptr() {
5562 self.iter().zip(other.iter()).all(|(x, y)| x == y)
5566 // Use memcmp for bytewise equality when the types allow
5567 impl<A> SlicePartialEq<A> for [A]
5569 A: PartialEq<A> + BytewiseEquality,
5571 fn equal(&self, other: &[A]) -> bool {
5572 if self.len() != other.len() {
5575 if self.as_ptr() == other.as_ptr() {
// `size_of_val(self)` is the byte length of the whole slice, which is what
// memcmp wants; lengths are already known equal at this point.
5579 let size = mem::size_of_val(self);
5580 memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
5586 // intermediate trait for specialization of slice's PartialOrd
5587 trait SlicePartialOrd: Sized {
5588 fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
5591 impl<A: PartialOrd> SlicePartialOrd for A {
// Fallback: lexicographic element-wise comparison; equal prefixes are
// resolved by comparing lengths at the end.
5592 default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
5593 let l = cmp::min(left.len(), right.len());
5595 // Slice to the loop iteration range to enable bound check
5596 // elimination in the compiler
5597 let lhs = &left[..l];
5598 let rhs = &right[..l];
5601 match lhs[i].partial_cmp(&rhs[i]) {
5602 Some(Ordering::Equal) => (),
// First non-equal (or incomparable, i.e. `None`) element decides.
5603 non_eq => return non_eq,
5607 left.len().partial_cmp(&right.len())
5611 // This is the impl that we would like to have. Unfortunately it's not sound.
5612 // See `partial_ord_slice.rs`.
// NOTE(review): upstream keeps this impl inside a `/* ... */` block comment;
// the comment delimiters are not visible in this view — confirm before
// assuming this impl is active.
5614 impl<A> SlicePartialOrd for A
5618 default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
5619 Some(SliceOrd::compare(left, right))
// For types where total order is known to agree with partial order,
// delegate to the (potentially faster) `SliceOrd` comparison.
5624 impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
5625 fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
5626 Some(SliceOrd::compare(left, right))
// Marker: `Ord` types whose ordering is safe to use for `PartialOrd` of slices.
5630 trait AlwaysApplicableOrd: SliceOrd + Ord {}
5632 macro_rules! always_applicable_ord {
// Expands to `impl<$p> AlwaysApplicableOrd for $t {}` for each listed type.
5633 ($([$($p:tt)*] $t:ty,)*) => {
5634 $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
5638 always_applicable_ord! {
5639 [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
5640 [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
5642 [T: ?Sized] *const T, [T: ?Sized] *mut T,
5643 [T: AlwaysApplicableOrd] &T,
5644 [T: AlwaysApplicableOrd] &mut T,
5645 [T: AlwaysApplicableOrd] Option<T>,
5649 // intermediate trait for specialization of slice's Ord
5650 trait SliceOrd: Sized {
5651 fn compare(left: &[Self], right: &[Self]) -> Ordering;
5654 impl<A: Ord> SliceOrd for A {
// Generic lexicographic comparison; equal prefixes are resolved by length.
5655 default fn compare(left: &[Self], right: &[Self]) -> Ordering {
5656 let l = cmp::min(left.len(), right.len());
5658 // Slice to the loop iteration range to enable bound check
5659 // elimination in the compiler
5660 let lhs = &left[..l];
5661 let rhs = &right[..l];
5664 match lhs[i].cmp(&rhs[i]) {
5665 Ordering::Equal => (),
// First non-equal element decides the ordering.
5666 non_eq => return non_eq,
5670 left.len().cmp(&right.len())
5674 // memcmp compares a sequence of unsigned bytes lexicographically.
5675 // this matches the order we want for [u8], but no others (not even [i8]).
5676 impl SliceOrd for u8 {
5678 fn compare(left: &[Self], right: &[Self]) -> Ordering {
// Compare the common prefix via memcmp; a zero result means the prefixes
// are equal and the ordering falls back to comparing lengths.
5680 unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
5682 left.len().cmp(&right.len())
5683 } else if order < 0 {
5692 /// Trait implemented for types that can be compared for equality using
5693 /// their bytewise representation
// Sound only for types where equal values have identical byte patterns;
// the list below is restricted to primitives with that property.
5694 trait BytewiseEquality: Eq + Copy {}
5696 macro_rules! impl_marker_for {
5697 ($traitname:ident, $($ty:ty)*) => {
5699 impl $traitname for $ty { }
5704 impl_marker_for!(BytewiseEquality,
5705 u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
// Caller contract: `i` must be in bounds for the iterator's element count.
5708 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
5709 unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
5710 &*self.ptr.as_ptr().add(i)
// Pure memory read: skipping elements has no observable effect.
5712 fn may_have_side_effect() -> bool {
// Caller contract: `i` must be in bounds and each index accessed at most once
// so the returned `&'a mut T` borrows never alias.
5718 unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
5719 unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
5720 &mut *self.ptr.as_ptr().add(i)
5722 fn may_have_side_effect() -> bool {
// Specialization hook for `[T]::contains`-style membership tests.
5727 trait SliceContains: Sized {
5728 fn slice_contains(&self, x: &[Self]) -> bool;
5731 impl<T> SliceContains for T
// Fallback: linear scan with `==`.
5735 default fn slice_contains(&self, x: &[Self]) -> bool {
5736 x.iter().any(|y| *y == *self)
// Specialization: byte search via the optimized memchr implementation.
5740 impl SliceContains for u8 {
5741 fn slice_contains(&self, x: &[Self]) -> bool {
5742 memchr::memchr(*self, x).is_some()
5746 impl SliceContains for i8 {
5747 fn slice_contains(&self, x: &[Self]) -> bool {
5748 let byte = *self as u8;
5749 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
5750 memchr::memchr(byte, bytes).is_some()