1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Slice management and manipulation
13 //! For more details, see `std::slice`.
15 #![stable(feature = "rust1", since = "1.0.0")]
16 #![doc(primitive = "slice")]
18 // How this module is organized.
20 // The library infrastructure for slices is fairly messy. There's
21 // a lot of stuff defined here. Let's keep it clean.
23 // Since slices don't support inherent methods, all operations
24 // on them are defined on traits, which are then reexported from
25 // the prelude for convenience. So there are a lot of traits here.
27 // The layout of this file is thus:
29 // * Slice-specific 'extension' traits and their implementations. This
30 // is where most of the slice API resides.
31 // * Implementations of a few common traits with important slice ops.
32 // * Definitions of a bunch of iterators.
34 // * The `raw` and `bytes` submodules.
35 // * Boilerplate trait implementations.
39 use cmp::{Ordering, PartialEq, PartialOrd, Eq, Ord};
40 use cmp::Ordering::{Less, Equal, Greater};
43 use intrinsics::assume;
45 use ops::{FnMut, self, Index};
48 use option::Option::{None, Some};
50 use result::Result::{Ok, Err};
54 use marker::{Send, Sized, Sync, self};
56 // Avoid conflicts with *both* the Slice trait (buggy) and the `slice::raw` module.
57 use raw::Slice as RawSlice;
64 /// Extension methods for slices.
65 #[allow(missing_docs)] // docs in libcollections
// NOTE(review): the `pub trait SliceExt { type Item; ... }` header lines are not
// visible in this chunk — what follows is the trait's method list, grouped as:
// shared (&self) views/accessors, then mutable (&mut self) counterparts, then
// comparison/search operations with extra bounds on `Self::Item`.
//
// --- Shared views and accessors ---
69 fn split_at<'a>(&'a self, mid: usize) -> (&'a [Self::Item], &'a [Self::Item]);
70 fn iter<'a>(&'a self) -> Iter<'a, Self::Item>;
71 fn split<'a, P>(&'a self, pred: P) -> Split<'a, Self::Item, P>
72 where P: FnMut(&Self::Item) -> bool;
73 fn splitn<'a, P>(&'a self, n: usize, pred: P) -> SplitN<'a, Self::Item, P>
74 where P: FnMut(&Self::Item) -> bool;
75 fn rsplitn<'a, P>(&'a self, n: usize, pred: P) -> RSplitN<'a, Self::Item, P>
76 where P: FnMut(&Self::Item) -> bool;
77 fn windows<'a>(&'a self, size: usize) -> Windows<'a, Self::Item>;
78 fn chunks<'a>(&'a self, size: usize) -> Chunks<'a, Self::Item>;
79 fn get<'a>(&'a self, index: usize) -> Option<&'a Self::Item>;
80 fn first<'a>(&'a self) -> Option<&'a Self::Item>;
81 fn tail<'a>(&'a self) -> &'a [Self::Item];
82 fn init<'a>(&'a self) -> &'a [Self::Item];
83 fn last<'a>(&'a self) -> Option<&'a Self::Item>;
// Caller must guarantee `index` is in bounds; no check is performed.
84 unsafe fn get_unchecked<'a>(&'a self, index: usize) -> &'a Self::Item;
85 fn as_ptr(&self) -> *const Self::Item;
86 fn binary_search_by<F>(&self, f: F) -> Result<usize, usize> where
87 F: FnMut(&Self::Item) -> Ordering;
88 fn len(&self) -> usize;
// Default method: the only provided (non-required) item in this trait.
89 fn is_empty(&self) -> bool { self.len() == 0 }
// --- Mutable counterparts ---
90 fn get_mut<'a>(&'a mut self, index: usize) -> Option<&'a mut Self::Item>;
91 fn as_mut_slice<'a>(&'a mut self) -> &'a mut [Self::Item];
92 fn iter_mut<'a>(&'a mut self) -> IterMut<'a, Self::Item>;
93 fn first_mut<'a>(&'a mut self) -> Option<&'a mut Self::Item>;
94 fn tail_mut<'a>(&'a mut self) -> &'a mut [Self::Item];
95 fn init_mut<'a>(&'a mut self) -> &'a mut [Self::Item];
96 fn last_mut<'a>(&'a mut self) -> Option<&'a mut Self::Item>;
97 fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, Self::Item, P>
98 where P: FnMut(&Self::Item) -> bool;
99 fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<Self::Item, P>
100 where P: FnMut(&Self::Item) -> bool;
101 fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<Self::Item, P>
102 where P: FnMut(&Self::Item) -> bool;
103 fn chunks_mut<'a>(&'a mut self, chunk_size: usize) -> ChunksMut<'a, Self::Item>;
104 fn swap(&mut self, a: usize, b: usize);
105 fn split_at_mut<'a>(&'a mut self, mid: usize) -> (&'a mut [Self::Item], &'a mut [Self::Item]);
106 fn reverse(&mut self);
// Caller must guarantee `index` is in bounds; no check is performed.
107 unsafe fn get_unchecked_mut<'a>(&'a mut self, index: usize) -> &'a mut Self::Item;
108 fn as_mut_ptr(&mut self) -> *mut Self::Item;
// --- Search / comparison operations (require PartialEq, Ord, or Clone) ---
110 fn position_elem(&self, t: &Self::Item) -> Option<usize> where Self::Item: PartialEq;
112 fn rposition_elem(&self, t: &Self::Item) -> Option<usize> where Self::Item: PartialEq;
114 fn contains(&self, x: &Self::Item) -> bool where Self::Item: PartialEq;
116 fn starts_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
118 fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
120 fn binary_search(&self, x: &Self::Item) -> Result<usize, usize> where Self::Item: Ord;
121 fn next_permutation(&mut self) -> bool where Self::Item: Ord;
122 fn prev_permutation(&mut self) -> bool where Self::Item: Ord;
124 fn clone_from_slice(&mut self, &[Self::Item]) -> usize where Self::Item: Clone;
// The sole implementation of `SliceExt`, for the primitive slice type `[T]`.
// Length and data pointer come from `self.repr()` (the `RawSlice` layout view);
// many interior lines of the bodies below are elided from this chunk.
127 #[unstable(feature = "core")]
128 impl<T> SliceExt for [T] {
// Two disjoint shared subslices around `mid`: `[..mid]` and `[mid..]`.
132 fn split_at(&self, mid: usize) -> (&[T], &[T]) {
133 (&self[..mid], &self[mid..])
137 fn iter<'a>(&'a self) -> Iter<'a, T> {
139 let p = self.as_ptr();
140 assume(!p.is_null());
// Zero-sized T: `offset` cannot advance the pointer, so `end` is encoded as
// the integer `ptr + len` (an element counter disguised as an address).
141 if mem::size_of::<T>() == 0 {
143 end: (p as usize + self.len()) as *const T,
144 _marker: marker::PhantomData}
// Sized T: `end` is the usual one-past-the-end pointer.
147 end: p.offset(self.len() as isize),
148 _marker: marker::PhantomData}
154 fn split<'a, P>(&'a self, pred: P) -> Split<'a, T, P> where P: FnMut(&T) -> bool {
// splitn/rsplitn wrap `split` in GenericSplitN, which caps the number of pieces
// (`count` field, elided here) and, for the reverse variant, iterates back-to-front.
163 fn splitn<'a, P>(&'a self, n: usize, pred: P) -> SplitN<'a, T, P> where
164 P: FnMut(&T) -> bool,
167 inner: GenericSplitN {
168 iter: self.split(pred),
176 fn rsplitn<'a, P>(&'a self, n: usize, pred: P) -> RSplitN<'a, T, P> where
177 P: FnMut(&T) -> bool,
180 inner: GenericSplitN {
181 iter: self.split(pred),
189 fn windows(&self, size: usize) -> Windows<T> {
191 Windows { v: self, size: size }
195 fn chunks(&self, size: usize) -> Chunks<T> {
197 Chunks { v: self, size: size }
// Checked indexing: None instead of a panic for out-of-range indices.
201 fn get(&self, index: usize) -> Option<&T> {
202 if index < self.len() { Some(&self[index]) } else { None }
206 fn first(&self) -> Option<&T> {
207 if self.len() == 0 { None } else { Some(&self[0]) }
// NOTE: tail/init panic on an empty slice (slicing past the bounds).
211 fn tail(&self) -> &[T] { &self[1..] }
214 fn init(&self) -> &[T] {
215 &self[..self.len() - 1]
219 fn last(&self) -> Option<&T> {
220 if self.len() == 0 { None } else { Some(&self[self.len() - 1]) }
// Unchecked: computes the element address straight from the raw repr.
224 unsafe fn get_unchecked(&self, index: usize) -> &T {
225 transmute(self.repr().data.offset(index as isize))
229 fn as_ptr(&self) -> *const T {
233 #[unstable(feature = "core")]
// Classic binary search on [base, base+lim): halve `lim`, probe the midpoint.
// The Less/Greater arms (advance `base` / shrink `lim`) are elided here;
// Ok(index) on a hit, Err(insertion point) otherwise.
234 fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize> where
235 F: FnMut(&T) -> Ordering
237 let mut base : usize = 0;
238 let mut lim : usize = self.len();
241 let ix = base + (lim >> 1);
243 Equal => return Ok(ix),
// Length read directly from the raw slice representation.
256 fn len(&self) -> usize { self.repr().len }
259 fn get_mut(&mut self, index: usize) -> Option<&mut T> {
260 if index < self.len() { Some(&mut self[index]) } else { None }
264 fn as_mut_slice(&mut self) -> &mut [T] { self }
// Two `split_at_mut` bodies appear below — presumably alternate versions
// selected by elided #[cfg] attributes (by-ref vs by-value range args for
// IndexMut); confirm against the full file. Both alias `self` via
// `transmute_copy` so the two disjoint halves can be returned at once.
268 fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
270 let self2: &mut [T] = mem::transmute_copy(&self);
272 (ops::IndexMut::index_mut(self, &ops::RangeTo { end: mid } ),
273 ops::IndexMut::index_mut(self2, &ops::RangeFrom { start: mid } ))
279 fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
281 let self2: &mut [T] = mem::transmute_copy(&self);
283 (ops::IndexMut::index_mut(self, ops::RangeTo { end: mid } ),
284 ops::IndexMut::index_mut(self2, ops::RangeFrom { start: mid } ))
// Mirror of `iter` with mutable pointers; same ZST end-pointer encoding.
289 fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> {
291 let p = self.as_mut_ptr();
292 assume(!p.is_null());
293 if mem::size_of::<T>() == 0 {
295 end: (p as usize + self.len()) as *mut T,
296 _marker: marker::PhantomData}
299 end: p.offset(self.len() as isize),
300 _marker: marker::PhantomData}
306 fn last_mut(&mut self) -> Option<&mut T> {
307 let len = self.len();
308 if len == 0 { return None; }
309 Some(&mut self[len - 1])
313 fn first_mut(&mut self) -> Option<&mut T> {
314 if self.len() == 0 { None } else { Some(&mut self[0]) }
// Like tail/init: panics on an empty slice.
318 fn tail_mut(&mut self) -> &mut [T] {
323 fn init_mut(&mut self) -> &mut [T] {
324 let len = self.len();
325 &mut self[.. (len - 1)]
329 fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
330 SplitMut { v: self, pred: pred, finished: false }
334 fn splitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> SplitNMut<'a, T, P> where
338 inner: GenericSplitN {
339 iter: self.split_mut(pred),
347 fn rsplitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> RSplitNMut<'a, T, P> where
348 P: FnMut(&T) -> bool,
351 inner: GenericSplitN {
352 iter: self.split_mut(pred),
// chunk_size == 0 would loop forever, hence the assert.
360 fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
361 assert!(chunk_size > 0);
362 ChunksMut { v: self, chunk_size: chunk_size }
366 fn swap(&mut self, a: usize, b: usize) {
368 // Can't take two mutable loans from one vector, so instead just cast
369 // them to their raw pointers to do the swap
370 let pa: *mut T = &mut self[a];
371 let pb: *mut T = &mut self[b];
// In-place reversal swapping symmetric raw pointers from both ends.
// `ln` is presumably `self.len()` bound on an elided line — confirm.
376 fn reverse(&mut self) {
377 let mut i: usize = 0;
380 // Unsafe swap to avoid the bounds check in safe swap.
382 let pa: *mut T = self.get_unchecked_mut(i);
383 let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
391 unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
392 transmute((self.repr().data as *mut T).offset(index as isize))
396 fn as_mut_ptr(&mut self) -> *mut T {
397 self.repr().data as *mut T
// Linear scans delegating to the Iterator adaptors.
401 fn position_elem(&self, x: &T) -> Option<usize> where T: PartialEq {
402 self.iter().position(|y| *x == *y)
406 fn rposition_elem(&self, t: &T) -> Option<usize> where T: PartialEq {
407 self.iter().rposition(|x| *x == *t)
411 fn contains(&self, x: &T) -> bool where T: PartialEq {
412 self.iter().any(|elt| *x == *elt)
// Prefix/suffix tests via slice equality on the matching-length subslice.
416 fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
417 let n = needle.len();
418 self.len() >= n && needle == &self[..n]
422 fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
423 let (m, n) = (self.len(), needle.len());
424 m >= n && needle == &self[m-n..]
427 #[unstable(feature = "core")]
428 fn binary_search(&self, x: &T) -> Result<usize, usize> where T: Ord {
429 self.binary_search_by(|p| p.cmp(x))
432 #[unstable(feature = "core")]
// Advances to the lexicographically next permutation in place;
// returns false (leaving the slice unchanged) when already at the last one.
433 fn next_permutation(&mut self) -> bool where T: Ord {
434 // These cases only have 1 permutation each, so we can't do anything.
435 if self.len() < 2 { return false; }
437 // Step 1: Identify the longest, rightmost weakly decreasing part of the vector
438 let mut i = self.len() - 1;
439 while i > 0 && self[i-1] >= self[i] {
443 // If that is the entire vector, this is the last-ordered permutation.
448 // Step 2: Find the rightmost element larger than the pivot (i-1)
449 let mut j = self.len() - 1;
450 while j >= i && self[j] <= self[i-1] {
454 // Step 3: Swap that element with the pivot
457 // Step 4: Reverse the (previously) weakly decreasing part
463 #[unstable(feature = "core")]
// Inverse of `next_permutation`: steps to the previous permutation,
// returning false when already at the first (fully sorted) one.
464 fn prev_permutation(&mut self) -> bool where T: Ord {
465 // These cases only have 1 permutation each, so we can't do anything.
466 if self.len() < 2 { return false; }
468 // Step 1: Identify the longest, rightmost weakly increasing part of the vector
469 let mut i = self.len() - 1;
470 while i > 0 && self[i-1] <= self[i] {
474 // If that is the entire vector, this is the first-ordered permutation.
479 // Step 2: Reverse the weakly increasing part
482 // Step 3: Find the rightmost element equal to or bigger than the pivot (i-1)
483 let mut j = self.len() - 1;
484 while j >= i && self[j-1] < self[i-1] {
488 // Step 4: Swap that element with the pivot
// Clones the overlapping prefix (min of the two lengths) element-by-element
// via `clone_from`; presumably returns `min` on an elided line — confirm.
495 fn clone_from_slice(&mut self, src: &[T]) -> usize where T: Clone {
496 let min = cmp::min(self.len(), src.len());
497 let dst = &mut self[.. min];
498 let src = &src[.. min];
500 dst[i].clone_from(&src[i]);
// `v[i]` / `&mut v[i]` for slices: bounds-check, then compute the element
// address from the raw repr. Each impl shows two `index`/`index_mut` bodies —
// presumably alternates (by-ref vs by-value index argument) selected by
// elided #[cfg] attributes during the pre-1.0 Index signature transition; confirm.
506 #[stable(feature = "rust1", since = "1.0.0")]
507 impl<T> ops::Index<usize> for [T] {
511 fn index(&self, &index: &usize) -> &T {
512 assert!(index < self.len());
514 unsafe { mem::transmute(self.repr().data.offset(index as isize)) }
518 fn index(&self, index: usize) -> &T {
519 assert!(index < self.len());
521 unsafe { mem::transmute(self.repr().data.offset(index as isize)) }
525 #[stable(feature = "rust1", since = "1.0.0")]
526 impl<T> ops::IndexMut<usize> for [T] {
529 fn index_mut(&mut self, &index: &usize) -> &mut T {
530 assert!(index < self.len());
532 unsafe { mem::transmute(self.repr().data.offset(index as isize)) }
537 fn index_mut(&mut self, index: usize) -> &mut T {
538 assert!(index < self.len());
540 unsafe { mem::transmute(self.repr().data.offset(index as isize)) }
// Shared range indexing for slices. `Range` is the primitive case (checked
// `start <= end <= len`, then a raw subslice is built from ptr+start and the
// length — presumably via `from_raw_parts` on elided lines; confirm). The
// RangeTo/RangeFrom/RangeFull impls delegate to it by filling in the missing
// endpoint. Duplicate bodies are the by-ref/by-value #[cfg] alternates.
544 #[stable(feature = "rust1", since = "1.0.0")]
545 impl<T> ops::Index<ops::Range<usize>> for [T] {
550 fn index(&self, index: &ops::Range<usize>) -> &[T] {
551 assert!(index.start <= index.end);
552 assert!(index.end <= self.len());
555 self.as_ptr().offset(index.start as isize),
556 index.end - index.start
563 fn index(&self, index: ops::Range<usize>) -> &[T] {
564 assert!(index.start <= index.end);
565 assert!(index.end <= self.len());
568 self.as_ptr().offset(index.start as isize),
569 index.end - index.start
574 #[stable(feature = "rust1", since = "1.0.0")]
575 impl<T> ops::Index<ops::RangeTo<usize>> for [T] {
// `&v[..end]` == `&v[0..end]`
580 fn index(&self, index: &ops::RangeTo<usize>) -> &[T] {
581 self.index(&ops::Range{ start: 0, end: index.end })
586 fn index(&self, index: ops::RangeTo<usize>) -> &[T] {
587 self.index(ops::Range{ start: 0, end: index.end })
590 #[stable(feature = "rust1", since = "1.0.0")]
591 impl<T> ops::Index<ops::RangeFrom<usize>> for [T] {
// `&v[start..]` == `&v[start..len]`
596 fn index(&self, index: &ops::RangeFrom<usize>) -> &[T] {
597 self.index(&ops::Range{ start: index.start, end: self.len() })
602 fn index(&self, index: ops::RangeFrom<usize>) -> &[T] {
603 self.index(ops::Range{ start: index.start, end: self.len() })
606 #[stable(feature = "rust1", since = "1.0.0")]
607 impl<T> ops::Index<RangeFull> for [T] {
// `&v[..]` — the whole slice; body elided (presumably just `self`).
612 fn index(&self, _index: &RangeFull) -> &[T] {
618 fn index(&self, _index: RangeFull) -> &[T] {
// Mutable range indexing — exact mirror of the shared `Index` impls above:
// `Range` is the checked primitive case built on `as_mut_ptr`; RangeTo/
// RangeFrom/RangeFull delegate by filling in the missing endpoint.
// Duplicate bodies are the by-ref/by-value #[cfg] alternates.
623 #[stable(feature = "rust1", since = "1.0.0")]
624 impl<T> ops::IndexMut<ops::Range<usize>> for [T] {
627 fn index_mut(&mut self, index: &ops::Range<usize>) -> &mut [T] {
628 assert!(index.start <= index.end);
629 assert!(index.end <= self.len());
632 self.as_mut_ptr().offset(index.start as isize),
633 index.end - index.start
640 fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] {
641 assert!(index.start <= index.end);
642 assert!(index.end <= self.len());
645 self.as_mut_ptr().offset(index.start as isize),
646 index.end - index.start
651 #[stable(feature = "rust1", since = "1.0.0")]
652 impl<T> ops::IndexMut<ops::RangeTo<usize>> for [T] {
655 fn index_mut(&mut self, index: &ops::RangeTo<usize>) -> &mut [T] {
656 self.index_mut(&ops::Range{ start: 0, end: index.end })
661 fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
662 self.index_mut(ops::Range{ start: 0, end: index.end })
665 #[stable(feature = "rust1", since = "1.0.0")]
666 impl<T> ops::IndexMut<ops::RangeFrom<usize>> for [T] {
// `len` is read first because `index_mut` takes `&mut self`.
669 fn index_mut(&mut self, index: &ops::RangeFrom<usize>) -> &mut [T] {
670 let len = self.len();
671 self.index_mut(&ops::Range{ start: index.start, end: len })
676 fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
677 let len = self.len();
678 self.index_mut(ops::Range{ start: index.start, end: len })
681 #[stable(feature = "rust1", since = "1.0.0")]
682 impl<T> ops::IndexMut<RangeFull> for [T] {
// `&mut v[..]` — the whole slice; body elided (presumably just `self`).
686 fn index_mut(&mut self, _index: &RangeFull) -> &mut [T] {
692 fn index_mut(&mut self, _index: RangeFull) -> &mut [T] {
698 ////////////////////////////////////////////////////////////////////////////////
700 ////////////////////////////////////////////////////////////////////////////////
702 /// Data that is viewable as a slice.
703 #[unstable(feature = "core",
704 reason = "will be replaced by slice syntax")]
705 #[deprecated(since = "1.0.0",
706 reason = "use std::convert::AsRef<[T]> instead")]
707 pub trait AsSlice<T> {
708 /// Work with `self` as a slice.
709 fn as_slice<'a>(&'a self) -> &'a [T];
712 #[unstable(feature = "core", reason = "trait is experimental")]
714 impl<T> AsSlice<T> for [T] {
716 fn as_slice<'a>(&'a self) -> &'a [T] { self }
719 #[unstable(feature = "core", reason = "trait is experimental")]
721 impl<'a, T, U: ?Sized + AsSlice<T>> AsSlice<T> for &'a U {
723 fn as_slice(&self) -> &[T] { AsSlice::as_slice(*self) }
726 #[unstable(feature = "core", reason = "trait is experimental")]
728 impl<'a, T, U: ?Sized + AsSlice<T>> AsSlice<T> for &'a mut U {
730 fn as_slice(&self) -> &[T] { AsSlice::as_slice(*self) }
733 #[stable(feature = "rust1", since = "1.0.0")]
734 impl<'a, T> Default for &'a [T] {
735 #[stable(feature = "rust1", since = "1.0.0")]
736 fn default() -> &'a [T] { &[] }
// `for x in &slice` / `for x in &mut slice` support: shared references
// iterate with `Iter`, mutable references with `IterMut` (bodies elided;
// presumably delegating to `iter()` / `iter_mut()` — confirm).
743 #[stable(feature = "rust1", since = "1.0.0")]
744 impl<'a, T> IntoIterator for &'a [T] {
746 type IntoIter = Iter<'a, T>;
748 fn into_iter(self) -> Iter<'a, T> {
753 #[stable(feature = "rust1", since = "1.0.0")]
754 impl<'a, T> IntoIterator for &'a mut [T] {
755 type Item = &'a mut T;
756 type IntoIter = IterMut<'a, T>;
758 fn into_iter(self) -> IterMut<'a, T> {
763 // The shared definition of the `Iter` and `IterMut` iterators
// Expands to `Iterator` and `DoubleEndedIterator` impls over a
// (ptr, end) pointer pair. For zero-sized T the pointers are treated as
// integer counters (incremented/decremented via transmute) and a dummy
// non-null address `1` is handed out, since every ZST "element" is identical
// and must not be at address 0.
764 macro_rules! iterator {
765 (struct $name:ident -> $ptr:ty, $elem:ty) => {
766 #[stable(feature = "rust1", since = "1.0.0")]
767 impl<'a, T> Iterator for $name<'a, T> {
771 fn next(&mut self) -> Option<$elem> {
772 // could be implemented with slices, but this avoids bounds checks
// SAFETY-relevant: tells LLVM both pointers are non-null so the
// Option niche optimization and loop codegen improve.
774 ::intrinsics::assume(!self.ptr.is_null());
775 ::intrinsics::assume(!self.end.is_null());
// ptr == end means the iterator is exhausted.
776 if self.ptr == self.end {
779 if mem::size_of::<T>() == 0 {
780 // purposefully don't use 'ptr.offset' because for
781 // vectors with 0-size elements this would return the
783 self.ptr = transmute(self.ptr as usize + 1);
785 // Use a non-null pointer value
786 Some(&mut *(1 as *mut _))
789 self.ptr = self.ptr.offset(1);
// Exact remaining count: byte distance divided by element size
// (for ZSTs the "distance" is already the element count).
798 fn size_hint(&self) -> (usize, Option<usize>) {
799 let diff = (self.end as usize) - (self.ptr as usize);
800 let size = mem::size_of::<T>();
801 let exact = diff / (if size == 0 {1} else {size});
806 #[stable(feature = "rust1", since = "1.0.0")]
807 impl<'a, T> DoubleEndedIterator for $name<'a, T> {
// Mirror of `next`, shrinking from the back by moving `end` down.
809 fn next_back(&mut self) -> Option<$elem> {
810 // could be implemented with slices, but this avoids bounds checks
812 ::intrinsics::assume(!self.ptr.is_null());
813 ::intrinsics::assume(!self.end.is_null());
814 if self.end == self.ptr {
817 if mem::size_of::<T>() == 0 {
818 // See above for why 'ptr.offset' isn't used
819 self.end = transmute(self.end as usize - 1);
821 // Use a non-null pointer value
822 Some(&mut *(1 as *mut _))
824 self.end = self.end.offset(-1);
826 Some(transmute(self.end))
// Rebuild a slice from an iterator's (start, end) pointer pair.
// The length is the byte distance divided by the element size; the
// zero-sized-T arm (elided here) uses the raw distance, since ZST iterators
// store a counter rather than a real address in `end`.
835 macro_rules! make_slice {
836 ($t: ty => $result: ty: $start: expr, $end: expr) => {{
837 let diff = $end as usize - $start as usize;
838 let len = if mem::size_of::<T>() == 0 {
841 diff / mem::size_of::<$t>()
844 from_raw_parts($start, len)
// Mutable twin of `make_slice!`, producing `&mut [T]` via `from_raw_parts_mut`.
849 macro_rules! make_mut_slice {
850 ($t: ty => $result: ty: $start: expr, $end: expr) => {{
851 let diff = $end as usize - $start as usize;
852 let len = if mem::size_of::<T>() == 0 {
855 diff / mem::size_of::<$t>()
858 from_raw_parts_mut($start, len)
863 /// Immutable slice iterator
// Fields (`ptr`, `end` — partially elided) are the cursor pair consumed by the
// `iterator!` macro below; PhantomData ties the borrow to lifetime 'a.
864 #[stable(feature = "rust1", since = "1.0.0")]
865 pub struct Iter<'a, T: 'a> {
868 _marker: marker::PhantomData<&'a T>,
// Iter yields only &T, so both Sync and Send require T: Sync.
871 unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {}
872 unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
// Range indexing on the iterator itself: delegate to the remaining-data
// slice from `as_slice()`. Duplicate bodies are by-ref/by-value #[cfg] alternates.
874 #[unstable(feature = "core")]
875 impl<'a, T> ops::Index<ops::Range<usize>> for Iter<'a, T> {
880 fn index(&self, index: &ops::Range<usize>) -> &[T] {
881 self.as_slice().index(index)
886 fn index(&self, index: ops::Range<usize>) -> &[T] {
887 self.as_slice().index(index)
891 #[unstable(feature = "core")]
892 impl<'a, T> ops::Index<ops::RangeTo<usize>> for Iter<'a, T> {
897 fn index(&self, index: &ops::RangeTo<usize>) -> &[T] {
898 self.as_slice().index(index)
903 fn index(&self, index: ops::RangeTo<usize>) -> &[T] {
904 self.as_slice().index(index)
908 #[unstable(feature = "core")]
909 impl<'a, T> ops::Index<ops::RangeFrom<usize>> for Iter<'a, T> {
914 fn index(&self, index: &ops::RangeFrom<usize>) -> &[T] {
915 self.as_slice().index(index)
920 fn index(&self, index: ops::RangeFrom<usize>) -> &[T] {
921 self.as_slice().index(index)
925 #[unstable(feature = "core")]
926 impl<'a, T> ops::Index<RangeFull> for Iter<'a, T> {
931 fn index(&self, _index: &RangeFull) -> &[T] {
937 fn index(&self, _index: RangeFull) -> &[T] {
942 impl<'a, T> Iter<'a, T> {
943 /// View the underlying data as a subslice of the original data.
945 /// This has the same lifetime as the original slice, and so the
946 /// iterator can continue to be used while this exists.
947 #[unstable(feature = "core")]
948 pub fn as_slice(&self) -> &'a [T] {
949 make_slice!(T => &'a [T]: self.ptr, self.end)
// Generates the Iterator/DoubleEndedIterator impls (see macro above).
953 iterator!{struct Iter -> *const T, &'a T}
955 #[stable(feature = "rust1", since = "1.0.0")]
956 impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
// Manual Clone: copying the cursor pair; does not clone the elements.
958 #[stable(feature = "rust1", since = "1.0.0")]
959 impl<'a, T> Clone for Iter<'a, T> {
960 fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
// Random access without consuming the iterator: `indexable` is the exact
// remaining count from size_hint; `idx` offsets from the current cursor
// (with the usual ZST dummy-address special case).
963 #[unstable(feature = "core", reason = "trait is experimental")]
964 impl<'a, T> RandomAccessIterator for Iter<'a, T> {
966 fn indexable(&self) -> usize {
967 let (exact, _) = self.size_hint();
972 fn idx(&mut self, index: usize) -> Option<&'a T> {
974 if index < self.indexable() {
975 if mem::size_of::<T>() == 0 {
976 // Use a non-null pointer value
977 Some(&mut *(1 as *mut _))
979 Some(transmute(self.ptr.offset(index as isize)))
988 /// Mutable slice iterator.
// Same cursor-pair layout as `Iter`, but over `*mut T` and yielding `&'a mut T`.
989 #[stable(feature = "rust1", since = "1.0.0")]
990 pub struct IterMut<'a, T: 'a> {
993 _marker: marker::PhantomData<&'a mut T>,
// Unlike Iter, Send requires T: Send (mutable access moves across threads).
996 unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {}
997 unsafe impl<'a, T: Send> Send for IterMut<'a, T> {}
// Shared range indexing: routed through the RangeFull impl below, which
// rebuilds the remaining data as a `&[T]`. Duplicate bodies are the
// by-ref/by-value #[cfg] alternates.
999 #[unstable(feature = "core")]
1000 impl<'a, T> ops::Index<ops::Range<usize>> for IterMut<'a, T> {
1005 fn index(&self, index: &ops::Range<usize>) -> &[T] {
1006 self.index(&RangeFull).index(index)
1011 fn index(&self, index: ops::Range<usize>) -> &[T] {
1012 self.index(RangeFull).index(index)
1015 #[unstable(feature = "core")]
1016 impl<'a, T> ops::Index<ops::RangeTo<usize>> for IterMut<'a, T> {
1021 fn index(&self, index: &ops::RangeTo<usize>) -> &[T] {
1022 self.index(&RangeFull).index(index)
1027 fn index(&self, index: ops::RangeTo<usize>) -> &[T] {
1028 self.index(RangeFull).index(index)
1031 #[unstable(feature = "core")]
1032 impl<'a, T> ops::Index<ops::RangeFrom<usize>> for IterMut<'a, T> {
1037 fn index(&self, index: &ops::RangeFrom<usize>) -> &[T] {
1038 self.index(&RangeFull).index(index)
1043 fn index(&self, index: ops::RangeFrom<usize>) -> &[T] {
1044 self.index(RangeFull).index(index)
1047 #[unstable(feature = "core")]
1048 impl<'a, T> ops::Index<RangeFull> for IterMut<'a, T> {
1053 fn index(&self, _index: &RangeFull) -> &[T] {
1054 make_slice!(T => &[T]: self.ptr, self.end)
1059 fn index(&self, _index: RangeFull) -> &[T] {
1060 make_slice!(T => &[T]: self.ptr, self.end)
// Mutable range indexing: same delegation pattern via IndexMut<RangeFull>.
1064 #[unstable(feature = "core")]
1065 impl<'a, T> ops::IndexMut<ops::Range<usize>> for IterMut<'a, T> {
1068 fn index_mut(&mut self, index: &ops::Range<usize>) -> &mut [T] {
1069 self.index_mut(&RangeFull).index_mut(index)
1074 fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] {
1075 self.index_mut(RangeFull).index_mut(index)
1078 #[unstable(feature = "core")]
1079 impl<'a, T> ops::IndexMut<ops::RangeTo<usize>> for IterMut<'a, T> {
1083 fn index_mut(&mut self, index: &ops::RangeTo<usize>) -> &mut [T] {
1084 self.index_mut(&RangeFull).index_mut(index)
1089 fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
1090 self.index_mut(RangeFull).index_mut(index)
1093 #[unstable(feature = "core")]
1094 impl<'a, T> ops::IndexMut<ops::RangeFrom<usize>> for IterMut<'a, T> {
1098 fn index_mut(&mut self, index: &ops::RangeFrom<usize>) -> &mut [T] {
1099 self.index_mut(&RangeFull).index_mut(index)
1104 fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
1105 self.index_mut(RangeFull).index_mut(index)
1108 #[unstable(feature = "core")]
1109 impl<'a, T> ops::IndexMut<RangeFull> for IterMut<'a, T> {
1113 fn index_mut(&mut self, _index: &RangeFull) -> &mut [T] {
1114 make_mut_slice!(T => &mut [T]: self.ptr, self.end)
1119 fn index_mut(&mut self, _index: RangeFull) -> &mut [T] {
1120 make_mut_slice!(T => &mut [T]: self.ptr, self.end)
1125 impl<'a, T> IterMut<'a, T> {
1126 /// View the underlying data as a subslice of the original data.
1128 /// To avoid creating `&mut` references that alias, this is forced
1129 /// to consume the iterator. Consider using the `Slice` and
1130 /// `SliceMut` implementations for obtaining slices with more
1131 /// restricted lifetimes that do not consume the iterator.
1132 #[unstable(feature = "core")]
1133 pub fn into_slice(self) -> &'a mut [T] {
1134 make_mut_slice!(T => &'a mut [T]: self.ptr, self.end)
// Generates the Iterator/DoubleEndedIterator impls (see macro above).
1138 iterator!{struct IterMut -> *mut T, &'a mut T}
1140 #[stable(feature = "rust1", since = "1.0.0")]
1141 impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
1143 /// An internal abstraction over the splitting iterators, so that
1144 /// splitn, splitn_mut etc can be implemented once.
1145 trait SplitIter: DoubleEndedIterator {
1146 /// Mark the underlying iterator as complete, extracting the remaining
1147 /// portion of the slice.
1148 fn finish(&mut self) -> Option<Self::Item>;
1151 /// An iterator over subslices separated by elements that match a predicate
// State (partially elided): `v` — the not-yet-yielded remainder, `pred` — the
// separator predicate, `finished` — set once the final piece has been emitted.
1153 #[stable(feature = "rust1", since = "1.0.0")]
1154 pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
1160 // FIXME(#19839) Remove in favor of `#[derive(Clone)]`
1161 #[stable(feature = "rust1", since = "1.0.0")]
1162 impl<'a, T, P> Clone for Split<'a, T, P> where P: Clone + FnMut(&T) -> bool {
1163 fn clone(&self) -> Split<'a, T, P> {
1166 pred: self.pred.clone(),
1167 finished: self.finished,
1172 #[stable(feature = "rust1", since = "1.0.0")]
1173 impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
1174 type Item = &'a [T];
// Yield everything before the next matching element; the separator itself
// (at `idx`) is dropped. With no match left, `finish` emits the remainder.
1177 fn next(&mut self) -> Option<&'a [T]> {
1178 if self.finished { return None; }
1180 match self.v.iter().position(|x| (self.pred)(x)) {
1181 None => self.finish(),
1183 let ret = Some(&self.v[..idx]);
1184 self.v = &self.v[idx + 1..];
// At least one piece remains; at most one per element plus the final one.
1191 fn size_hint(&self) -> (usize, Option<usize>) {
1195 (1, Some(self.v.len() + 1))
1200 #[stable(feature = "rust1", since = "1.0.0")]
1201 impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
// Mirror of `next`, splitting at the last matching element instead.
1203 fn next_back(&mut self) -> Option<&'a [T]> {
1204 if self.finished { return None; }
1206 match self.v.iter().rposition(|x| (self.pred)(x)) {
1207 None => self.finish(),
1209 let ret = Some(&self.v[idx + 1..]);
1210 self.v = &self.v[..idx];
1217 impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool {
// One-shot: the first call hands out the remainder, later calls yield None.
1219 fn finish(&mut self) -> Option<&'a [T]> {
1220 if self.finished { None } else { self.finished = true; Some(self.v) }
1224 /// An iterator over the subslices of the vector which are separated
1225 /// by elements that match `pred`.
// Mutable counterpart of `Split`. Because `&mut [T]` is not Copy, each step
// must move the remainder out with `mem::replace(&mut self.v, &mut [])`
// before splitting it — a shared-reference scan like Split's is not possible.
1226 #[stable(feature = "rust1", since = "1.0.0")]
1227 pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
1233 impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1235 fn finish(&mut self) -> Option<&'a mut [T]> {
1239 self.finished = true;
1240 Some(mem::replace(&mut self.v, &mut []))
1245 #[stable(feature = "rust1", since = "1.0.0")]
1246 impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
1247 type Item = &'a mut [T];
1250 fn next(&mut self) -> Option<&'a mut [T]> {
1251 if self.finished { return None; }
// Find the separator first via a shared iteration, then do the mutable
// split; doing both at once would need two live borrows of `self.v`.
1253 let idx_opt = { // work around borrowck limitations
1254 let pred = &mut self.pred;
1255 self.v.iter().position(|x| (*pred)(x))
1258 None => self.finish(),
1260 let tmp = mem::replace(&mut self.v, &mut []);
1261 let (head, tail) = tmp.split_at_mut(idx);
// Keep everything after the separator; the separator itself is dropped.
1262 self.v = &mut tail[1..];
1269 fn size_hint(&self) -> (usize, Option<usize>) {
1273 // if the predicate doesn't match anything, we yield one slice
1274 // if it matches every element, we yield len+1 empty slices.
1275 (1, Some(self.v.len() + 1))
1280 #[stable(feature = "rust1", since = "1.0.0")]
1281 impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where
1282 P: FnMut(&T) -> bool,
// Mirror of `next`, splitting at the last matching element; the part after
// the separator is yielded and `head` (elided assignment) becomes the remainder.
1285 fn next_back(&mut self) -> Option<&'a mut [T]> {
1286 if self.finished { return None; }
1288 let idx_opt = { // work around borrowck limitations
1289 let pred = &mut self.pred;
1290 self.v.iter().rposition(|x| (*pred)(x))
1293 None => self.finish(),
1295 let tmp = mem::replace(&mut self.v, &mut []);
1296 let (head, tail) = tmp.split_at_mut(idx);
1298 Some(&mut tail[1..])
1304 /// A private iterator over subslices separated by elements that
1305 /// match a predicate function, splitting at most a fixed number of
// Fields (elided): the wrapped SplitIter, a remaining-piece `count`, and an
// `invert` flag that makes the rsplitn variants consume from the back.
1307 struct GenericSplitN<I> {
1313 impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> {
// Once the budget is spent, stop; when it drops to the last piece
// (elided branch, presumably `count == 1` calling `finish`), the whole
// unsplit remainder is yielded — confirm against the full file.
1317 fn next(&mut self) -> Option<T> {
1318 if self.count == 0 {
1322 if self.invert { self.iter.next_back() } else { self.iter.next() }
1327 fn size_hint(&self) -> (usize, Option<usize>) {
1328 let (lower, upper_opt) = self.iter.size_hint();
// The piece budget caps the inner iterator's upper bound.
1329 (lower, upper_opt.map(|upper| cmp::min(self.count + 1, upper)))
1333 /// An iterator over subslices separated by elements that match a predicate
1334 /// function, limited to a given number of splits.
1335 #[stable(feature = "rust1", since = "1.0.0")]
1336 pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
1337 inner: GenericSplitN<Split<'a, T, P>>
1340 /// An iterator over subslices separated by elements that match a
1341 /// predicate function, limited to a given number of splits, starting
1342 /// from the end of the slice.
1343 #[stable(feature = "rust1", since = "1.0.0")]
1344 pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
1345 inner: GenericSplitN<Split<'a, T, P>>
1348 /// An iterator over subslices separated by elements that match a predicate
1349 /// function, limited to a given number of splits.
1350 #[stable(feature = "rust1", since = "1.0.0")]
1351 pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
1352 inner: GenericSplitN<SplitMut<'a, T, P>>
1355 /// An iterator over subslices separated by elements that match a
1356 /// predicate function, limited to a given number of splits, starting
1357 /// from the end of the slice.
1358 #[stable(feature = "rust1", since = "1.0.0")]
1359 pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
1360 inner: GenericSplitN<SplitMut<'a, T, P>>
// Boilerplate: forward Iterator straight through to `self.inner` for the
// four newtype wrappers above.
1363 macro_rules! forward_iterator {
1364 ($name:ident: $elem:ident, $iter_of:ty) => {
1365 #[stable(feature = "rust1", since = "1.0.0")]
1366 impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where
1367 P: FnMut(&T) -> bool
1369 type Item = $iter_of;
1372 fn next(&mut self) -> Option<$iter_of> {
1377 fn size_hint(&self) -> (usize, Option<usize>) {
1378 self.inner.size_hint()
1384 forward_iterator! { SplitN: T, &'a [T] }
1385 forward_iterator! { RSplitN: T, &'a [T] }
1386 forward_iterator! { SplitNMut: T, &'a mut [T] }
1387 forward_iterator! { RSplitNMut: T, &'a mut [T] }
1389 /// An iterator over overlapping subslices of length `size`.
1390 #[stable(feature = "rust1", since = "1.0.0")]
1391 pub struct Windows<'a, T:'a> {
1396 // FIXME(#19839) Remove in favor of `#[derive(Clone)]`
1397 #[stable(feature = "rust1", since = "1.0.0")]
1398 impl<'a, T> Clone for Windows<'a, T> {
1399 fn clone(&self) -> Windows<'a, T> {
1407 #[stable(feature = "rust1", since = "1.0.0")]
1408 impl<'a, T> Iterator for Windows<'a, T> {
1409 type Item = &'a [T];
1412 fn next(&mut self) -> Option<&'a [T]> {
1413 if self.size > self.v.len() {
1416 let ret = Some(&self.v[..self.size]);
1417 self.v = &self.v[1..];
1423 fn size_hint(&self) -> (usize, Option<usize>) {
1424 if self.size > self.v.len() {
1427 let size = self.v.len() - self.size + 1;
1433 #[stable(feature = "rust1", since = "1.0.0")]
1434 impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
1436 fn next_back(&mut self) -> Option<&'a [T]> {
1437 if self.size > self.v.len() {
1440 let ret = Some(&self.v[self.v.len()-self.size..]);
1441 self.v = &self.v[..self.v.len()-1];
// `size_hint` above returns identical lower and upper bounds, so the
// exact-size contract holds.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Windows<'a, T> {}
1450 #[unstable(feature = "core", reason = "trait is experimental")]
1451 impl<'a, T> RandomAccessIterator for Windows<'a, T> {
1453 fn indexable(&self) -> usize {
1458 fn idx(&mut self, index: usize) -> Option<&'a [T]> {
1459 if index + self.size > self.v.len() {
1462 Some(&self.v[index .. index+self.size])
1467 /// An iterator over a slice in (non-overlapping) chunks (`size` elements at a
1470 /// When the slice len is not evenly divided by the chunk size, the last slice
1471 /// of the iteration will be the remainder.
1472 #[stable(feature = "rust1", since = "1.0.0")]
1473 pub struct Chunks<'a, T:'a> {
1478 // FIXME(#19839) Remove in favor of `#[derive(Clone)]`
1479 #[stable(feature = "rust1", since = "1.0.0")]
1480 impl<'a, T> Clone for Chunks<'a, T> {
1481 fn clone(&self) -> Chunks<'a, T> {
1489 #[stable(feature = "rust1", since = "1.0.0")]
1490 impl<'a, T> Iterator for Chunks<'a, T> {
1491 type Item = &'a [T];
1494 fn next(&mut self) -> Option<&'a [T]> {
1495 if self.v.len() == 0 {
1498 let chunksz = cmp::min(self.v.len(), self.size);
1499 let (fst, snd) = self.v.split_at(chunksz);
1506 fn size_hint(&self) -> (usize, Option<usize>) {
1507 if self.v.len() == 0 {
1510 let n = self.v.len() / self.size;
1511 let rem = self.v.len() % self.size;
1512 let n = if rem > 0 { n+1 } else { n };
1518 #[stable(feature = "rust1", since = "1.0.0")]
1519 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
1521 fn next_back(&mut self) -> Option<&'a [T]> {
1522 if self.v.len() == 0 {
1525 let remainder = self.v.len() % self.size;
1526 let chunksz = if remainder != 0 { remainder } else { self.size };
1527 let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
// `size_hint` above returns identical lower and upper bounds, so the
// exact-size contract holds.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
1537 #[unstable(feature = "core", reason = "trait is experimental")]
1538 impl<'a, T> RandomAccessIterator for Chunks<'a, T> {
1540 fn indexable(&self) -> usize {
1541 self.v.len()/self.size + if self.v.len() % self.size != 0 { 1 } else { 0 }
1545 fn idx(&mut self, index: usize) -> Option<&'a [T]> {
1546 if index < self.indexable() {
1547 let lo = index * self.size;
1548 let mut hi = lo + self.size;
1549 if hi < lo || hi > self.v.len() { hi = self.v.len(); }
1551 Some(&self.v[lo..hi])
1558 /// An iterator over a slice in (non-overlapping) mutable chunks (`size`
1559 /// elements at a time). When the slice len is not evenly divided by the chunk
1560 /// size, the last slice of the iteration will be the remainder.
1561 #[stable(feature = "rust1", since = "1.0.0")]
1562 pub struct ChunksMut<'a, T:'a> {
1567 #[stable(feature = "rust1", since = "1.0.0")]
1568 impl<'a, T> Iterator for ChunksMut<'a, T> {
1569 type Item = &'a mut [T];
1572 fn next(&mut self) -> Option<&'a mut [T]> {
1573 if self.v.len() == 0 {
1576 let sz = cmp::min(self.v.len(), self.chunk_size);
1577 let tmp = mem::replace(&mut self.v, &mut []);
1578 let (head, tail) = tmp.split_at_mut(sz);
1585 fn size_hint(&self) -> (usize, Option<usize>) {
1586 if self.v.len() == 0 {
1589 let n = self.v.len() / self.chunk_size;
1590 let rem = self.v.len() % self.chunk_size;
1591 let n = if rem > 0 { n + 1 } else { n };
1597 #[stable(feature = "rust1", since = "1.0.0")]
1598 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
1600 fn next_back(&mut self) -> Option<&'a mut [T]> {
1601 if self.v.len() == 0 {
1604 let remainder = self.v.len() % self.chunk_size;
1605 let sz = if remainder != 0 { remainder } else { self.chunk_size };
1606 let tmp = mem::replace(&mut self.v, &mut []);
1607 let tmp_len = tmp.len();
1608 let (head, tail) = tmp.split_at_mut(tmp_len - sz);
// `size_hint` above returns identical lower and upper bounds, so the
// exact-size contract holds.
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
1622 /// Converts a pointer to A into a slice of length 1 (without copying).
1623 #[unstable(feature = "core")]
1624 pub fn ref_slice<'a, A>(s: &'a A) -> &'a [A] {
1626 from_raw_parts(s, 1)
1630 /// Converts a pointer to A into a slice of length 1 (without copying).
1631 #[unstable(feature = "core")]
1632 pub fn mut_ref_slice<'a, A>(s: &'a mut A) -> &'a mut [A] {
1634 from_raw_parts_mut(s, 1)
1638 /// Forms a slice from a pointer and a length.
1640 /// The `len` argument is the number of **elements**, not the number of bytes.
1642 /// This function is unsafe as there is no guarantee that the given pointer is
1643 /// valid for `len` elements, nor whether the lifetime inferred is a suitable
1644 /// lifetime for the returned slice.
1648 /// The lifetime for the returned slice is inferred from its usage. To
1649 /// prevent accidental misuse, it's suggested to tie the lifetime to whichever
1650 /// source lifetime is safe in the context, such as by providing a helper
1651 /// function taking the lifetime of a host value for the slice, or by explicit
1659 /// // manifest a slice out of thin air!
1660 /// let ptr = 0x1234 as *const usize;
1663 /// let slice = slice::from_raw_parts(ptr, amt);
1667 #[stable(feature = "rust1", since = "1.0.0")]
1668 pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] {
1669 transmute(RawSlice { data: p, len: len })
1672 /// Performs the same functionality as `from_raw_parts`, except that a mutable
1673 /// slice is returned.
1675 /// This function is unsafe for the same reasons as `from_raw_parts`, as well
1676 /// as not being able to provide a non-aliasing guarantee of the returned
1679 #[stable(feature = "rust1", since = "1.0.0")]
1680 pub unsafe fn from_raw_parts_mut<'a, T>(p: *mut T, len: usize) -> &'a mut [T] {
1681 transmute(RawSlice { data: p, len: len })
1684 /// Forms a slice from a pointer and a length.
1686 /// The pointer given is actually a reference to the base of the slice. This
1687 /// reference is used to give a concrete lifetime to tie the returned slice to.
1688 /// Typically this should indicate that the slice is valid for as long as the
1689 /// pointer itself is valid.
1691 /// The `len` argument is the number of **elements**, not the number of bytes.
1693 /// This function is unsafe as there is no guarantee that the given pointer is
1694 /// valid for `len` elements, nor whether the lifetime provided is a suitable
1695 /// lifetime for the returned slice.
1702 /// // manifest a slice out of thin air!
1703 /// let ptr = 0x1234 as *const usize;
1706 /// let slice = slice::from_raw_buf(&ptr, amt);
1710 #[unstable(feature = "core")]
1711 #[deprecated(since = "1.0.0",
1712 reason = "use from_raw_parts")]
1713 pub unsafe fn from_raw_buf<'a, T>(p: &'a *const T, len: usize) -> &'a [T] {
1714 transmute(RawSlice { data: *p, len: len })
1717 /// Performs the same functionality as `from_raw_buf`, except that a mutable
1718 /// slice is returned.
1720 /// This function is unsafe for the same reasons as `from_raw_buf`, as well as
1721 /// not being able to provide a non-aliasing guarantee of the returned mutable
1724 #[unstable(feature = "core")]
1725 #[deprecated(since = "1.0.0",
1726 reason = "use from_raw_parts_mut")]
1727 pub unsafe fn from_raw_mut_buf<'a, T>(p: &'a *mut T, len: usize) -> &'a mut [T] {
1728 transmute(RawSlice { data: *p, len: len })
1735 /// Operations on `[u8]`.
1736 #[unstable(feature = "core", reason = "needs review")]
1739 use slice::SliceExt;
1741 /// A trait for operations on mutable `[u8]`s.
1742 pub trait MutableByteVector {
1743 /// Sets all bytes of the receiver to the given value.
1744 fn set_memory(&mut self, value: u8);
1747 impl MutableByteVector for [u8] {
1749 fn set_memory(&mut self, value: u8) {
1750 unsafe { ptr::write_bytes(self.as_mut_ptr(), value, self.len()) };
1754 /// Copies data from `src` to `dst`
1756 /// Panics if the length of `dst` is less than the length of `src`.
1758 pub fn copy_memory(dst: &mut [u8], src: &[u8]) {
1759 let len_src = src.len();
1760 assert!(dst.len() >= len_src);
1761 // `dst` is unaliasable, so we know statically it doesn't overlap
1764 ptr::copy_nonoverlapping(dst.as_mut_ptr(),
1774 // Boilerplate traits
1777 #[stable(feature = "rust1", since = "1.0.0")]
1778 impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
1779 fn eq(&self, other: &[B]) -> bool {
1780 self.len() == other.len() &&
1781 order::eq(self.iter(), other.iter())
1783 fn ne(&self, other: &[B]) -> bool {
1784 self.len() != other.len() ||
1785 order::ne(self.iter(), other.iter())
// Total equality for slices follows element-wise from `T: Eq`; the
// `PartialEq<[B]> for [A]` impl in this file supplies `eq`/`ne`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
1792 #[stable(feature = "rust1", since = "1.0.0")]
1793 impl<T: Ord> Ord for [T] {
1794 fn cmp(&self, other: &[T]) -> Ordering {
1795 order::cmp(self.iter(), other.iter())
1799 #[stable(feature = "rust1", since = "1.0.0")]
1800 impl<T: PartialOrd> PartialOrd for [T] {
1802 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
1803 order::partial_cmp(self.iter(), other.iter())
1806 fn lt(&self, other: &[T]) -> bool {
1807 order::lt(self.iter(), other.iter())
1810 fn le(&self, other: &[T]) -> bool {
1811 order::le(self.iter(), other.iter())
1814 fn ge(&self, other: &[T]) -> bool {
1815 order::ge(self.iter(), other.iter())
1818 fn gt(&self, other: &[T]) -> bool {
1819 order::gt(self.iter(), other.iter())
1823 /// Extension methods for slices containing integers.
1824 #[unstable(feature = "core")]
1825 pub trait IntSliceExt<U, S> {
1826 /// Converts the slice to an immutable slice of unsigned integers with the same width.
1827 fn as_unsigned<'a>(&'a self) -> &'a [U];
1828 /// Converts the slice to an immutable slice of signed integers with the same width.
1829 fn as_signed<'a>(&'a self) -> &'a [S];
1831 /// Converts the slice to a mutable slice of unsigned integers with the same width.
1832 fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
1833 /// Converts the slice to a mutable slice of signed integers with the same width.
1834 fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];
/// Implements `IntSliceExt<$u, $s>` for `[$t]`. All four conversions are
/// width-preserving reinterpretations, so a transmute of the slice
/// reference is sufficient.
macro_rules! impl_int_slice {
    ($u:ty, $s:ty, $t:ty) => {
        #[unstable(feature = "core")]
        impl IntSliceExt<$u, $s> for [$t] {
            #[inline]
            fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
            #[inline]
            fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
            #[inline]
            fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
            #[inline]
            fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
        }
    }
}
/// Invokes `impl_int_slice!` for both the unsigned and the signed slice
/// type of one integer width.
macro_rules! impl_int_slices {
    ($u:ty, $s:ty) => {
        impl_int_slice! { $u, $s, $u }
        impl_int_slice! { $u, $s, $s }
    }
}
// Provide the unsigned/signed reinterpretation methods for every integer
// width, in both directions.
impl_int_slices! { u8, i8 }
impl_int_slices! { u16, i16 }
impl_int_slices! { u32, i32 }
impl_int_slices! { u64, i64 }
impl_int_slices! { usize, isize }