//! Comparison traits for `[T]`.

use crate::cmp::{self, Ordering};
use crate::ffi;
use crate::mem;

use super::from_raw_parts;
use super::memchr;
11 /// Calls implementation provided memcmp.
13 /// Interprets the data as u8.
15 /// Returns 0 for equal, < 0 for less than and > 0 for greater
17 fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> ffi::c_int;
20 #[stable(feature = "rust1", since = "1.0.0")]
21 impl<A, B> PartialEq<[B]> for [A]
25 fn eq(&self, other: &[B]) -> bool {
26 SlicePartialEq::equal(self, other)
29 fn ne(&self, other: &[B]) -> bool {
30 SlicePartialEq::not_equal(self, other)
34 #[stable(feature = "rust1", since = "1.0.0")]
35 impl<T: Eq> Eq for [T] {}
37 /// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
38 #[stable(feature = "rust1", since = "1.0.0")]
39 impl<T: Ord> Ord for [T] {
40 fn cmp(&self, other: &[T]) -> Ordering {
41 SliceOrd::compare(self, other)
45 /// Implements comparison of vectors [lexicographically](Ord#lexicographical-comparison).
46 #[stable(feature = "rust1", since = "1.0.0")]
47 impl<T: PartialOrd> PartialOrd for [T] {
48 fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
49 SlicePartialOrd::partial_compare(self, other)
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    // Default `ne` is the negation of `equal`, so specialized impls
    // only need to provide `equal`.
    fn not_equal(&self, other: &[B]) -> bool {
        !self.equal(other)
    }
}
63 // Generic slice equality
64 impl<A, B> SlicePartialEq<B> for [A]
68 default fn equal(&self, other: &[B]) -> bool {
69 if self.len() != other.len() {
73 self.iter().zip(other.iter()).all(|(x, y)| x == y)
77 // Use memcmp for bytewise equality when the types allow
78 impl<A, B> SlicePartialEq<B> for [A]
80 A: BytewiseEquality<B>,
82 fn equal(&self, other: &[B]) -> bool {
83 if self.len() != other.len() {
87 // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
88 // The two slices have been checked to have the same size above.
90 let size = mem::size_of_val(self);
91 memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
97 // intermediate trait for specialization of slice's PartialOrd
98 trait SlicePartialOrd: Sized {
99 fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
102 impl<A: PartialOrd> SlicePartialOrd for A {
103 default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
104 let l = cmp::min(left.len(), right.len());
106 // Slice to the loop iteration range to enable bound check
107 // elimination in the compiler
108 let lhs = &left[..l];
109 let rhs = &right[..l];
112 match lhs[i].partial_cmp(&rhs[i]) {
113 Some(Ordering::Equal) => (),
114 non_eq => return non_eq,
118 left.len().partial_cmp(&right.len())
// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
    A: Ord,
{
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
*/
135 impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
136 fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
137 Some(SliceOrd::compare(left, right))
141 #[rustc_specialization_trait]
142 trait AlwaysApplicableOrd: SliceOrd + Ord {}
// Generates `AlwaysApplicableOrd` impls; each entry is a (possibly empty)
// generic-parameter list in brackets followed by the implementing type.
macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}
150 always_applicable_ord! {
151 [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
152 [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
154 [T: ?Sized] *const T, [T: ?Sized] *mut T,
155 [T: AlwaysApplicableOrd] &T,
156 [T: AlwaysApplicableOrd] &mut T,
157 [T: AlwaysApplicableOrd] Option<T>,
161 // intermediate trait for specialization of slice's Ord
162 trait SliceOrd: Sized {
163 fn compare(left: &[Self], right: &[Self]) -> Ordering;
166 impl<A: Ord> SliceOrd for A {
167 default fn compare(left: &[Self], right: &[Self]) -> Ordering {
168 let l = cmp::min(left.len(), right.len());
170 // Slice to the loop iteration range to enable bound check
171 // elimination in the compiler
172 let lhs = &left[..l];
173 let rhs = &right[..l];
176 match lhs[i].cmp(&rhs[i]) {
177 Ordering::Equal => (),
178 non_eq => return non_eq,
182 left.len().cmp(&right.len())
186 // memcmp compares a sequence of unsigned bytes lexicographically.
187 // this matches the order we want for [u8], but no others (not even [i8]).
188 impl SliceOrd for u8 {
190 fn compare(left: &[Self], right: &[Self]) -> Ordering {
191 // Since the length of a slice is always less than or equal to isize::MAX, this never underflows.
192 let diff = left.len() as isize - right.len() as isize;
193 // This comparison gets optimized away (on x86_64 and ARM) because the subtraction updates flags.
194 let len = if left.len() < right.len() { left.len() } else { right.len() };
195 // SAFETY: `left` and `right` are references and are thus guaranteed to be valid.
196 // We use the minimum of both lengths which guarantees that both regions are
197 // valid for reads in that interval.
198 let mut order = unsafe { memcmp(left.as_ptr(), right.as_ptr(), len) as isize };
206 // Hack to allow specializing on `Eq` even though `Eq` has a method.
207 #[rustc_unsafe_specialization_marker]
208 trait MarkerEq<T>: PartialEq<T> {}
210 impl<T: Eq> MarkerEq<T> for T {}
213 /// Trait implemented for types that can be compared for equality using
214 /// their bytewise representation
215 #[rustc_specialization_trait]
216 trait BytewiseEquality<T>: MarkerEq<T> + Copy {}
// Implements `$traitname<$ty> for $ty` for each listed type.
macro_rules! impl_marker_for {
    ($traitname:ident, $($ty:ty)*) => {
        $(
            impl $traitname<$ty> for $ty { }
        )*
    }
}
226 impl_marker_for!(BytewiseEquality,
227 u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
229 pub(super) trait SliceContains: Sized {
230 fn slice_contains(&self, x: &[Self]) -> bool;
233 impl<T> SliceContains for T
237 default fn slice_contains(&self, x: &[Self]) -> bool {
238 x.iter().any(|y| *y == *self)
242 impl SliceContains for u8 {
244 fn slice_contains(&self, x: &[Self]) -> bool {
245 memchr::memchr(*self, x).is_some()
249 impl SliceContains for i8 {
251 fn slice_contains(&self, x: &[Self]) -> bool {
252 let byte = *self as u8;
253 // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
254 // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
255 // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
256 // than `isize::MAX`. The returned slice is never mutated.
257 let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
258 memchr::memchr(byte, bytes).is_some()