//! Comparison traits for `[T]`.

use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::mem;

use super::from_raw_parts;
use super::memchr;

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, U> PartialEq<[U]> for [T]
where
    T: PartialEq<U>,
{
    fn eq(&self, other: &[U]) -> bool {
        SlicePartialEq::equal(self, other)
    }

    fn ne(&self, other: &[U]) -> bool {
        SlicePartialEq::not_equal(self, other)
    }
}
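
// Illustrative sketch (added commentary, not from the original source): since
// the impl above is generic over two element types, slices compare across
// element types whenever `T: PartialEq<U>` holds, e.g. `String` vs. `&str`:
//
//     let owned = [String::from("a"), String::from("b")];
//     let borrowed = ["a", "b"];
//     assert!(owned[..] == borrowed[..]);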

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
}
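
// Illustrative sketch (added commentary, not from the original source):
// lexicographical comparison walks the common prefix and, if it is entirely
// equal, falls back to comparing lengths, so a strict prefix orders first:
//
//     use core::cmp::Ordering;
//     assert_eq!([1, 2, 3][..].cmp(&[1, 2, 4][..]), Ordering::Less); // differs at index 2
//     assert_eq!([1, 2][..].cmp(&[1, 2, 3][..]), Ordering::Less);    // strict prefix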

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    fn not_equal(&self, other: &[B]) -> bool {
        !self.equal(other)
    }
}

// Generic slice equality
impl<A, B> SlicePartialEq<B> for [A]
where
    A: PartialEq<B>,
{
    default fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        for idx in 0..self.len() {
            // bound checks are optimized away
            if self[idx] != other[idx] {
                return false;
            }
        }

        true
    }
}

// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
impl<A, B> SlicePartialEq<B> for [A]
where
    A: BytewiseEq<B>,
{
    fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
        // The two slices have been checked to have the same size above.
        unsafe {
            let size = mem::size_of_val(self);
            compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
        }
    }
}
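
// Illustrative note (added commentary, not from the original source):
// `mem::size_of_val` on a slice is `len * size_of::<A>()`, so the single
// `compare_bytes` call above covers every element. For a `&[u16]` of
// length 3 that is 6 bytes:
//
//     let a: &[u16] = &[1, 2, 3];
//     assert_eq!(core::mem::size_of_val(a), 6);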

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        let l = cmp::min(left.len(), right.len());

        // Slice to the loop iteration range to enable bound check
        // elimination in the compiler
        let lhs = &left[..l];
        let rhs = &right[..l];

        for i in 0..l {
            match lhs[i].partial_cmp(&rhs[i]) {
                Some(Ordering::Equal) => (),
                non_eq => return non_eq,
            }
        }

        left.len().partial_cmp(&right.len())
    }
}
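
// Illustrative sketch (added commentary, not from the original source): with
// a merely partial order such as `f64`, the first incomparable element pair
// short-circuits the loop above to `None`:
//
//     let a = [1.0, f64::NAN];
//     let b = [1.0, 2.0];
//     assert_eq!(a[..].partial_cmp(&b[..]), None);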

// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
    A: Ord,
{
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
*/

impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}

#[rustc_specialization_trait]
trait AlwaysApplicableOrd: SliceOrd + Ord {}

macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}

#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let l = cmp::min(left.len(), right.len());

        // Slice to the loop iteration range to enable bound check
        // elimination in the compiler
        let lhs = &left[..l];
        let rhs = &right[..l];

        for i in 0..l {
            match lhs[i].cmp(&rhs[i]) {
                Ordering::Equal => (),
                non_eq => return non_eq,
            }
        }

        left.len().cmp(&right.len())
    }
}

// `compare_bytes` compares a sequence of unsigned bytes lexicographically.
// This matches the order we want for `[u8]`, but no others (not even `[i8]`).
impl SliceOrd for u8 {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to `isize::MAX`, this never underflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        // SAFETY: `left` and `right` are references and are thus guaranteed to be valid.
        // We use the minimum of both lengths which guarantees that both regions are
        // valid for reads in that interval.
        let mut order = unsafe { compare_bytes(left.as_ptr(), right.as_ptr(), len) as isize };
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
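
// Illustrative sketch (added commentary, not from the original source) of why
// the byte-wise shortcut is sound for `u8` but not `i8`: `-1i8` is the byte
// 0xFF, so a byte-wise comparison would order it above `1i8` (byte 0x01),
// which is the opposite of its signed order:
//
//     assert!((-1i8 as u8) > (1i8 as u8)); // 0xFF > 0x01 byte-wise
//     assert!(-1i8 < 1i8);                 // but -1 < 1 as i8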

pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}

impl<T> SliceContains for T
where
    T: PartialEq,
{
    default fn slice_contains(&self, x: &[Self]) -> bool {
        x.iter().any(|y| *y == *self)
    }
}

impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        memchr::memchr(*self, x).is_some()
    }
}

impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}
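
// Illustrative sketch (added commentary, not from the original source): the
// `as u8` cast preserves the bit pattern, so searching for `-1i8` finds the
// byte 0xFF and `contains` agrees with the generic implementation:
//
//     let haystack: &[i8] = &[-3, -1, 2];
//     assert!(haystack.contains(&-1));
//     assert!(!haystack.contains(&5));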