//! Trait implementations for `ByteStr`.

use crate::bstr::ByteStr;
use crate::cmp::Ordering;
use crate::slice::SliceIndex;
use crate::{hash, ops, range};

#[unstable(feature = "bstr", issue = "134915")]
impl Ord for ByteStr {
    #[inline]
    fn cmp(&self, other: &ByteStr) -> Ordering {
        Ord::cmp(&self.0, &other.0)
    }
}

#[unstable(feature = "bstr", issue = "134915")]
impl PartialOrd for ByteStr {
    #[inline]
    fn partial_cmp(&self, other: &ByteStr) -> Option<Ordering> {
        PartialOrd::partial_cmp(&self.0, &other.0)
    }
}

#[unstable(feature = "bstr", issue = "134915")]
impl PartialEq<ByteStr> for ByteStr {
    #[inline]
    fn eq(&self, other: &ByteStr) -> bool {
        &self.0 == &other.0
    }
}

#[unstable(feature = "bstr", issue = "134915")]
impl Eq for ByteStr {}

#[unstable(feature = "bstr", issue = "134915")]
impl hash::Hash for ByteStr {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}
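
// All of the impls above defer to the inner `[u8]`, so a `ByteStr` compares,
// tests equal, and hashes exactly like the byte slice it wraps (ordering is
// lexicographic by byte value). A rough illustration, using
// `ByteStr::from_bytes` from the parent module:
//
//     let a = ByteStr::from_bytes(b"abc");
//     let b = ByteStr::from_bytes(b"abd");
//     assert!(a < b);
//     assert_eq!(a, ByteStr::from_bytes(b"abc"));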

#[doc(hidden)]
#[macro_export]
#[unstable(feature = "bstr_internals", issue = "none")]
macro_rules! impl_partial_eq {
    ($lhs:ty, $rhs:ty) => {
        #[allow(unused_lifetimes)]
        impl<'a> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                let other: &[u8] = other.as_ref();
                PartialEq::eq(self.as_bytes(), other)
            }
        }

        #[allow(unused_lifetimes)]
        impl<'a> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                let this: &[u8] = self.as_ref();
                PartialEq::eq(this, other.as_bytes())
            }
        }
    };
}
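
// As a rough sketch, the invocation `impl_partial_eq!(ByteStr, str)` used
// further down expands to a symmetric pair of impls along these lines
// (`#[inline]` omitted for brevity); the otherwise unused `'a` parameter,
// silenced by `#[allow(unused_lifetimes)]`, is presumably there so that
// invocations elsewhere can pass types that name a lifetime:
//
//     impl<'a> PartialEq<str> for ByteStr {
//         fn eq(&self, other: &str) -> bool {
//             let other: &[u8] = other.as_ref();
//             PartialEq::eq(self.as_bytes(), other)
//         }
//     }
//
//     impl<'a> PartialEq<ByteStr> for str {
//         fn eq(&self, other: &ByteStr) -> bool {
//             let this: &[u8] = self.as_ref();
//             PartialEq::eq(this, other.as_bytes())
//         }
//     }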

#[doc(hidden)]
#[unstable(feature = "bstr_internals", issue = "none")]
pub use impl_partial_eq;

#[doc(hidden)]
#[macro_export]
#[unstable(feature = "bstr_internals", issue = "none")]
macro_rules! impl_partial_eq_ord {
    ($lhs:ty, $rhs:ty) => {
        $crate::bstr::impl_partial_eq!($lhs, $rhs);

        #[allow(unused_lifetimes)]
        #[unstable(feature = "bstr", issue = "134915")]
        impl<'a> PartialOrd<$rhs> for $lhs {
            #[inline]
            fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
                let other: &[u8] = other.as_ref();
                PartialOrd::partial_cmp(self.as_bytes(), other)
            }
        }

        #[allow(unused_lifetimes)]
        #[unstable(feature = "bstr", issue = "134915")]
        impl<'a> PartialOrd<$lhs> for $rhs {
            #[inline]
            fn partial_cmp(&self, other: &$lhs) -> Option<Ordering> {
                let this: &[u8] = self.as_ref();
                PartialOrd::partial_cmp(this, other.as_bytes())
            }
        }
    };
}

#[doc(hidden)]
#[unstable(feature = "bstr_internals", issue = "none")]
pub use impl_partial_eq_ord;

#[doc(hidden)]
#[macro_export]
#[unstable(feature = "bstr_internals", issue = "none")]
macro_rules! impl_partial_eq_n {
    ($lhs:ty, $rhs:ty) => {
        #[allow(unused_lifetimes)]
        #[unstable(feature = "bstr", issue = "134915")]
        impl<const N: usize> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                let other: &[u8] = other.as_ref();
                PartialEq::eq(self.as_bytes(), other)
            }
        }

        #[allow(unused_lifetimes)]
        #[unstable(feature = "bstr", issue = "134915")]
        impl<const N: usize> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                let this: &[u8] = self.as_ref();
                PartialEq::eq(this, other.as_bytes())
            }
        }
    };
}

#[doc(hidden)]
#[unstable(feature = "bstr_internals", issue = "none")]
pub use impl_partial_eq_n;
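
// The three helper macros above are `#[macro_export]`ed and re-exported under
// the `bstr_internals` feature so that other standard library crates (for the
// owned `ByteString` type, for instance) can stamp out the same comparison
// impls. `impl_partial_eq_n` is the array variant: it takes a `const N: usize`
// parameter instead of a lifetime. A hypothetical invocation elsewhere might
// look like:
//
//     impl_partial_eq_ord!(ByteString, str);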

// PartialOrd with `[u8]` omitted to avoid inference failures
impl_partial_eq!(ByteStr, [u8]);
// PartialOrd with `&[u8]` omitted to avoid inference failures
impl_partial_eq!(ByteStr, &[u8]);
// PartialOrd with `str` omitted to avoid inference failures
impl_partial_eq!(ByteStr, str);
// PartialOrd with `&str` omitted to avoid inference failures
impl_partial_eq!(ByteStr, &str);
// PartialOrd with `[u8; N]` omitted to avoid inference failures
impl_partial_eq_n!(ByteStr, [u8; N]);
// PartialOrd with `&[u8; N]` omitted to avoid inference failures
impl_partial_eq_n!(ByteStr, &[u8; N]);
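
// With the invocations above in place, a `ByteStr` compares for equality
// directly against byte slices, string slices, and byte arrays, in either
// direction. A rough illustration:
//
//     let s = ByteStr::from_bytes(b"abc");
//     assert!(s == "abc");
//     assert!(s == b"abc");
//     assert!("abc" == s);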

#[unstable(feature = "bstr", issue = "134915")]
impl<I> ops::Index<I> for ByteStr
where
    I: SliceIndex<ByteStr>,
{
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &I::Output {
        index.index(self)
    }
}

#[unstable(feature = "bstr", issue = "134915")]
impl<I> ops::IndexMut<I> for ByteStr
where
    I: SliceIndex<ByteStr>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut I::Output {
        index.index_mut(self)
    }
}

#[unstable(feature = "bstr", issue = "134915")]
unsafe impl SliceIndex<ByteStr> for ops::RangeFull {
    type Output = ByteStr;
    #[inline]
    fn get(self, slice: &ByteStr) -> Option<&Self::Output> {
        Some(slice)
    }
    #[inline]
    fn get_mut(self, slice: &mut ByteStr) -> Option<&mut Self::Output> {
        Some(slice)
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: *const ByteStr) -> *const Self::Output {
        slice
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: *mut ByteStr) -> *mut Self::Output {
        slice
    }
    #[inline]
    fn index(self, slice: &ByteStr) -> &Self::Output {
        slice
    }
    #[inline]
    fn index_mut(self, slice: &mut ByteStr) -> &mut Self::Output {
        slice
    }
}

#[unstable(feature = "bstr", issue = "134915")]
unsafe impl SliceIndex<ByteStr> for usize {
    type Output = u8;
    #[inline]
    fn get(self, slice: &ByteStr) -> Option<&Self::Output> {
        self.get(slice.as_bytes())
    }
    #[inline]
    fn get_mut(self, slice: &mut ByteStr) -> Option<&mut Self::Output> {
        self.get_mut(slice.as_bytes_mut())
    }
    #[inline]
    unsafe fn get_unchecked(self, slice: *const ByteStr) -> *const Self::Output {
        // SAFETY: the caller has to uphold the safety contract for `get_unchecked`.
        unsafe { self.get_unchecked(slice as *const [u8]) }
    }
    #[inline]
    unsafe fn get_unchecked_mut(self, slice: *mut ByteStr) -> *mut Self::Output {
        // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`.
        unsafe { self.get_unchecked_mut(slice as *mut [u8]) }
    }
    #[inline]
    fn index(self, slice: &ByteStr) -> &Self::Output {
        self.index(slice.as_bytes())
    }
    #[inline]
    fn index_mut(self, slice: &mut ByteStr) -> &mut Self::Output {
        self.index_mut(slice.as_bytes_mut())
    }
}

macro_rules! impl_slice_index {
    ($index:ty) => {
        #[unstable(feature = "bstr", issue = "134915")]
        unsafe impl SliceIndex<ByteStr> for $index {
            type Output = ByteStr;
            #[inline]
            fn get(self, slice: &ByteStr) -> Option<&Self::Output> {
                self.get(slice.as_bytes()).map(ByteStr::from_bytes)
            }
            #[inline]
            fn get_mut(self, slice: &mut ByteStr) -> Option<&mut Self::Output> {
                self.get_mut(slice.as_bytes_mut()).map(ByteStr::from_bytes_mut)
            }
            #[inline]
            unsafe fn get_unchecked(self, slice: *const ByteStr) -> *const Self::Output {
                // SAFETY: the caller has to uphold the safety contract for `get_unchecked`.
                unsafe { self.get_unchecked(slice as *const [u8]) as *const ByteStr }
            }
            #[inline]
            unsafe fn get_unchecked_mut(self, slice: *mut ByteStr) -> *mut Self::Output {
                // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`.
                unsafe { self.get_unchecked_mut(slice as *mut [u8]) as *mut ByteStr }
            }
            #[inline]
            fn index(self, slice: &ByteStr) -> &Self::Output {
                ByteStr::from_bytes(self.index(slice.as_bytes()))
            }
            #[inline]
            fn index_mut(self, slice: &mut ByteStr) -> &mut Self::Output {
                ByteStr::from_bytes_mut(self.index_mut(slice.as_bytes_mut()))
            }
        }
    };
}

impl_slice_index!(ops::IndexRange);
impl_slice_index!(ops::Range<usize>);
impl_slice_index!(range::Range<usize>);
impl_slice_index!(ops::RangeTo<usize>);
impl_slice_index!(ops::RangeFrom<usize>);
impl_slice_index!(range::RangeFrom<usize>);
impl_slice_index!(ops::RangeInclusive<usize>);
impl_slice_index!(range::RangeInclusive<usize>);
impl_slice_index!(ops::RangeToInclusive<usize>);
impl_slice_index!((ops::Bound<usize>, ops::Bound<usize>));
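
// Taken together, the impls above let a `ByteStr` be indexed just like a
// `[u8]`: a bare `usize` yields a `u8`, while any of the supported range types
// yields a `ByteStr` sub-slice. A rough illustration:
//
//     let s = ByteStr::from_bytes(b"hello");
//     assert_eq!(s[0], b'h');
//     assert_eq!(&s[1..3], ByteStr::from_bytes(b"el"));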
278 | |