//! Macros used by the slice iterators.
2 | |
/// Convenience & performance macro for consuming the `end_or_len` field, by
/// giving a `(&mut) usize` or `(&mut) NonNull<T>` depending on whether `T`
/// is or is not a ZST, respectively.
6 | /// |
7 | /// Internally, this reads the `end` through a pointer-to-`NonNull` so that |
8 | /// it'll get the appropriate non-null metadata in the backend without needing |
9 | /// to call `assume` manually. |
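///
/// For example, `is_empty!` and `len!` below use the by-value form, while the
/// pointer-bumping helpers in `iterator!` use the `mut` form, roughly like this
/// (illustrative sketch of the invocation shape only):
///
/// ```ignore (private macro, shown for illustration)
/// if_zst!(mut self,
///     len => *len = 0,          // `len: &mut usize` when `T` is a ZST
///     end => self.ptr = *end,   // `end: &mut NonNull<T>` otherwise
/// );
/// ```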
10 | macro_rules! if_zst { |
11 | (mut $this:ident, $len:ident => $zst_body:expr, $end:ident => $other_body:expr,) => {{ |
12 | #![allow(unused_unsafe)] // we're sometimes used within an unsafe block |
13 | |
14 | if T::IS_ZST { |
15 | // SAFETY: for ZSTs, the pointer is storing a provenance-free length, |
16 | // so consuming and updating it as a `usize` is fine. |
17 | let $len = unsafe { &mut *(&raw mut $this.end_or_len).cast::<usize>() }; |
18 | $zst_body |
19 | } else { |
20 | // SAFETY: for non-ZSTs, the type invariant ensures it cannot be null |
21 | let $end = unsafe { &mut *(&raw mut $this.end_or_len).cast::<NonNull<T>>() }; |
22 | $other_body |
23 | } |
24 | }}; |
25 | ($this:ident, $len:ident => $zst_body:expr, $end:ident => $other_body:expr,) => {{ |
26 | #![allow(unused_unsafe)] // we're sometimes used within an unsafe block |
27 | |
28 | if T::IS_ZST { |
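            // For ZSTs the pointer stores a provenance-free length (see the
            // `mut` arm above), so `addr` recovers that length directly.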
29 | let $len = $this.end_or_len.addr(); |
30 | $zst_body |
31 | } else { |
32 | // SAFETY: for non-ZSTs, the type invariant ensures it cannot be null |
33 | let $end = unsafe { mem::transmute::<*const T, NonNull<T>>($this.end_or_len) }; |
34 | $other_body |
35 | } |
36 | }}; |
37 | } |
38 | |
39 | // Inlining is_empty and len makes a huge performance difference |
40 | macro_rules! is_empty { |
41 | ($self: ident) => { |
42 | if_zst!($self, |
43 | len => len == 0, |
44 | end => $self.ptr == end, |
45 | ) |
46 | }; |
47 | } |
48 | |
49 | macro_rules! len { |
50 | ($self: ident) => {{ |
51 | if_zst!($self, |
52 | len => len, |
53 | end => { |
                // To get rid of some bounds checks (see `position`), we use
                // `offset_from_unsigned` instead of `offset_from`.
                // (Tested by `codegen/slice-position-bounds-check`.)
56 | // SAFETY: by the type invariant pointers are aligned and `start <= end` |
57 | unsafe { end.offset_from_unsigned($self.ptr) } |
58 | }, |
59 | ) |
60 | }}; |
61 | } |
62 | |
63 | // The shared definition of the `Iter` and `IterMut` iterators |
64 | macro_rules! iterator { |
65 | ( |
66 | struct $name:ident -> $ptr:ty, |
67 | $elem:ty, |
68 | $raw_mut:tt, |
69 | {$( $mut_:tt )?}, |
70 | $into_ref:ident, |
71 | {$($extra:tt)*} |
72 | ) => { |
73 | impl<'a, T> $name<'a, T> { |
74 | /// Returns the last element and moves the end of the iterator backwards by 1. |
75 | /// |
76 | /// # Safety |
77 | /// |
            /// The iterator must not be empty.
79 | #[inline] |
80 | unsafe fn next_back_unchecked(&mut self) -> $elem { |
81 | // SAFETY: the caller promised it's not empty, so |
82 | // the offsetting is in-bounds and there's an element to return. |
83 | unsafe { self.pre_dec_end(1).$into_ref() } |
84 | } |
85 | |
86 | // Helper function for creating a slice from the iterator. |
87 | #[inline(always)] |
88 | fn make_slice(&self) -> &'a [T] { |
89 | // SAFETY: the iterator was created from a slice with pointer |
90 | // `self.ptr` and length `len!(self)`. This guarantees that all |
91 | // the prerequisites for `from_raw_parts` are fulfilled. |
92 | unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) } |
93 | } |
94 | |
95 | // Helper function for moving the start of the iterator forwards by `offset` elements, |
96 | // returning the old start. |
97 | // Unsafe because the offset must not exceed `self.len()`. |
98 | #[inline(always)] |
99 | unsafe fn post_inc_start(&mut self, offset: usize) -> NonNull<T> { |
100 | let old = self.ptr; |
101 | |
102 | // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, |
103 | // so this new pointer is inside `self` and thus guaranteed to be non-null. |
104 | unsafe { |
105 | if_zst!(mut self, |
106 | // Using the intrinsic directly avoids emitting a UbCheck |
107 | len => *len = crate::intrinsics::unchecked_sub(*len, offset), |
108 | _end => self.ptr = self.ptr.add(offset), |
109 | ); |
110 | } |
111 | old |
112 | } |
113 | |
114 | // Helper function for moving the end of the iterator backwards by `offset` elements, |
115 | // returning the new end. |
116 | // Unsafe because the offset must not exceed `self.len()`. |
117 | #[inline(always)] |
118 | unsafe fn pre_dec_end(&mut self, offset: usize) -> NonNull<T> { |
119 | if_zst!(mut self, |
120 | // SAFETY: By our precondition, `offset` can be at most the |
121 | // current length, so the subtraction can never overflow. |
122 | len => unsafe { |
123 | // Using the intrinsic directly avoids emitting a UbCheck |
124 | *len = crate::intrinsics::unchecked_sub(*len, offset); |
125 | self.ptr |
126 | }, |
127 | // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, |
128 | // which is guaranteed to not overflow an `isize`. Also, the resulting pointer |
129 | // is in bounds of `slice`, which fulfills the other requirements for `offset`. |
130 | end => unsafe { |
131 | *end = end.sub(offset); |
132 | *end |
133 | }, |
134 | ) |
135 | } |
136 | } |
137 | |
138 | #[stable(feature = "rust1" , since = "1.0.0" )] |
139 | impl<T> ExactSizeIterator for $name<'_, T> { |
140 | #[inline(always)] |
141 | fn len(&self) -> usize { |
142 | len!(self) |
143 | } |
144 | |
145 | #[inline(always)] |
146 | fn is_empty(&self) -> bool { |
147 | is_empty!(self) |
148 | } |
149 | } |
150 | |
151 | #[stable(feature = "rust1" , since = "1.0.0" )] |
152 | impl<'a, T> Iterator for $name<'a, T> { |
153 | type Item = $elem; |
154 | |
155 | #[inline] |
156 | fn next(&mut self) -> Option<$elem> { |
157 | // intentionally not using the helpers because this is |
158 | // one of the most mono'd things in the library. |
159 | |
160 | let ptr = self.ptr; |
161 | let end_or_len = self.end_or_len; |
                // SAFETY: See inner comments. (For some reason having multiple
                // blocks breaks inlining this -- if you can fix that please do!)
164 | unsafe { |
165 | if T::IS_ZST { |
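                        // For a ZST pointee, `end_or_len` stores the remaining
                        // length directly as a provenance-free address.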
166 | let len = end_or_len.addr(); |
167 | if len == 0 { |
168 | return None; |
169 | } |
170 | // SAFETY: just checked that it's not zero, so subtracting one |
171 | // cannot wrap. (Ideally this would be `checked_sub`, which |
172 | // does the same thing internally, but as of 2025-02 that |
173 | // doesn't optimize quite as small in MIR.) |
174 | self.end_or_len = without_provenance_mut(len.unchecked_sub(1)); |
175 | } else { |
176 | // SAFETY: by type invariant, the `end_or_len` field is always |
177 | // non-null for a non-ZST pointee. (This transmute ensures we |
178 | // get `!nonnull` metadata on the load of the field.) |
179 | if ptr == crate::intrinsics::transmute::<$ptr, NonNull<T>>(end_or_len) { |
180 | return None; |
181 | } |
182 | // SAFETY: since it's not empty, per the check above, moving |
183 | // forward one keeps us inside the slice, and this is valid. |
184 | self.ptr = ptr.add(1); |
185 | } |
186 | // SAFETY: Now that we know it wasn't empty and we've moved past |
187 | // the first one (to avoid giving a duplicate `&mut` next time), |
188 | // we can give out a reference to it. |
189 | Some({ptr}.$into_ref()) |
190 | } |
191 | } |
192 | |
193 | #[inline] |
194 | fn size_hint(&self) -> (usize, Option<usize>) { |
195 | let exact = len!(self); |
196 | (exact, Some(exact)) |
197 | } |
198 | |
199 | #[inline] |
200 | fn count(self) -> usize { |
201 | len!(self) |
202 | } |
203 | |
204 | #[inline] |
205 | fn nth(&mut self, n: usize) -> Option<$elem> { |
206 | if n >= len!(self) { |
207 | // This iterator is now empty. |
208 | if_zst!(mut self, |
209 | len => *len = 0, |
210 | end => self.ptr = *end, |
211 | ); |
212 | return None; |
213 | } |
214 | // SAFETY: We are in bounds. `post_inc_start` does the right thing even for ZSTs. |
215 | unsafe { |
216 | self.post_inc_start(n); |
217 | Some(self.next_unchecked()) |
218 | } |
219 | } |
220 | |
221 | #[inline] |
222 | fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> { |
223 | let advance = cmp::min(len!(self), n); |
224 | // SAFETY: By construction, `advance` does not exceed `self.len()`. |
225 | unsafe { self.post_inc_start(advance) }; |
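                // Per the `advance_by` contract, report any steps we could not
                // take as a `NonZero` error; `Ok(())` means we advanced by `n`.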
226 | NonZero::new(n - advance).map_or(Ok(()), Err) |
227 | } |
228 | |
229 | #[inline] |
230 | fn last(mut self) -> Option<$elem> { |
231 | self.next_back() |
232 | } |
233 | |
234 | #[inline] |
235 | fn fold<B, F>(self, init: B, mut f: F) -> B |
236 | where |
237 | F: FnMut(B, Self::Item) -> B, |
238 | { |
                // This implementation applies the following optimizations over the
                // default implementation:
                // - uses a do-while loop, which is LLVM's preferred loop shape,
242 | // see https://releases.llvm.org/16.0.0/docs/LoopTerminology.html#more-canonical-loops |
243 | // - bumps an index instead of a pointer since the latter case inhibits |
244 | // some optimizations, see #111603 |
245 | // - avoids Option wrapping/matching |
246 | if is_empty!(self) { |
247 | return init; |
248 | } |
249 | let mut acc = init; |
250 | let mut i = 0; |
251 | let len = len!(self); |
252 | loop { |
                    // SAFETY: the loop iterates over `i in 0..len`, which is always
                    // in bounds of the slice allocation.
255 | acc = f(acc, unsafe { & $( $mut_ )? *self.ptr.add(i).as_ptr() }); |
256 | // SAFETY: `i` can't overflow since it'll only reach usize::MAX if the |
257 | // slice had that length, in which case we'll break out of the loop |
258 | // after the increment |
259 | i = unsafe { i.unchecked_add(1) }; |
260 | if i == len { |
261 | break; |
262 | } |
263 | } |
264 | acc |
265 | } |
266 | |
267 | // We override the default implementation, which uses `try_fold`, |
268 | // because this simple implementation generates less LLVM IR and is |
269 | // faster to compile. |
270 | #[inline] |
271 | fn for_each<F>(mut self, mut f: F) |
272 | where |
273 | Self: Sized, |
274 | F: FnMut(Self::Item), |
275 | { |
276 | while let Some(x) = self.next() { |
277 | f(x); |
278 | } |
279 | } |
280 | |
281 | // We override the default implementation, which uses `try_fold`, |
282 | // because this simple implementation generates less LLVM IR and is |
283 | // faster to compile. |
284 | #[inline] |
285 | fn all<F>(&mut self, mut f: F) -> bool |
286 | where |
287 | Self: Sized, |
288 | F: FnMut(Self::Item) -> bool, |
289 | { |
290 | while let Some(x) = self.next() { |
291 | if !f(x) { |
292 | return false; |
293 | } |
294 | } |
295 | true |
296 | } |
297 | |
298 | // We override the default implementation, which uses `try_fold`, |
299 | // because this simple implementation generates less LLVM IR and is |
300 | // faster to compile. |
301 | #[inline] |
302 | fn any<F>(&mut self, mut f: F) -> bool |
303 | where |
304 | Self: Sized, |
305 | F: FnMut(Self::Item) -> bool, |
306 | { |
307 | while let Some(x) = self.next() { |
308 | if f(x) { |
309 | return true; |
310 | } |
311 | } |
312 | false |
313 | } |
314 | |
315 | // We override the default implementation, which uses `try_fold`, |
316 | // because this simple implementation generates less LLVM IR and is |
317 | // faster to compile. |
318 | #[inline] |
319 | fn find<P>(&mut self, mut predicate: P) -> Option<Self::Item> |
320 | where |
321 | Self: Sized, |
322 | P: FnMut(&Self::Item) -> bool, |
323 | { |
324 | while let Some(x) = self.next() { |
325 | if predicate(&x) { |
326 | return Some(x); |
327 | } |
328 | } |
329 | None |
330 | } |
331 | |
332 | // We override the default implementation, which uses `try_fold`, |
333 | // because this simple implementation generates less LLVM IR and is |
334 | // faster to compile. |
335 | #[inline] |
336 | fn find_map<B, F>(&mut self, mut f: F) -> Option<B> |
337 | where |
338 | Self: Sized, |
339 | F: FnMut(Self::Item) -> Option<B>, |
340 | { |
341 | while let Some(x) = self.next() { |
342 | if let Some(y) = f(x) { |
343 | return Some(y); |
344 | } |
345 | } |
346 | None |
347 | } |
348 | |
349 | // We override the default implementation, which uses `try_fold`, |
350 | // because this simple implementation generates less LLVM IR and is |
            // faster to compile. Also, the `assert_unchecked` avoids a bounds check.
352 | #[inline] |
353 | #[rustc_inherit_overflow_checks] |
            fn position<P>(&mut self, mut predicate: P) -> Option<usize>
            where
355 | Self: Sized, |
356 | P: FnMut(Self::Item) -> bool, |
357 | { |
358 | let n = len!(self); |
359 | let mut i = 0; |
360 | while let Some(x) = self.next() { |
361 | if predicate(x) { |
362 | // SAFETY: we are guaranteed to be in bounds by the loop invariant: |
363 | // when `i >= n`, `self.next()` returns `None` and the loop breaks. |
364 | unsafe { assert_unchecked(i < n) }; |
365 | return Some(i); |
366 | } |
367 | i += 1; |
368 | } |
369 | None |
370 | } |
371 | |
372 | // We override the default implementation, which uses `try_fold`, |
373 | // because this simple implementation generates less LLVM IR and is |
            // faster to compile. Also, the `assert_unchecked` avoids a bounds check.
375 | #[inline] |
            fn rposition<P>(&mut self, mut predicate: P) -> Option<usize>
            where
377 | P: FnMut(Self::Item) -> bool, |
378 | Self: Sized + ExactSizeIterator + DoubleEndedIterator |
379 | { |
380 | let n = len!(self); |
381 | let mut i = n; |
382 | while let Some(x) = self.next_back() { |
383 | i -= 1; |
384 | if predicate(x) { |
385 | // SAFETY: `i` must be lower than `n` since it starts at `n` |
386 | // and is only decreasing. |
387 | unsafe { assert_unchecked(i < n) }; |
388 | return Some(i); |
389 | } |
390 | } |
391 | None |
392 | } |
393 | |
394 | #[inline] |
395 | unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { |
                // SAFETY: the caller must guarantee that `idx` is in bounds of
                // the underlying slice, so `idx` cannot overflow an `isize`, and
                // the returned reference is guaranteed to refer to an element
                // of the slice and thus guaranteed to be valid.
                //
                // Also note that the caller guarantees that we're never
                // called with the same index again, and that no other methods
                // that will access this subslice are called, so it is valid
                // for the returned reference to be mutable in the case of
                // `IterMut`.
406 | unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) } |
407 | } |
408 | |
409 | $($extra)* |
410 | } |
411 | |
412 | #[stable(feature = "rust1" , since = "1.0.0" )] |
413 | impl<'a, T> DoubleEndedIterator for $name<'a, T> { |
414 | #[inline] |
415 | fn next_back(&mut self) -> Option<$elem> { |
416 | // could be implemented with slices, but this avoids bounds checks |
417 | |
418 | // SAFETY: The call to `next_back_unchecked` |
419 | // is safe since we check if the iterator is empty first. |
420 | unsafe { |
421 | if is_empty!(self) { |
422 | None |
423 | } else { |
424 | Some(self.next_back_unchecked()) |
425 | } |
426 | } |
427 | } |
428 | |
429 | #[inline] |
430 | fn nth_back(&mut self, n: usize) -> Option<$elem> { |
431 | if n >= len!(self) { |
432 | // This iterator is now empty. |
433 | if_zst!(mut self, |
434 | len => *len = 0, |
435 | end => *end = self.ptr, |
436 | ); |
437 | return None; |
438 | } |
439 | // SAFETY: We are in bounds. `pre_dec_end` does the right thing even for ZSTs. |
440 | unsafe { |
441 | self.pre_dec_end(n); |
442 | Some(self.next_back_unchecked()) |
443 | } |
444 | } |
445 | |
446 | #[inline] |
447 | fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> { |
448 | let advance = cmp::min(len!(self), n); |
449 | // SAFETY: By construction, `advance` does not exceed `self.len()`. |
450 | unsafe { self.pre_dec_end(advance) }; |
451 | NonZero::new(n - advance).map_or(Ok(()), Err) |
452 | } |
453 | } |
454 | |
455 | #[stable(feature = "fused" , since = "1.26.0" )] |
456 | impl<T> FusedIterator for $name<'_, T> {} |
457 | |
458 | #[unstable(feature = "trusted_len" , issue = "37572" )] |
459 | unsafe impl<T> TrustedLen for $name<'_, T> {} |
460 | |
461 | impl<'a, T> UncheckedIterator for $name<'a, T> { |
462 | #[inline] |
463 | unsafe fn next_unchecked(&mut self) -> $elem { |
464 | // SAFETY: The caller promised there's at least one more item. |
465 | unsafe { |
466 | self.post_inc_start(1).$into_ref() |
467 | } |
468 | } |
469 | } |
470 | |
471 | #[stable(feature = "default_iters" , since = "1.70.0" )] |
472 | impl<T> Default for $name<'_, T> { |
473 | /// Creates an empty slice iterator. |
474 | /// |
475 | /// ``` |
476 | #[doc = concat!("# use core::slice::" , stringify!($name), ";" )] |
477 | #[doc = concat!("let iter: " , stringify!($name<'_, u8>), " = Default::default();" )] |
478 | /// assert_eq!(iter.len(), 0); |
479 | /// ``` |
480 | fn default() -> Self { |
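                // Borrow an empty array, so this works for both `Iter` (shared
                // borrow) and `IterMut` (`&mut`, when `$mut_` is present).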
481 | (& $( $mut_ )? []).into_iter() |
482 | } |
483 | } |
484 | } |
485 | } |
486 | |
487 | macro_rules! forward_iterator { |
488 | ($name:ident: $elem:ident, $iter_of:ty) => { |
489 | #[stable(feature = "rust1" , since = "1.0.0" )] |
490 | impl<'a, $elem, P> Iterator for $name<'a, $elem, P> |
491 | where |
492 | P: FnMut(&T) -> bool, |
493 | { |
494 | type Item = $iter_of; |
495 | |
496 | #[inline] |
497 | fn next(&mut self) -> Option<$iter_of> { |
498 | self.inner.next() |
499 | } |
500 | |
501 | #[inline] |
502 | fn size_hint(&self) -> (usize, Option<usize>) { |
503 | self.inner.size_hint() |
504 | } |
505 | } |
506 | |
507 | #[stable(feature = "fused" , since = "1.26.0" )] |
508 | impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool {} |
509 | }; |
510 | } |
511 | |