use core::iter::{FusedIterator, TrustedLen};
use core::mem::MaybeUninit;
use core::num::NonZero;
use core::ops::Try;
use core::{array, fmt, ptr};

use super::VecDeque;
use crate::alloc::{Allocator, Global};

/// An owning iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`into_iter`] method on [`VecDeque`]
/// (provided by the [`IntoIterator`] trait). See its documentation for more.
///
/// [`into_iter`]: VecDeque::into_iter
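///
/// # Examples
///
/// Consuming a deque from both ends:
///
/// ```
/// use std::collections::VecDeque;
///
/// let deque = VecDeque::from([1, 2, 3]);
/// let mut into_iter = deque.into_iter();
/// assert_eq!(into_iter.next(), Some(1));
/// assert_eq!(into_iter.next_back(), Some(3));
/// assert_eq!(into_iter.next(), Some(2));
/// assert_eq!(into_iter.next(), None);
/// ```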
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    inner: VecDeque<T, A>,
}

impl<T, A: Allocator> IntoIter<T, A> {
    pub(super) fn new(inner: VecDeque<T, A>) -> Self {
        IntoIter { inner }
    }

    pub(super) fn into_vecdeque(self) -> VecDeque<T, A> {
        self.inner
    }
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.inner).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop_front()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.inner.len();
        (len, Some(len))
    }

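    // Skipping from the front either drains the first `n` elements in place
    // (running their destructors) or, if fewer than `n` remain, empties the
    // deque and reports the shortfall as `Err`.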
    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let len = self.inner.len;
        let rem = if len < n {
            self.inner.clear();
            n - len
        } else {
            self.inner.drain(..n);
            0
        };
        NonZero::new(rem).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        self.inner.len
    }

    fn try_fold<B, F, R>(&mut self, mut init: B, mut f: F) -> R
    where
        F: FnMut(B, Self::Item) -> R,
        R: Try<Output = B>,
    {
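        // Elements are moved out of the deque's buffer with `ptr::read` while
        // the buffer still owns them. The guard's `Drop` commits `consumed`
        // back into `head`/`len`, so even if `f` panics, the deque will not
        // drop the elements that were already read out.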
        struct Guard<'a, T, A: Allocator> {
            deque: &'a mut VecDeque<T, A>,
            // `consumed <= deque.len` always holds.
            consumed: usize,
        }

        impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> {
            fn drop(&mut self) {
                self.deque.len -= self.consumed;
                self.deque.head = self.deque.to_physical_idx(self.consumed);
            }
        }

        let mut guard = Guard { deque: &mut self.inner, consumed: 0 };

        let (head, tail) = guard.deque.as_slices();

        init = head
            .iter()
            .map(|elem| {
                guard.consumed += 1;
                // SAFETY: Because we incremented `guard.consumed`, the
                // deque effectively forgot the element, so we can take
                // ownership.
                unsafe { ptr::read(elem) }
            })
            .try_fold(init, &mut f)?;

        tail.iter()
            .map(|elem| {
                guard.consumed += 1;
                // SAFETY: Same as above.
                unsafe { ptr::read(elem) }
            })
            .try_fold(init, &mut f)
    }

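    // Forward to `try_fold` with the never type `!` as the error: the `Ok`
    // arm is then exhaustive, so no unreachable `Err` branch is needed.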
    #[inline]
    fn fold<B, F>(mut self, init: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        match self.try_fold(init, |b, item| Ok::<B, !>(f(b, item))) {
            Ok(b) => b,
        }
    }

    #[inline]
    fn last(mut self) -> Option<Self::Item> {
        self.inner.pop_back()
    }

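    // Copies the next `N` elements out of the (possibly wrapped) ring buffer.
    // Three cases: the head slice alone holds `N` elements, head and tail
    // together hold `N`, or the whole deque is shorter than `N` and the
    // partial chunk is returned as `Err`.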
    fn next_chunk<const N: usize>(
        &mut self,
    ) -> Result<[Self::Item; N], array::IntoIter<Self::Item, N>> {
        let mut raw_arr = [const { MaybeUninit::uninit() }; N];
        let raw_arr_ptr = raw_arr.as_mut_ptr().cast();
        let (head, tail) = self.inner.as_slices();

        if head.len() >= N {
            // SAFETY: By manually adjusting the head and length of the deque, we effectively
            // make it forget the first `N` elements, so taking ownership of them is safe.
            unsafe { ptr::copy_nonoverlapping(head.as_ptr(), raw_arr_ptr, N) };
            self.inner.head = self.inner.to_physical_idx(N);
            self.inner.len -= N;
            // SAFETY: We initialized the entire array with items from `head`.
            return Ok(unsafe { raw_arr.transpose().assume_init() });
        }

        // SAFETY: Same argument as above.
        unsafe { ptr::copy_nonoverlapping(head.as_ptr(), raw_arr_ptr, head.len()) };
        let remaining = N - head.len();

        if tail.len() >= remaining {
            // SAFETY: Same argument as above.
            unsafe {
                ptr::copy_nonoverlapping(tail.as_ptr(), raw_arr_ptr.add(head.len()), remaining)
            };
            self.inner.head = self.inner.to_physical_idx(N);
            self.inner.len -= N;
            // SAFETY: We initialized the entire array with items from `head` and `tail`.
            Ok(unsafe { raw_arr.transpose().assume_init() })
        } else {
            // SAFETY: Same argument as above.
            unsafe {
                ptr::copy_nonoverlapping(tail.as_ptr(), raw_arr_ptr.add(head.len()), tail.len())
            };
            let init = head.len() + tail.len();
            // We completely drained all the deque's elements.
            self.inner.head = 0;
            self.inner.len = 0;
            // SAFETY: We copied all elements from both slices to the beginning of the array, so
            // the given range is initialized.
            Err(unsafe { array::IntoIter::new_unchecked(raw_arr, 0..init) })
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.inner.pop_back()
    }

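    // Mirror of `advance_by`: `truncate` drops the last `n` elements in
    // place; if fewer than `n` remain, the deque is emptied and the
    // shortfall reported as `Err`.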
    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        let len = self.inner.len;
        let rem = if len < n {
            self.inner.clear();
            n - len
        } else {
            self.inner.truncate(len - n);
            0
        };
        NonZero::new(rem).map_or(Ok(()), Err)
    }

    fn try_rfold<B, F, R>(&mut self, mut init: B, mut f: F) -> R
    where
        F: FnMut(B, Self::Item) -> R,
        R: Try<Output = B>,
    {
        struct Guard<'a, T, A: Allocator> {
            deque: &'a mut VecDeque<T, A>,
            // `consumed <= deque.len` always holds.
            consumed: usize,
        }

        impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> {
            fn drop(&mut self) {
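                // Elements were consumed from the back, so `head` is already
                // correct; only the length needs to shrink.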
                self.deque.len -= self.consumed;
            }
        }

        let mut guard = Guard { deque: &mut self.inner, consumed: 0 };

        let (head, tail) = guard.deque.as_slices();

        init = tail
            .iter()
            .map(|elem| {
                guard.consumed += 1;
                // SAFETY: See `try_fold`'s safety comment.
                unsafe { ptr::read(elem) }
            })
            .try_rfold(init, &mut f)?;

        head.iter()
            .map(|elem| {
                guard.consumed += 1;
                // SAFETY: Same as above.
                unsafe { ptr::read(elem) }
            })
            .try_rfold(init, &mut f)
    }

    #[inline]
    fn rfold<B, F>(mut self, init: B, mut f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        match self.try_rfold(init, |b, item| Ok::<B, !>(f(b, item))) {
            Ok(b) => b,
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    #[inline]
    fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

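// SAFETY: `size_hint` returns exact bounds computed from `VecDeque::len`,
// which is what `TrustedLen` requires.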
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
261 | |