use core::fmt;
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::{self, size_of, ManuallyDrop};

use core::ptr::{self, NonNull};
use core::slice::{self};

use crate::stable::addr;

use super::{Allocator, Global, RawVec};

#[cfg(not(no_global_oom_handling))]
use super::Vec;

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
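///
/// It is a double-ended iterator (see the `DoubleEndedIterator` impl below),
/// so it can also be consumed from both ends:
///
/// ```
/// let v = vec![0, 1, 2];
/// let mut iter = v.into_iter();
/// assert_eq!(iter.next(), Some(0));
/// assert_eq!(iter.next_back(), Some(2));
/// assert_eq!(iter.next(), Some(1));
/// assert_eq!(iter.next(), None);
/// ```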
pub struct IntoIter<T, A: Allocator = Global> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // the drop impl reconstructs a RawVec from buf, cap and alloc;
    // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}
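
// Field invariants (summarizing the implementation below):
// - `ptr..end` covers the elements that have not been yielded yet: `next`
//   advances `ptr` and `next_back` walks `end` backwards.
// - For zero-sized `T`, element-wise pointer arithmetic cannot make progress,
//   so the pointers are stepped one byte at a time and their address
//   difference is the remaining length (see `size_hint`).
// - `buf`, `cap` and `alloc` describe the whole original allocation; `Drop`
//   rebuilds a `RawVec` from them so the memory is released exactly once.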

impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[inline(always)]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }
}

impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

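// The iterator uniquely owns its elements and its allocation, so sending or
// sharing it across threads only ever sends/shares `T`s and the allocator;
// hence the bounds on `T` and `A` below.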
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}

unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline(always)]
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else if size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = self.ptr.cast::<u8>().wrapping_add(1).cast();

            // Make up a value of this ZST.
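            // (A zero-sized value occupies no bytes, and the vector logically
            // owned `len` such values, so conjuring one here reads no memory.)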
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

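    // For sized `T` the remaining length is the distance between the two
    // pointers in elements; for zero-sized `T` the pointers were stepped
    // byte-wise, so the raw address difference already is the element count.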
    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if size_of::<T>() == 0 {
            addr(self.end).wrapping_sub(addr(self.ptr))
        } else {
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        (exact, Some(exact))
    }

    #[inline(always)]
    fn count(self) -> usize {
        self.len()
    }
}

impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline(always)]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast();

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
}

impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {}

impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[doc(hidden)]
pub trait NonDrop {}

// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
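// (`Copy` and `Drop` are mutually exclusive in Rust, so `T: Copy` guarantees
// there is no destructor that the skipped drop-handling could miss.)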
impl<T: Copy> NonDrop for T {}

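// Cloning produces a fresh `IntoIter` over copies of only the *remaining*
// elements: items already consumed from `self` are not part of the clone.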
#[cfg(not(no_global_oom_handling))]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
        let mut vec = Vec::<T, A>::with_capacity_in(self.len(), (*self.alloc).clone());
        vec.extend(self.as_slice().iter().cloned());
        vec.into_iter()
    }
}

impl<T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

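        // Set up the guard *before* dropping the elements so that the backing
        // allocation is still released even if an element destructor panics.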
        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}