1 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |
2 | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
3 | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
4 | // option. This file may not be copied, modified, or distributed |
5 | // except according to those terms. |
6 | |
7 | //! Small vectors in various sizes. These store a certain number of elements inline, and fall back |
8 | //! to the heap for larger allocations. This can be a useful optimization for improving cache |
9 | //! locality and reducing allocator traffic for workloads that fit within the inline buffer. |
10 | //! |
11 | //! ## `no_std` support |
12 | //! |
13 | //! By default, `smallvec` does not depend on `std`. However, the optional |
14 | //! `write` feature implements the `std::io::Write` trait for vectors of `u8`. |
15 | //! When this feature is enabled, `smallvec` depends on `std`. |
16 | //! |
17 | //! ## Optional features |
18 | //! |
19 | //! ### `serde` |
20 | //! |
21 | //! When this optional dependency is enabled, `SmallVec` implements the `serde::Serialize` and |
22 | //! `serde::Deserialize` traits. |
23 | //! |
24 | //! ### `write` |
25 | //! |
26 | //! When this feature is enabled, `SmallVec<[u8; _]>` implements the `std::io::Write` trait. |
27 | //! This feature is not compatible with `#![no_std]` programs. |
28 | //! |
29 | //! ### `union` |
30 | //! |
31 | //! **This feature requires Rust 1.49.** |
32 | //! |
//! When the `union` feature is enabled, `smallvec` will track its state (inline or spilled)
//! without the use of an enum tag, reducing the size of the `smallvec` by one machine word.
35 | //! This means that there is potentially no space overhead compared to `Vec`. |
36 | //! Note that `smallvec` can still be larger than `Vec` if the inline buffer is larger than two |
37 | //! machine words. |
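//!
//! As a rough illustration (a sketch, assuming a 64-bit target; exact sizes depend on the
//! enabled features and on type layout):
//!
//! ```
//! use smallvec::SmallVec;
//! use core::mem::size_of;
//!
//! // With `union` enabled, a two-word inline buffer typically makes `SmallVec`
//! // the same size as `Vec` (three machine words). Without it, expect one extra word.
//! println!("SmallVec<[usize; 2]>: {} bytes", size_of::<SmallVec<[usize; 2]>>());
//! println!("Vec<usize>:           {} bytes", size_of::<Vec<usize>>());
//! ```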
38 | //! |
39 | //! To use this feature add `features = ["union"]` in the `smallvec` section of Cargo.toml. |
40 | //! Note that this feature requires Rust 1.49. |
41 | //! |
42 | //! Tracking issue: [rust-lang/rust#55149](https://github.com/rust-lang/rust/issues/55149) |
43 | //! |
44 | //! ### `const_generics` |
45 | //! |
46 | //! **This feature requires Rust 1.51.** |
47 | //! |
//! When this feature is enabled, `SmallVec` works with arrays of any size, not just a fixed
//! list of sizes.
50 | //! |
51 | //! ### `const_new` |
52 | //! |
53 | //! **This feature requires Rust 1.51.** |
54 | //! |
//! This feature exposes the functions [`SmallVec::new_const`], [`SmallVec::from_const`], and
//! [`smallvec_inline`], which enable a `SmallVec` to be initialized from a const context.
56 | //! For details, see the |
57 | //! [Rust Reference](https://doc.rust-lang.org/reference/const_eval.html#const-functions). |
58 | //! |
59 | //! ### `drain_filter` |
60 | //! |
61 | //! **This feature is unstable.** It may change to match the unstable `drain_filter` method in libstd. |
62 | //! |
63 | //! Enables the `drain_filter` method, which produces an iterator that calls a user-provided |
64 | //! closure to determine which elements of the vector to remove and yield from the iterator. |
65 | //! |
66 | //! ### `drain_keep_rest` |
67 | //! |
68 | //! **This feature is unstable.** It may change to match the unstable `drain_keep_rest` method in libstd. |
69 | //! |
70 | //! Enables the `DrainFilter::keep_rest` method. |
71 | //! |
72 | //! ### `specialization` |
73 | //! |
74 | //! **This feature is unstable and requires a nightly build of the Rust toolchain.** |
75 | //! |
76 | //! When this feature is enabled, `SmallVec::from(slice)` has improved performance for slices |
77 | //! of `Copy` types. (Without this feature, you can use `SmallVec::from_slice` to get optimal |
78 | //! performance for `Copy` types.) |
79 | //! |
80 | //! Tracking issue: [rust-lang/rust#31844](https://github.com/rust-lang/rust/issues/31844) |
81 | //! |
82 | //! ### `may_dangle` |
83 | //! |
84 | //! **This feature is unstable and requires a nightly build of the Rust toolchain.** |
85 | //! |
86 | //! This feature makes the Rust compiler less strict about use of vectors that contain borrowed |
87 | //! references. For details, see the |
88 | //! [Rustonomicon](https://doc.rust-lang.org/1.42.0/nomicon/dropck.html#an-escape-hatch). |
89 | //! |
90 | //! Tracking issue: [rust-lang/rust#34761](https://github.com/rust-lang/rust/issues/34761) |
91 | |
#![no_std]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(feature = "specialization", allow(incomplete_features))]
#![cfg_attr(feature = "specialization", feature(specialization))]
#![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]
#![cfg_attr(
    feature = "debugger_visualizer",
    feature(debugger_visualizer),
    debugger_visualizer(natvis_file = "../debug_metadata/smallvec.natvis")
)]
#![deny(missing_docs)]
103 | |
104 | #[doc (hidden)] |
105 | pub extern crate alloc; |
106 | |
107 | #[cfg (any(test, feature = "write" ))] |
108 | extern crate std; |
109 | |
110 | #[cfg (test)] |
111 | mod tests; |
112 | |
113 | #[allow (deprecated)] |
114 | use alloc::alloc::{Layout, LayoutErr}; |
115 | use alloc::boxed::Box; |
116 | use alloc::{vec, vec::Vec}; |
117 | use core::borrow::{Borrow, BorrowMut}; |
118 | use core::cmp; |
119 | use core::fmt; |
120 | use core::hash::{Hash, Hasher}; |
121 | use core::hint::unreachable_unchecked; |
122 | use core::iter::{repeat, FromIterator, FusedIterator, IntoIterator}; |
123 | use core::mem; |
124 | use core::mem::MaybeUninit; |
125 | use core::ops::{self, Range, RangeBounds}; |
126 | use core::ptr::{self, NonNull}; |
127 | use core::slice::{self, SliceIndex}; |
128 | |
129 | #[cfg (feature = "serde" )] |
130 | use serde::{ |
131 | de::{Deserialize, Deserializer, SeqAccess, Visitor}, |
132 | ser::{Serialize, SerializeSeq, Serializer}, |
133 | }; |
134 | |
135 | #[cfg (feature = "serde" )] |
136 | use core::marker::PhantomData; |
137 | |
138 | #[cfg (feature = "write" )] |
139 | use std::io; |
140 | |
141 | #[cfg (feature = "drain_keep_rest" )] |
142 | use core::mem::ManuallyDrop; |
143 | |
144 | /// Creates a [`SmallVec`] containing the arguments. |
145 | /// |
146 | /// `smallvec!` allows `SmallVec`s to be defined with the same syntax as array expressions. |
147 | /// There are two forms of this macro: |
148 | /// |
149 | /// - Create a [`SmallVec`] containing a given list of elements: |
150 | /// |
151 | /// ``` |
152 | /// # #[macro_use ] extern crate smallvec; |
153 | /// # use smallvec::SmallVec; |
154 | /// # fn main() { |
155 | /// let v: SmallVec<[_; 128]> = smallvec![1, 2, 3]; |
156 | /// assert_eq!(v[0], 1); |
157 | /// assert_eq!(v[1], 2); |
158 | /// assert_eq!(v[2], 3); |
159 | /// # } |
160 | /// ``` |
161 | /// |
162 | /// - Create a [`SmallVec`] from a given element and size: |
163 | /// |
164 | /// ``` |
165 | /// # #[macro_use ] extern crate smallvec; |
166 | /// # use smallvec::SmallVec; |
167 | /// # fn main() { |
168 | /// let v: SmallVec<[_; 0x8000]> = smallvec![1; 3]; |
169 | /// assert_eq!(v, SmallVec::from_buf([1, 1, 1])); |
170 | /// # } |
171 | /// ``` |
172 | /// |
173 | /// Note that unlike array expressions this syntax supports all elements |
174 | /// which implement [`Clone`] and the number of elements doesn't have to be |
175 | /// a constant. |
176 | /// |
177 | /// This will use `clone` to duplicate an expression, so one should be careful |
178 | /// using this with types having a nonstandard `Clone` implementation. For |
179 | /// example, `smallvec![Rc::new(1); 5]` will create a vector of five references |
180 | /// to the same boxed integer value, not five references pointing to independently |
181 | /// boxed integers. |
182 | |
183 | #[macro_export ] |
184 | macro_rules! smallvec { |
185 | // count helper: transform any expression into 1 |
186 | (@one $x:expr) => (1usize); |
187 | ($elem:expr; $n:expr) => ({ |
188 | $crate::SmallVec::from_elem($elem, $n) |
189 | }); |
190 | ($($x:expr),*$(,)*) => ({ |
191 | let count = 0usize $(+ $crate::smallvec!(@one $x))*; |
192 | #[allow(unused_mut)] |
193 | let mut vec = $crate::SmallVec::new(); |
194 | if count <= vec.inline_size() { |
195 | $(vec.push($x);)* |
196 | vec |
197 | } else { |
198 | $crate::SmallVec::from_vec($crate::alloc::vec![$($x,)*]) |
199 | } |
200 | }); |
201 | } |
202 | |
203 | /// Creates an inline [`SmallVec`] containing the arguments. This macro is enabled by the feature `const_new`. |
204 | /// |
205 | /// `smallvec_inline!` allows `SmallVec`s to be defined with the same syntax as array expressions in `const` contexts. |
206 | /// The inline storage `A` will always be an array of the size specified by the arguments. |
207 | /// There are two forms of this macro: |
208 | /// |
209 | /// - Create a [`SmallVec`] containing a given list of elements: |
210 | /// |
211 | /// ``` |
212 | /// # #[macro_use] extern crate smallvec; |
213 | /// # use smallvec::SmallVec; |
214 | /// # fn main() { |
215 | /// const V: SmallVec<[i32; 3]> = smallvec_inline![1, 2, 3]; |
216 | /// assert_eq!(V[0], 1); |
217 | /// assert_eq!(V[1], 2); |
218 | /// assert_eq!(V[2], 3); |
219 | /// # } |
220 | /// ``` |
221 | /// |
222 | /// - Create a [`SmallVec`] from a given element and size: |
223 | /// |
224 | /// ``` |
225 | /// # #[macro_use] extern crate smallvec; |
226 | /// # use smallvec::SmallVec; |
227 | /// # fn main() { |
228 | /// const V: SmallVec<[i32; 3]> = smallvec_inline![1; 3]; |
229 | /// assert_eq!(V, SmallVec::from_buf([1, 1, 1])); |
230 | /// # } |
231 | /// ``` |
232 | /// |
233 | /// Note that the behavior mimics that of array expressions, in contrast to [`smallvec`]. |
234 | #[cfg (feature = "const_new" )] |
235 | #[cfg_attr (docsrs, doc(cfg(feature = "const_new" )))] |
236 | #[macro_export ] |
237 | macro_rules! smallvec_inline { |
238 | // count helper: transform any expression into 1 |
239 | (@one $x:expr) => (1usize); |
240 | ($elem:expr; $n:expr) => ({ |
241 | $crate::SmallVec::<[_; $n]>::from_const([$elem; $n]) |
242 | }); |
243 | ($($x:expr),+ $(,)?) => ({ |
244 | const N: usize = 0usize $(+ $crate::smallvec_inline!(@one $x))*; |
245 | $crate::SmallVec::<[_; N]>::from_const([$($x,)*]) |
246 | }); |
247 | } |
248 | |
249 | /// `panic!()` in debug builds, optimization hint in release. |
250 | #[cfg (not(feature = "union" ))] |
251 | macro_rules! debug_unreachable { |
252 | () => { |
253 | debug_unreachable!("entered unreachable code" ) |
254 | }; |
255 | ($e:expr) => { |
256 | if cfg!(debug_assertions) { |
257 | panic!($e); |
258 | } else { |
259 | unreachable_unchecked(); |
260 | } |
261 | }; |
262 | } |
263 | |
264 | /// Trait to be implemented by a collection that can be extended from a slice |
265 | /// |
266 | /// ## Example |
267 | /// |
268 | /// ```rust |
269 | /// use smallvec::{ExtendFromSlice, SmallVec}; |
270 | /// |
271 | /// fn initialize<V: ExtendFromSlice<u8>>(v: &mut V) { |
272 | /// v.extend_from_slice(b"Test!" ); |
273 | /// } |
274 | /// |
275 | /// let mut vec = Vec::new(); |
276 | /// initialize(&mut vec); |
277 | /// assert_eq!(&vec, b"Test!" ); |
278 | /// |
279 | /// let mut small_vec = SmallVec::<[u8; 8]>::new(); |
280 | /// initialize(&mut small_vec); |
281 | /// assert_eq!(&small_vec as &[_], b"Test!" ); |
282 | /// ``` |
283 | #[doc (hidden)] |
284 | #[deprecated ] |
285 | pub trait ExtendFromSlice<T> { |
286 | /// Extends a collection from a slice of its element type |
287 | fn extend_from_slice(&mut self, other: &[T]); |
288 | } |
289 | |
290 | #[allow (deprecated)] |
291 | impl<T: Clone> ExtendFromSlice<T> for Vec<T> { |
292 | fn extend_from_slice(&mut self, other: &[T]) { |
293 | Vec::extend_from_slice(self, other) |
294 | } |
295 | } |
296 | |
297 | /// Error type for APIs with fallible heap allocation |
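///
/// A brief sketch of where this error surfaces, using the fallible `try_reserve`:
///
/// ```
/// use smallvec::{CollectionAllocErr, SmallVec};
///
/// let mut v: SmallVec<[u8; 4]> = SmallVec::new();
/// match v.try_reserve(usize::MAX) {
///     Err(CollectionAllocErr::CapacityOverflow) => {} // the size computation overflowed
///     other => panic!("unexpected result: {:?}", other),
/// }
/// ```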
298 | #[derive (Debug)] |
299 | pub enum CollectionAllocErr { |
300 | /// Overflow `usize::MAX` or other error during size computation |
301 | CapacityOverflow, |
    /// The allocator returned an error
303 | AllocErr { |
304 | /// The layout that was passed to the allocator |
305 | layout: Layout, |
306 | }, |
307 | } |
308 | |
309 | impl fmt::Display for CollectionAllocErr { |
310 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
311 | write!(f, "Allocation error: {:?}" , self) |
312 | } |
313 | } |
314 | |
315 | #[allow (deprecated)] |
316 | impl From<LayoutErr> for CollectionAllocErr { |
317 | fn from(_: LayoutErr) -> Self { |
318 | CollectionAllocErr::CapacityOverflow |
319 | } |
320 | } |
321 | |
322 | fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T { |
323 | match result { |
        Ok(x) => x,
        Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
        Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout),
327 | } |
328 | } |
329 | |
330 | /// FIXME: use `Layout::array` when we require a Rust version where it’s stable |
331 | /// https://github.com/rust-lang/rust/issues/55724 |
332 | fn layout_array<T>(n: usize) -> Result<Layout, CollectionAllocErr> { |
    let size = mem::size_of::<T>()
        .checked_mul(n)
        .ok_or(CollectionAllocErr::CapacityOverflow)?;
    let align = mem::align_of::<T>();
337 | Layout::from_size_align(size, align).map_err(|_| CollectionAllocErr::CapacityOverflow) |
338 | } |
339 | |
340 | unsafe fn deallocate<T>(ptr: NonNull<T>, capacity: usize) { |
341 | // This unwrap should succeed since the same did when allocating. |
342 | let layout: Layout = layout_array::<T>(capacity).unwrap(); |
343 | alloc::alloc::dealloc(ptr.as_ptr() as *mut u8, layout) |
344 | } |
345 | |
346 | /// An iterator that removes the items from a `SmallVec` and yields them by value. |
347 | /// |
348 | /// Returned from [`SmallVec::drain`][1]. |
349 | /// |
350 | /// [1]: struct.SmallVec.html#method.drain |
351 | pub struct Drain<'a, T: 'a + Array> { |
352 | tail_start: usize, |
353 | tail_len: usize, |
354 | iter: slice::Iter<'a, T::Item>, |
355 | vec: NonNull<SmallVec<T>>, |
356 | } |
357 | |
358 | impl<'a, T: 'a + Array> fmt::Debug for Drain<'a, T> |
359 | where |
360 | T::Item: fmt::Debug, |
361 | { |
362 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
364 | } |
365 | } |
366 | |
367 | unsafe impl<'a, T: Sync + Array> Sync for Drain<'a, T> {} |
368 | unsafe impl<'a, T: Send + Array> Send for Drain<'a, T> {} |
369 | |
370 | impl<'a, T: 'a + Array> Iterator for Drain<'a, T> { |
371 | type Item = T::Item; |
372 | |
373 | #[inline ] |
374 | fn next(&mut self) -> Option<T::Item> { |
375 | self.iter |
376 | .next() |
            .map(|reference| unsafe { ptr::read(reference) })
378 | } |
379 | |
380 | #[inline ] |
381 | fn size_hint(&self) -> (usize, Option<usize>) { |
382 | self.iter.size_hint() |
383 | } |
384 | } |
385 | |
386 | impl<'a, T: 'a + Array> DoubleEndedIterator for Drain<'a, T> { |
387 | #[inline ] |
388 | fn next_back(&mut self) -> Option<T::Item> { |
389 | self.iter |
390 | .next_back() |
            .map(|reference| unsafe { ptr::read(reference) })
392 | } |
393 | } |
394 | |
395 | impl<'a, T: Array> ExactSizeIterator for Drain<'a, T> { |
396 | #[inline ] |
397 | fn len(&self) -> usize { |
398 | self.iter.len() |
399 | } |
400 | } |
401 | |
402 | impl<'a, T: Array> FusedIterator for Drain<'a, T> {} |
403 | |
404 | impl<'a, T: 'a + Array> Drop for Drain<'a, T> { |
405 | fn drop(&mut self) { |
406 | self.for_each(drop); |
407 | |
408 | if self.tail_len > 0 { |
409 | unsafe { |
                let source_vec = self.vec.as_mut();
411 | |
412 | // memmove back untouched tail, update to new length |
                let start = source_vec.len();
                let tail = self.tail_start;
                if tail != start {
                    // as_mut_ptr creates a &mut, invalidating other pointers.
                    // This pattern avoids calling it with a pointer already present.
                    let ptr = source_vec.as_mut_ptr();
                    let src = ptr.add(tail);
                    let dst = ptr.add(start);
                    ptr::copy(src, dst, self.tail_len);
                }
                source_vec.set_len(start + self.tail_len);
424 | } |
425 | } |
426 | } |
427 | } |
428 | |
429 | #[cfg (feature = "drain_filter" )] |
430 | /// An iterator which uses a closure to determine if an element should be removed. |
431 | /// |
432 | /// Returned from [`SmallVec::drain_filter`][1]. |
433 | /// |
434 | /// [1]: struct.SmallVec.html#method.drain_filter |
435 | pub struct DrainFilter<'a, T, F> |
436 | where |
437 | F: FnMut(&mut T::Item) -> bool, |
438 | T: Array, |
439 | { |
440 | vec: &'a mut SmallVec<T>, |
441 | /// The index of the item that will be inspected by the next call to `next`. |
442 | idx: usize, |
443 | /// The number of items that have been drained (removed) thus far. |
444 | del: usize, |
445 | /// The original length of `vec` prior to draining. |
446 | old_len: usize, |
447 | /// The filter test predicate. |
448 | pred: F, |
449 | /// A flag that indicates a panic has occurred in the filter test predicate. |
450 | /// This is used as a hint in the drop implementation to prevent consumption |
451 | /// of the remainder of the `DrainFilter`. Any unprocessed items will be |
452 | /// backshifted in the `vec`, but no further items will be dropped or |
453 | /// tested by the filter predicate. |
454 | panic_flag: bool, |
455 | } |
456 | |
457 | #[cfg (feature = "drain_filter" )] |
458 | impl <T, F> fmt::Debug for DrainFilter<'_, T, F> |
459 | where |
460 | F: FnMut(&mut T::Item) -> bool, |
461 | T: Array, |
462 | T::Item: fmt::Debug, |
463 | { |
464 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
465 | f.debug_tuple("DrainFilter" ).field(&self.vec.as_slice()).finish() |
466 | } |
467 | } |
468 | |
469 | #[cfg (feature = "drain_filter" )] |
470 | impl <T, F> Iterator for DrainFilter<'_, T, F> |
471 | where |
472 | F: FnMut(&mut T::Item) -> bool, |
473 | T: Array, |
474 | { |
475 | type Item = T::Item; |
476 | |
477 | fn next(&mut self) -> Option<T::Item> |
478 | { |
479 | unsafe { |
480 | while self.idx < self.old_len { |
481 | let i = self.idx; |
482 | let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len); |
483 | self.panic_flag = true; |
484 | let drained = (self.pred)(&mut v[i]); |
485 | self.panic_flag = false; |
486 | // Update the index *after* the predicate is called. If the index |
487 | // is updated prior and the predicate panics, the element at this |
488 | // index would be leaked. |
489 | self.idx += 1; |
490 | if drained { |
491 | self.del += 1; |
492 | return Some(ptr::read(&v[i])); |
493 | } else if self.del > 0 { |
494 | let del = self.del; |
495 | let src: *const Self::Item = &v[i]; |
496 | let dst: *mut Self::Item = &mut v[i - del]; |
497 | ptr::copy_nonoverlapping(src, dst, 1); |
498 | } |
499 | } |
500 | None |
501 | } |
502 | } |
503 | |
504 | fn size_hint(&self) -> (usize, Option<usize>) { |
505 | (0, Some(self.old_len - self.idx)) |
506 | } |
507 | } |
508 | |
509 | #[cfg (feature = "drain_filter" )] |
510 | impl <T, F> Drop for DrainFilter<'_, T, F> |
511 | where |
512 | F: FnMut(&mut T::Item) -> bool, |
513 | T: Array, |
514 | { |
515 | fn drop(&mut self) { |
516 | struct BackshiftOnDrop<'a, 'b, T, F> |
517 | where |
518 | F: FnMut(&mut T::Item) -> bool, |
519 | T: Array |
520 | { |
521 | drain: &'b mut DrainFilter<'a, T, F>, |
522 | } |
523 | |
524 | impl<'a, 'b, T, F> Drop for BackshiftOnDrop<'a, 'b, T, F> |
525 | where |
526 | F: FnMut(&mut T::Item) -> bool, |
527 | T: Array |
528 | { |
529 | fn drop(&mut self) { |
530 | unsafe { |
531 | if self.drain.idx < self.drain.old_len && self.drain.del > 0 { |
532 | // This is a pretty messed up state, and there isn't really an |
533 | // obviously right thing to do. We don't want to keep trying |
534 | // to execute `pred`, so we just backshift all the unprocessed |
535 | // elements and tell the vec that they still exist. The backshift |
536 | // is required to prevent a double-drop of the last successfully |
537 | // drained item prior to a panic in the predicate. |
538 | let ptr = self.drain.vec.as_mut_ptr(); |
539 | let src = ptr.add(self.drain.idx); |
540 | let dst = src.sub(self.drain.del); |
541 | let tail_len = self.drain.old_len - self.drain.idx; |
542 | src.copy_to(dst, tail_len); |
543 | } |
544 | self.drain.vec.set_len(self.drain.old_len - self.drain.del); |
545 | } |
546 | } |
547 | } |
548 | |
549 | let backshift = BackshiftOnDrop { drain: self }; |
550 | |
551 | // Attempt to consume any remaining elements if the filter predicate |
552 | // has not yet panicked. We'll backshift any remaining elements |
553 | // whether we've already panicked or if the consumption here panics. |
554 | if !backshift.drain.panic_flag { |
555 | backshift.drain.for_each(drop); |
556 | } |
557 | } |
558 | } |
559 | |
560 | #[cfg (feature = "drain_keep_rest" )] |
561 | impl <T, F> DrainFilter<'_, T, F> |
562 | where |
563 | F: FnMut(&mut T::Item) -> bool, |
564 | T: Array |
565 | { |
566 | /// Keep unyielded elements in the source `Vec`. |
567 | /// |
568 | /// # Examples |
569 | /// |
570 | /// ``` |
571 | /// # use smallvec::{smallvec, SmallVec}; |
572 | /// |
573 | /// let mut vec: SmallVec<[char; 2]> = smallvec!['a', 'b', 'c']; |
574 | /// let mut drain = vec.drain_filter(|_| true); |
575 | /// |
576 | /// assert_eq!(drain.next().unwrap(), 'a'); |
577 | /// |
578 | /// // This call keeps 'b' and 'c' in the vec. |
579 | /// drain.keep_rest(); |
580 | /// |
581 | /// // If we wouldn't call `keep_rest()`, |
582 | /// // `vec` would be empty. |
583 | /// assert_eq!(vec, SmallVec::<[char; 2]>::from_slice(&['b', 'c'])); |
584 | /// ``` |
585 | pub fn keep_rest(self) |
586 | { |
587 | // At this moment layout looks like this: |
588 | // |
589 | // _____________________/-- old_len |
590 | // / \ |
591 | // [kept] [yielded] [tail] |
592 | // \_______/ ^-- idx |
593 | // \-- del |
594 | // |
595 | // Normally `Drop` impl would drop [tail] (via .for_each(drop), ie still calling `pred`) |
596 | // |
597 | // 1. Move [tail] after [kept] |
598 | // 2. Update length of the original vec to `old_len - del` |
599 | // a. In case of ZST, this is the only thing we want to do |
600 | // 3. Do *not* drop self, as everything is put in a consistent state already, there is nothing to do |
601 | let mut this = ManuallyDrop::new(self); |
602 | |
603 | unsafe { |
604 | // ZSTs have no identity, so we don't need to move them around. |
605 | let needs_move = mem::size_of::<T>() != 0; |
606 | |
607 | if needs_move && this.idx < this.old_len && this.del > 0 { |
608 | let ptr = this.vec.as_mut_ptr(); |
609 | let src = ptr.add(this.idx); |
610 | let dst = src.sub(this.del); |
611 | let tail_len = this.old_len - this.idx; |
612 | src.copy_to(dst, tail_len); |
613 | } |
614 | |
615 | let new_len = this.old_len - this.del; |
616 | this.vec.set_len(new_len); |
617 | } |
618 | } |
619 | } |
620 | |
621 | #[cfg (feature = "union" )] |
622 | union SmallVecData<A: Array> { |
623 | inline: core::mem::ManuallyDrop<MaybeUninit<A>>, |
624 | heap: (NonNull<A::Item>, usize), |
625 | } |
626 | |
627 | #[cfg (all(feature = "union" , feature = "const_new" ))] |
628 | impl<T, const N: usize> SmallVecData<[T; N]> { |
629 | #[cfg_attr (docsrs, doc(cfg(feature = "const_new" )))] |
630 | #[inline ] |
631 | const fn from_const(inline: MaybeUninit<[T; N]>) -> Self { |
632 | SmallVecData { |
633 | inline: core::mem::ManuallyDrop::new(inline), |
634 | } |
635 | } |
636 | } |
637 | |
638 | #[cfg (feature = "union" )] |
639 | impl<A: Array> SmallVecData<A> { |
640 | #[inline ] |
641 | unsafe fn inline(&self) -> ConstNonNull<A::Item> { |
642 | ConstNonNull::new(self.inline.as_ptr() as *const A::Item).unwrap() |
643 | } |
644 | #[inline ] |
645 | unsafe fn inline_mut(&mut self) -> NonNull<A::Item> { |
646 | NonNull::new(self.inline.as_mut_ptr() as *mut A::Item).unwrap() |
647 | } |
648 | #[inline ] |
649 | fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> { |
650 | SmallVecData { |
651 | inline: core::mem::ManuallyDrop::new(inline), |
652 | } |
653 | } |
654 | #[inline ] |
655 | unsafe fn into_inline(self) -> MaybeUninit<A> { |
656 | core::mem::ManuallyDrop::into_inner(self.inline) |
657 | } |
658 | #[inline ] |
659 | unsafe fn heap(&self) -> (ConstNonNull<A::Item>, usize) { |
660 | (ConstNonNull(self.heap.0), self.heap.1) |
661 | } |
662 | #[inline ] |
663 | unsafe fn heap_mut(&mut self) -> (NonNull<A::Item>, &mut usize) { |
664 | let h = &mut self.heap; |
665 | (h.0, &mut h.1) |
666 | } |
667 | #[inline ] |
668 | fn from_heap(ptr: NonNull<A::Item>, len: usize) -> SmallVecData<A> { |
669 | SmallVecData { heap: (ptr, len) } |
670 | } |
671 | } |
672 | |
673 | #[cfg (not(feature = "union" ))] |
674 | enum SmallVecData<A: Array> { |
675 | Inline(MaybeUninit<A>), |
676 | // Using NonNull and NonZero here allows to reduce size of `SmallVec`. |
677 | Heap { |
678 | // Since we never allocate on heap |
679 | // unless our capacity is bigger than inline capacity |
680 | // heap capacity cannot be less than 1. |
681 | // Therefore, pointer cannot be null too. |
682 | ptr: NonNull<A::Item>, |
683 | len: usize, |
684 | }, |
685 | } |
686 | |
687 | #[cfg (all(not(feature = "union" ), feature = "const_new" ))] |
688 | impl<T, const N: usize> SmallVecData<[T; N]> { |
689 | #[cfg_attr (docsrs, doc(cfg(feature = "const_new" )))] |
690 | #[inline ] |
691 | const fn from_const(inline: MaybeUninit<[T; N]>) -> Self { |
692 | SmallVecData::Inline(inline) |
693 | } |
694 | } |
695 | |
696 | #[cfg (not(feature = "union" ))] |
697 | impl<A: Array> SmallVecData<A> { |
698 | #[inline ] |
699 | unsafe fn inline(&self) -> ConstNonNull<A::Item> { |
700 | match self { |
701 | SmallVecData::Inline(a) => ConstNonNull::new(a.as_ptr() as *const A::Item).unwrap(), |
702 | _ => debug_unreachable!(), |
703 | } |
704 | } |
705 | #[inline ] |
706 | unsafe fn inline_mut(&mut self) -> NonNull<A::Item> { |
707 | match self { |
708 | SmallVecData::Inline(a) => NonNull::new(a.as_mut_ptr() as *mut A::Item).unwrap(), |
709 | _ => debug_unreachable!(), |
710 | } |
711 | } |
712 | #[inline ] |
713 | fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> { |
714 | SmallVecData::Inline(inline) |
715 | } |
716 | #[inline ] |
717 | unsafe fn into_inline(self) -> MaybeUninit<A> { |
718 | match self { |
719 | SmallVecData::Inline(a) => a, |
720 | _ => debug_unreachable!(), |
721 | } |
722 | } |
723 | #[inline ] |
724 | unsafe fn heap(&self) -> (ConstNonNull<A::Item>, usize) { |
725 | match self { |
726 | SmallVecData::Heap { ptr, len } => (ConstNonNull(*ptr), *len), |
727 | _ => debug_unreachable!(), |
728 | } |
729 | } |
730 | #[inline ] |
731 | unsafe fn heap_mut(&mut self) -> (NonNull<A::Item>, &mut usize) { |
732 | match self { |
733 | SmallVecData::Heap { ptr, len } => (*ptr, len), |
734 | _ => debug_unreachable!(), |
735 | } |
736 | } |
737 | #[inline ] |
738 | fn from_heap(ptr: NonNull<A::Item>, len: usize) -> SmallVecData<A> { |
739 | SmallVecData::Heap { ptr, len } |
740 | } |
741 | } |
742 | |
743 | unsafe impl<A: Array + Send> Send for SmallVecData<A> {} |
744 | unsafe impl<A: Array + Sync> Sync for SmallVecData<A> {} |
745 | |
746 | /// A `Vec`-like container that can store a small number of elements inline. |
747 | /// |
748 | /// `SmallVec` acts like a vector, but can store a limited amount of data inline within the |
749 | /// `SmallVec` struct rather than in a separate allocation. If the data exceeds this limit, the |
750 | /// `SmallVec` will "spill" its data onto the heap, allocating a new buffer to hold it. |
751 | /// |
752 | /// The amount of data that a `SmallVec` can store inline depends on its backing store. The backing |
753 | /// store can be any type that implements the `Array` trait; usually it is a small fixed-sized |
754 | /// array. For example a `SmallVec<[u64; 8]>` can hold up to eight 64-bit integers inline. |
755 | /// |
756 | /// ## Example |
757 | /// |
758 | /// ```rust |
759 | /// use smallvec::SmallVec; |
760 | /// let mut v = SmallVec::<[u8; 4]>::new(); // initialize an empty vector |
761 | /// |
762 | /// // The vector can hold up to 4 items without spilling onto the heap. |
763 | /// v.extend(0..4); |
764 | /// assert_eq!(v.len(), 4); |
765 | /// assert!(!v.spilled()); |
766 | /// |
767 | /// // Pushing another element will force the buffer to spill: |
768 | /// v.push(4); |
769 | /// assert_eq!(v.len(), 5); |
770 | /// assert!(v.spilled()); |
771 | /// ``` |
772 | pub struct SmallVec<A: Array> { |
773 | // The capacity field is used to determine which of the storage variants is active: |
774 | // If capacity <= Self::inline_capacity() then the inline variant is used and capacity holds the current length of the vector (number of elements actually in use). |
775 | // If capacity > Self::inline_capacity() then the heap variant is used and capacity holds the size of the memory allocation. |
776 | capacity: usize, |
777 | data: SmallVecData<A>, |
778 | } |
779 | |
780 | impl<A: Array> SmallVec<A> { |
781 | /// Construct an empty vector |
782 | #[inline ] |
783 | pub fn new() -> SmallVec<A> { |
784 | // Try to detect invalid custom implementations of `Array`. Hopefully, |
785 | // this check should be optimized away entirely for valid ones. |
786 | assert!( |
787 | mem::size_of::<A>() == A::size() * mem::size_of::<A::Item>() |
788 | && mem::align_of::<A>() >= mem::align_of::<A::Item>() |
789 | ); |
790 | SmallVec { |
791 | capacity: 0, |
792 | data: SmallVecData::from_inline(MaybeUninit::uninit()), |
793 | } |
794 | } |
795 | |
796 | /// Construct an empty vector with enough capacity pre-allocated to store at least `n` |
797 | /// elements. |
798 | /// |
799 | /// Will create a heap allocation only if `n` is larger than the inline capacity. |
800 | /// |
801 | /// ``` |
802 | /// # use smallvec::SmallVec; |
803 | /// |
804 | /// let v: SmallVec<[u8; 3]> = SmallVec::with_capacity(100); |
805 | /// |
806 | /// assert!(v.is_empty()); |
807 | /// assert!(v.capacity() >= 100); |
808 | /// ``` |
809 | #[inline ] |
810 | pub fn with_capacity(n: usize) -> Self { |
811 | let mut v = SmallVec::new(); |
812 | v.reserve_exact(n); |
813 | v |
814 | } |
815 | |
816 | /// Construct a new `SmallVec` from a `Vec<A::Item>`. |
817 | /// |
818 | /// Elements will be copied to the inline buffer if vec.capacity() <= Self::inline_capacity(). |
819 | /// |
820 | /// ```rust |
821 | /// use smallvec::SmallVec; |
822 | /// |
823 | /// let vec = vec![1, 2, 3, 4, 5]; |
824 | /// let small_vec: SmallVec<[_; 3]> = SmallVec::from_vec(vec); |
825 | /// |
826 | /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]); |
827 | /// ``` |
828 | #[inline ] |
829 | pub fn from_vec(mut vec: Vec<A::Item>) -> SmallVec<A> { |
830 | if vec.capacity() <= Self::inline_capacity() { |
831 | // Cannot use Vec with smaller capacity |
832 | // because we use value of `Self::capacity` field as indicator. |
833 | unsafe { |
834 | let mut data = SmallVecData::<A>::from_inline(MaybeUninit::uninit()); |
835 | let len = vec.len(); |
836 | vec.set_len(0); |
837 | ptr::copy_nonoverlapping(vec.as_ptr(), data.inline_mut().as_ptr(), len); |
838 | |
839 | SmallVec { |
840 | capacity: len, |
841 | data, |
842 | } |
843 | } |
844 | } else { |
845 | let (ptr, cap, len) = (vec.as_mut_ptr(), vec.capacity(), vec.len()); |
846 | mem::forget(vec); |
847 | let ptr = NonNull::new(ptr) |
848 | // See docs: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.as_mut_ptr |
849 | .expect("Cannot be null by `Vec` invariant" ); |
850 | |
851 | SmallVec { |
852 | capacity: cap, |
853 | data: SmallVecData::from_heap(ptr, len), |
854 | } |
855 | } |
856 | } |
857 | |
858 | /// Constructs a new `SmallVec` on the stack from an `A` without |
859 | /// copying elements. |
860 | /// |
861 | /// ```rust |
862 | /// use smallvec::SmallVec; |
863 | /// |
864 | /// let buf = [1, 2, 3, 4, 5]; |
865 | /// let small_vec: SmallVec<_> = SmallVec::from_buf(buf); |
866 | /// |
867 | /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]); |
868 | /// ``` |
869 | #[inline ] |
870 | pub fn from_buf(buf: A) -> SmallVec<A> { |
871 | SmallVec { |
872 | capacity: A::size(), |
873 | data: SmallVecData::from_inline(MaybeUninit::new(buf)), |
874 | } |
875 | } |
876 | |
877 | /// Constructs a new `SmallVec` on the stack from an `A` without |
878 | /// copying elements. Also sets the length, which must be less or |
879 | /// equal to the size of `buf`. |
880 | /// |
881 | /// ```rust |
882 | /// use smallvec::SmallVec; |
883 | /// |
884 | /// let buf = [1, 2, 3, 4, 5, 0, 0, 0]; |
885 | /// let small_vec: SmallVec<_> = SmallVec::from_buf_and_len(buf, 5); |
886 | /// |
887 | /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]); |
888 | /// ``` |
889 | #[inline ] |
890 | pub fn from_buf_and_len(buf: A, len: usize) -> SmallVec<A> { |
891 | assert!(len <= A::size()); |
892 | unsafe { SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), len) } |
893 | } |
894 | |
895 | /// Constructs a new `SmallVec` on the stack from an `A` without |
896 | /// copying elements. Also sets the length. The user is responsible |
897 | /// for ensuring that `len <= A::size()`. |
898 | /// |
899 | /// ```rust |
900 | /// use smallvec::SmallVec; |
901 | /// use std::mem::MaybeUninit; |
902 | /// |
903 | /// let buf = [1, 2, 3, 4, 5, 0, 0, 0]; |
904 | /// let small_vec: SmallVec<_> = unsafe { |
905 | /// SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), 5) |
906 | /// }; |
907 | /// |
908 | /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]); |
909 | /// ``` |
910 | #[inline ] |
911 | pub unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit<A>, len: usize) -> SmallVec<A> { |
912 | SmallVec { |
913 | capacity: len, |
914 | data: SmallVecData::from_inline(buf), |
915 | } |
916 | } |
917 | |
918 | /// Sets the length of a vector. |
919 | /// |
920 | /// This will explicitly set the size of the vector, without actually |
921 | /// modifying its buffers, so it is up to the caller to ensure that the |
922 | /// vector is actually the specified size. |
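    ///
    /// # Example
    ///
    /// A sketch of shrinking the logical length; this is sound here only because
    /// `i32` has no destructor, so the truncated elements need not be dropped:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
    /// unsafe { v.set_len(1) };
    /// assert_eq!(&*v, &[1]);
    /// ```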
923 | pub unsafe fn set_len(&mut self, new_len: usize) { |
924 | let (_, len_ptr, _) = self.triple_mut(); |
925 | *len_ptr = new_len; |
926 | } |
927 | |
928 | /// The maximum number of elements this vector can hold inline |
929 | #[inline ] |
930 | fn inline_capacity() -> usize { |
931 | if mem::size_of::<A::Item>() > 0 { |
932 | A::size() |
933 | } else { |
934 | // For zero-size items code like `ptr.add(offset)` always returns the same pointer. |
935 | // Therefore all items are at the same address, |
936 | // and any array size has capacity for infinitely many items. |
937 | // The capacity is limited by the bit width of the length field. |
938 | // |
939 | // `Vec` also does this: |
940 | // https://github.com/rust-lang/rust/blob/1.44.0/src/liballoc/raw_vec.rs#L186 |
941 | // |
942 | // In our case, this also ensures that a smallvec of zero-size items never spills, |
943 | // and we never try to allocate zero bytes which `std::alloc::alloc` disallows. |
944 | core::usize::MAX |
945 | } |
946 | } |
947 | |
948 | /// The maximum number of elements this vector can hold inline |
949 | #[inline ] |
950 | pub fn inline_size(&self) -> usize { |
951 | Self::inline_capacity() |
952 | } |
953 | |
954 | /// The number of elements stored in the vector |
955 | #[inline ] |
956 | pub fn len(&self) -> usize { |
957 | self.triple().1 |
958 | } |
959 | |
960 | /// Returns `true` if the vector is empty |
961 | #[inline ] |
962 | pub fn is_empty(&self) -> bool { |
963 | self.len() == 0 |
964 | } |
965 | |
966 | /// The number of items the vector can hold without reallocating |
967 | #[inline ] |
968 | pub fn capacity(&self) -> usize { |
969 | self.triple().2 |
970 | } |
971 | |
972 | /// Returns a tuple with (data ptr, len, capacity) |
973 | /// Useful to get all SmallVec properties with a single check of the current storage variant. |
974 | #[inline ] |
975 | fn triple(&self) -> (ConstNonNull<A::Item>, usize, usize) { |
976 | unsafe { |
977 | if self.spilled() { |
978 | let (ptr, len) = self.data.heap(); |
979 | (ptr, len, self.capacity) |
980 | } else { |
981 | (self.data.inline(), self.capacity, Self::inline_capacity()) |
982 | } |
983 | } |
984 | } |
985 | |
986 | /// Returns a tuple with (data ptr, len ptr, capacity) |
987 | #[inline ] |
988 | fn triple_mut(&mut self) -> (NonNull<A::Item>, &mut usize, usize) { |
989 | unsafe { |
990 | if self.spilled() { |
991 | let (ptr, len_ptr) = self.data.heap_mut(); |
992 | (ptr, len_ptr, self.capacity) |
993 | } else { |
994 | ( |
995 | self.data.inline_mut(), |
996 | &mut self.capacity, |
997 | Self::inline_capacity(), |
998 | ) |
999 | } |
1000 | } |
1001 | } |
1002 | |
1003 | /// Returns `true` if the data has spilled into a separate heap-allocated buffer. |
1004 | #[inline ] |
1005 | pub fn spilled(&self) -> bool { |
1006 | self.capacity > Self::inline_capacity() |
1007 | } |
1008 | |
1009 | /// Creates a draining iterator that removes the specified range in the vector |
1010 | /// and yields the removed items. |
1011 | /// |
1012 | /// Note 1: The element range is removed even if the iterator is only |
1013 | /// partially consumed or not consumed at all. |
1014 | /// |
1015 | /// Note 2: It is unspecified how many elements are removed from the vector |
1016 | /// if the `Drain` value is leaked. |
1017 | /// |
1018 | /// # Panics |
1019 | /// |
1020 | /// Panics if the starting point is greater than the end point or if |
1021 | /// the end point is greater than the length of the vector. |
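    ///
    /// # Example
    ///
    /// A short example of removing a sub-range:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4];
    /// let drained: SmallVec<[i32; 4]> = v.drain(1..3).collect();
    /// assert_eq!(&*drained, &[2, 3]);
    /// assert_eq!(&*v, &[1, 4]);
    /// ```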
1022 | pub fn drain<R>(&mut self, range: R) -> Drain<'_, A> |
1023 | where |
1024 | R: RangeBounds<usize>, |
1025 | { |
1026 | use core::ops::Bound::*; |
1027 | |
1028 | let len = self.len(); |
1029 | let start = match range.start_bound() { |
1030 | Included(&n) => n, |
1031 | Excluded(&n) => n.checked_add(1).expect("Range start out of bounds" ), |
1032 | Unbounded => 0, |
1033 | }; |
1034 | let end = match range.end_bound() { |
1035 | Included(&n) => n.checked_add(1).expect("Range end out of bounds" ), |
1036 | Excluded(&n) => n, |
1037 | Unbounded => len, |
1038 | }; |
1039 | |
1040 | assert!(start <= end); |
1041 | assert!(end <= len); |
1042 | |
1043 | unsafe { |
1044 | self.set_len(start); |
1045 | |
1046 | let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start); |
1047 | |
1048 | Drain { |
1049 | tail_start: end, |
1050 | tail_len: len - end, |
1051 | iter: range_slice.iter(), |
1052 | // Since self is a &mut, passing it to a function would invalidate the slice iterator. |
1053 | vec: NonNull::new_unchecked(self as *mut _), |
1054 | } |
1055 | } |
1056 | } |
1057 | |
1058 | |
1059 | #[cfg (feature = "drain_filter" )] |
1060 | /// Creates an iterator which uses a closure to determine if an element should be removed. |
1061 | /// |
1062 | /// If the closure returns true, the element is removed and yielded. If the closure returns |
1063 | /// false, the element will remain in the vector and will not be yielded by the iterator. |
1064 | /// |
1065 | /// Using this method is equivalent to the following code: |
1066 | /// ``` |
1067 | /// # use smallvec::SmallVec; |
1068 | /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 }; |
1069 | /// # let mut vec: SmallVec<[i32; 8]> = SmallVec::from_slice(&[1i32, 2, 3, 4, 5, 6]); |
1070 | /// let mut i = 0; |
1071 | /// while i < vec.len() { |
1072 | /// if some_predicate(&mut vec[i]) { |
1073 | /// let val = vec.remove(i); |
1074 | /// // your code here |
1075 | /// } else { |
1076 | /// i += 1; |
1077 | /// } |
1078 | /// } |
1079 | /// |
1080 | /// # assert_eq!(vec, SmallVec::<[i32; 8]>::from_slice(&[1i32, 4, 5])); |
1081 | /// ``` |
1082 | /// /// |
1083 | /// But `drain_filter` is easier to use. `drain_filter` is also more efficient, |
1084 | /// because it can backshift the elements of the array in bulk. |
1085 | /// |
1086 | /// Note that `drain_filter` also lets you mutate every element in the filter closure, |
1087 | /// regardless of whether you choose to keep or remove it. |
1088 | /// |
1089 | /// # Examples |
1090 | /// |
1091 | /// Splitting an array into evens and odds, reusing the original allocation: |
1092 | /// |
1093 | /// ``` |
1094 | /// # use smallvec::SmallVec; |
1095 | /// let mut numbers: SmallVec<[i32; 16]> = SmallVec::from_slice(&[1i32, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]); |
1096 | /// |
1097 | /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<SmallVec<[i32; 16]>>(); |
1098 | /// let odds = numbers; |
1099 | /// |
1100 | /// assert_eq!(evens, SmallVec::<[i32; 16]>::from_slice(&[2i32, 4, 6, 8, 14])); |
1101 | /// assert_eq!(odds, SmallVec::<[i32; 16]>::from_slice(&[1i32, 3, 5, 9, 11, 13, 15])); |
1102 | /// ``` |
1103 | pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, A, F,> |
1104 | where |
1105 | F: FnMut(&mut A::Item) -> bool, |
1106 | { |
1107 | let old_len = self.len(); |
1108 | |
1109 | // Guard against us getting leaked (leak amplification) |
1110 | unsafe { |
1111 | self.set_len(0); |
1112 | } |
1113 | |
1114 | DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false } |
1115 | } |
1116 | |
1117 | /// Append an item to the vector. |
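    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::SmallVec;
    /// let mut v = SmallVec::<[i32; 2]>::new();
    /// v.push(1);
    /// v.push(2);
    /// v.push(3); // exceeds the inline capacity, so the vector spills to the heap
    /// assert!(v.spilled());
    /// assert_eq!(v.pop(), Some(3));
    /// ```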
1118 | #[inline ] |
1119 | pub fn push(&mut self, value: A::Item) { |
1120 | unsafe { |
1121 | let (mut ptr, mut len, cap) = self.triple_mut(); |
1122 | if *len == cap { |
1123 | self.reserve(1); |
1124 | let (heap_ptr, heap_len) = self.data.heap_mut(); |
1125 | ptr = heap_ptr; |
1126 | len = heap_len; |
1127 | } |
1128 | ptr::write(ptr.as_ptr().add(*len), value); |
1129 | *len += 1; |
1130 | } |
1131 | } |
1132 | |
1133 | /// Remove an item from the end of the vector and return it, or None if empty. |
1134 | #[inline ] |
1135 | pub fn pop(&mut self) -> Option<A::Item> { |
1136 | unsafe { |
1137 | let (ptr, len_ptr, _) = self.triple_mut(); |
1138 | let ptr: *const _ = ptr.as_ptr(); |
1139 | if *len_ptr == 0 { |
1140 | return None; |
1141 | } |
1142 | let last_index = *len_ptr - 1; |
1143 | *len_ptr = last_index; |
1144 | Some(ptr::read(ptr.add(last_index))) |
1145 | } |
1146 | } |
1147 | |
1148 | /// Moves all the elements of `other` into `self`, leaving `other` empty. |
1149 | /// |
1150 | /// # Example |
1151 | /// |
1152 | /// ``` |
1153 | /// # use smallvec::{SmallVec, smallvec}; |
1154 | /// let mut v0: SmallVec<[u8; 16]> = smallvec![1, 2, 3]; |
1155 | /// let mut v1: SmallVec<[u8; 32]> = smallvec![4, 5, 6]; |
1156 | /// v0.append(&mut v1); |
1157 | /// assert_eq!(*v0, [1, 2, 3, 4, 5, 6]); |
1158 | /// assert_eq!(*v1, []); |
1159 | /// ``` |
1160 | pub fn append<B>(&mut self, other: &mut SmallVec<B>) |
1161 | where |
1162 | B: Array<Item = A::Item>, |
1163 | { |
1164 | self.extend(other.drain(..)) |
1165 | } |
1166 | |
1167 | /// Re-allocate to set the capacity to `max(new_cap, inline_size())`. |
1168 | /// |
1169 | /// Panics if `new_cap` is less than the vector's length |
1170 | /// or if the capacity computation overflows `usize`. |
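    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 2]> = smallvec![1, 2, 3];
    /// v.grow(16);
    /// assert!(v.capacity() >= 16);
    /// assert_eq!(&*v, &[1, 2, 3]);
    /// ```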
1171 | pub fn grow(&mut self, new_cap: usize) { |
1172 | infallible(self.try_grow(new_cap)) |
1173 | } |
1174 | |
1175 | /// Re-allocate to set the capacity to `max(new_cap, inline_size())`. |
1176 | /// |
1177 | /// Panics if `new_cap` is less than the vector's length |
1178 | pub fn try_grow(&mut self, new_cap: usize) -> Result<(), CollectionAllocErr> { |
1179 | unsafe { |
1180 | let unspilled = !self.spilled(); |
1181 | let (ptr, &mut len, cap) = self.triple_mut(); |
1182 | assert!(new_cap >= len); |
1183 | if new_cap <= Self::inline_capacity() { |
1184 | if unspilled { |
1185 | return Ok(()); |
1186 | } |
1187 | self.data = SmallVecData::from_inline(MaybeUninit::uninit()); |
1188 | ptr::copy_nonoverlapping(ptr.as_ptr(), self.data.inline_mut().as_ptr(), len); |
1189 | self.capacity = len; |
1190 | deallocate(ptr, cap); |
1191 | } else if new_cap != cap { |
1192 | let layout = layout_array::<A::Item>(new_cap)?; |
1193 | debug_assert!(layout.size() > 0); |
1194 | let new_alloc; |
1195 | if unspilled { |
1196 | new_alloc = NonNull::new(alloc::alloc::alloc(layout)) |
1197 | .ok_or(CollectionAllocErr::AllocErr { layout })? |
1198 | .cast(); |
1199 | ptr::copy_nonoverlapping(ptr.as_ptr(), new_alloc.as_ptr(), len); |
1200 | } else { |
1201 | // This should never fail since the same succeeded |
1202 | // when previously allocating `ptr`. |
1203 | let old_layout = layout_array::<A::Item>(cap)?; |
1204 | |
1205 | let new_ptr = |
1206 | alloc::alloc::realloc(ptr.as_ptr() as *mut u8, old_layout, layout.size()); |
1207 | new_alloc = NonNull::new(new_ptr) |
1208 | .ok_or(CollectionAllocErr::AllocErr { layout })? |
1209 | .cast(); |
1210 | } |
1211 | self.data = SmallVecData::from_heap(new_alloc, len); |
1212 | self.capacity = new_cap; |
1213 | } |
1214 | Ok(()) |
1215 | } |
1216 | } |
1217 | |
1218 | /// Reserve capacity for `additional` more elements to be inserted. |
1219 | /// |
1220 | /// May reserve more space to avoid frequent reallocations. |
1221 | /// |
1222 | /// Panics if the capacity computation overflows `usize`. |
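    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::SmallVec;
    /// let mut v: SmallVec<[u8; 4]> = SmallVec::new();
    /// v.reserve(64);
    /// assert!(v.capacity() >= 64);
    /// ```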
1223 | #[inline ] |
1224 | pub fn reserve(&mut self, additional: usize) { |
1225 | infallible(self.try_reserve(additional)) |
1226 | } |
1227 | |
1228 | /// Reserve capacity for `additional` more elements to be inserted. |
1229 | /// |
1230 | /// May reserve more space to avoid frequent reallocations. |
1231 | pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { |
1232 | // prefer triple_mut() even if triple() would work |
1233 | // so that the optimizer removes duplicated calls to it |
1234 | // from callers like insert() |
1235 | let (_, &mut len, cap) = self.triple_mut(); |
1236 | if cap - len >= additional { |
1237 | return Ok(()); |
1238 | } |
1239 | let new_cap = len |
1240 | .checked_add(additional) |
1241 | .and_then(usize::checked_next_power_of_two) |
1242 | .ok_or(CollectionAllocErr::CapacityOverflow)?; |
1243 | self.try_grow(new_cap) |
1244 | } |
1245 | |
1246 | /// Reserve the minimum capacity for `additional` more elements to be inserted. |
1247 | /// |
1248 | /// Panics if the new capacity overflows `usize`. |
1249 | pub fn reserve_exact(&mut self, additional: usize) { |
1250 | infallible(self.try_reserve_exact(additional)) |
1251 | } |
1252 | |
1253 | /// Reserve the minimum capacity for `additional` more elements to be inserted. |
1254 | pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { |
1255 | let (_, &mut len, cap) = self.triple_mut(); |
1256 | if cap - len >= additional { |
1257 | return Ok(()); |
1258 | } |
1259 | let new_cap = len |
1260 | .checked_add(additional) |
1261 | .ok_or(CollectionAllocErr::CapacityOverflow)?; |
1262 | self.try_grow(new_cap) |
1263 | } |
1264 | |
1265 | /// Shrink the capacity of the vector as much as possible. |
1266 | /// |
1267 | /// When possible, this will move data from an external heap buffer to the vector's inline |
1268 | /// storage. |
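    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::SmallVec;
    /// let mut v: SmallVec<[i32; 4]> = SmallVec::with_capacity(100);
    /// v.extend(0..4);
    /// assert!(v.spilled());
    /// v.shrink_to_fit();
    /// // The four elements fit in the inline buffer again.
    /// assert!(!v.spilled());
    /// ```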
1269 | pub fn shrink_to_fit(&mut self) { |
1270 | if !self.spilled() { |
1271 | return; |
1272 | } |
1273 | let len = self.len(); |
1274 | if self.inline_size() >= len { |
1275 | unsafe { |
1276 | let (ptr, len) = self.data.heap(); |
1277 | self.data = SmallVecData::from_inline(MaybeUninit::uninit()); |
1278 | ptr::copy_nonoverlapping(ptr.as_ptr(), self.data.inline_mut().as_ptr(), len); |
1279 | deallocate(ptr.0, self.capacity); |
1280 | self.capacity = len; |
1281 | } |
1282 | } else if self.capacity() > len { |
1283 | self.grow(len); |
1284 | } |
1285 | } |
1286 | |
1287 | /// Shorten the vector, keeping the first `len` elements and dropping the rest. |
1288 | /// |
1289 | /// If `len` is greater than or equal to the vector's current length, this has no |
1290 | /// effect. |
1291 | /// |
1292 | /// This does not re-allocate. If you want the vector's capacity to shrink, call |
1293 | /// `shrink_to_fit` after truncating. |
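    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4, 5];
    /// v.truncate(2);
    /// assert_eq!(&*v, &[1, 2]);
    /// ```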
1294 | pub fn truncate(&mut self, len: usize) { |
1295 | unsafe { |
1296 | let (ptr, len_ptr, _) = self.triple_mut(); |
1297 | let ptr = ptr.as_ptr(); |
1298 | while len < *len_ptr { |
1299 | let last_index = *len_ptr - 1; |
1300 | *len_ptr = last_index; |
1301 | ptr::drop_in_place(ptr.add(last_index)); |
1302 | } |
1303 | } |
1304 | } |
1305 | |
1306 | /// Extracts a slice containing the entire vector. |
1307 | /// |
1308 | /// Equivalent to `&s[..]`. |
1309 | pub fn as_slice(&self) -> &[A::Item] { |
1310 | self |
1311 | } |
1312 | |
1313 | /// Extracts a mutable slice of the entire vector. |
1314 | /// |
1315 | /// Equivalent to `&mut s[..]`. |
1316 | pub fn as_mut_slice(&mut self) -> &mut [A::Item] { |
1317 | self |
1318 | } |
1319 | |
1320 | /// Remove the element at position `index`, replacing it with the last element. |
1321 | /// |
1322 | /// This does not preserve ordering, but is O(1). |
1323 | /// |
1324 | /// Panics if `index` is out of bounds. |
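    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3, 4];
    /// assert_eq!(v.swap_remove(0), 1);
    /// // The last element is moved into the vacated slot.
    /// assert_eq!(&*v, &[4, 2, 3]);
    /// ```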
1325 | #[inline ] |
1326 | pub fn swap_remove(&mut self, index: usize) -> A::Item { |
1327 | let len = self.len(); |
1328 | self.swap(len - 1, index); |
1329 | self.pop() |
1330 | .unwrap_or_else(|| unsafe { unreachable_unchecked() }) |
1331 | } |
1332 | |
1333 | /// Remove all elements from the vector. |
1334 | #[inline ] |
1335 | pub fn clear(&mut self) { |
1336 | self.truncate(0); |
1337 | } |
1338 | |
1339 | /// Remove and return the element at position `index`, shifting all elements after it to the |
1340 | /// left. |
1341 | /// |
1342 | /// Panics if `index` is out of bounds. |
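    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
    /// assert_eq!(v.remove(1), 2);
    /// assert_eq!(&*v, &[1, 3]);
    /// ```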
1343 | pub fn remove(&mut self, index: usize) -> A::Item { |
1344 | unsafe { |
1345 | let (ptr, len_ptr, _) = self.triple_mut(); |
1346 | let len = *len_ptr; |
1347 | assert!(index < len); |
1348 | *len_ptr = len - 1; |
1349 | let ptr = ptr.as_ptr().add(index); |
1350 | let item = ptr::read(ptr); |
1351 | ptr::copy(ptr.add(1), ptr, len - index - 1); |
1352 | item |
1353 | } |
1354 | } |
1355 | |
1356 | /// Insert an element at position `index`, shifting all elements after it to the right. |
1357 | /// |
1358 | /// Panics if `index > len`. |
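    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 4]> = smallvec![1, 2, 3];
    /// v.insert(1, 4);
    /// assert_eq!(&*v, &[1, 4, 2, 3]);
    /// ```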
1359 | pub fn insert(&mut self, index: usize, element: A::Item) { |
1360 | self.reserve(1); |
1361 | |
1362 | unsafe { |
1363 | let (ptr, len_ptr, _) = self.triple_mut(); |
1364 | let mut ptr = ptr.as_ptr(); |
1365 | let len = *len_ptr; |
1366 | ptr = ptr.add(index); |
1367 | if index < len { |
1368 | ptr::copy(ptr, ptr.add(1), len - index); |
1369 | } else if index == len { |
1370 | // No elements need shifting. |
1371 | } else { |
1372 | panic!("index exceeds length" ); |
1373 | } |
1374 | *len_ptr = len + 1; |
1375 | ptr::write(ptr, element); |
1376 | } |
1377 | } |
1378 | |
1379 | /// Insert multiple elements at position `index`, shifting all following elements toward the |
1380 | /// back. |
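    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3];
    /// v.insert_many(1, [7, 8].iter().cloned());
    /// assert_eq!(&*v, &[1, 7, 8, 2, 3]);
    /// ```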
1381 | pub fn insert_many<I: IntoIterator<Item = A::Item>>(&mut self, index: usize, iterable: I) { |
1382 | let mut iter = iterable.into_iter(); |
1383 | if index == self.len() { |
1384 | return self.extend(iter); |
1385 | } |
1386 | |
1387 | let (lower_size_bound, _) = iter.size_hint(); |
1388 | assert!(lower_size_bound <= core::isize::MAX as usize); // Ensure offset is indexable |
1389 | assert!(index + lower_size_bound >= index); // Protect against overflow |
1390 | |
1391 | let mut num_added = 0; |
1392 | let old_len = self.len(); |
1393 | assert!(index <= old_len); |
1394 | |
1395 | unsafe { |
1396 | // Reserve space for `lower_size_bound` elements. |
1397 | self.reserve(lower_size_bound); |
1398 | let start = self.as_mut_ptr(); |
1399 | let ptr = start.add(index); |
1400 | |
1401 | // Move the trailing elements. |
1402 | ptr::copy(ptr, ptr.add(lower_size_bound), old_len - index); |
1403 | |
1404 | // In case the iterator panics, don't double-drop the items we just copied above. |
1405 | self.set_len(0); |
1406 | let mut guard = DropOnPanic { |
1407 | start, |
1408 | skip: index..(index + lower_size_bound), |
1409 | len: old_len + lower_size_bound, |
1410 | }; |
1411 | |
1412 | // The set_len above invalidates the previous pointers, so we must re-create them. |
1413 | let start = self.as_mut_ptr(); |
1414 | let ptr = start.add(index); |
1415 | |
1416 | while num_added < lower_size_bound { |
1417 | let element = match iter.next() { |
1418 | Some(x) => x, |
1419 | None => break, |
1420 | }; |
1421 | let cur = ptr.add(num_added); |
1422 | ptr::write(cur, element); |
1423 | guard.skip.start += 1; |
1424 | num_added += 1; |
1425 | } |
1426 | |
1427 | if num_added < lower_size_bound { |
1428 | // Iterator provided fewer elements than the hint. Move the tail backward. |
1429 | ptr::copy( |
1430 | ptr.add(lower_size_bound), |
1431 | ptr.add(num_added), |
1432 | old_len - index, |
1433 | ); |
1434 | } |
1435 | // There are no more duplicate or uninitialized slots, so the guard is not needed. |
1436 | self.set_len(old_len + num_added); |
1437 | mem::forget(guard); |
1438 | } |
1439 | |
1440 | // Insert any remaining elements one-by-one. |
1441 | for element in iter { |
1442 | self.insert(index + num_added, element); |
1443 | num_added += 1; |
1444 | } |
1445 | |
1446 | struct DropOnPanic<T> { |
1447 | start: *mut T, |
1448 | skip: Range<usize>, // Space we copied-out-of, but haven't written-to yet. |
1449 | len: usize, |
1450 | } |
1451 | |
1452 | impl<T> Drop for DropOnPanic<T> { |
1453 | fn drop(&mut self) { |
1454 | for i in 0..self.len { |
1455 | if !self.skip.contains(&i) { |
1456 | unsafe { |
1457 | ptr::drop_in_place(self.start.add(i)); |
1458 | } |
1459 | } |
1460 | } |
1461 | } |
1462 | } |
1463 | } |
1464 | |
1465 | /// Convert a SmallVec to a Vec, without reallocating if the SmallVec has already spilled onto |
1466 | /// the heap. |
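    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let v: SmallVec<[i32; 2]> = smallvec![1, 2, 3];
    /// assert_eq!(v.into_vec(), vec![1, 2, 3]);
    /// ```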
1467 | pub fn into_vec(mut self) -> Vec<A::Item> { |
1468 | if self.spilled() { |
1469 | unsafe { |
1470 | let (ptr, &mut len) = self.data.heap_mut(); |
1471 | let v = Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity); |
1472 | mem::forget(self); |
1473 | v |
1474 | } |
1475 | } else { |
1476 | self.into_iter().collect() |
1477 | } |
1478 | } |
1479 | |
1480 | /// Converts a `SmallVec` into a `Box<[T]>` without reallocating if the `SmallVec` has already spilled |
1481 | /// onto the heap. |
1482 | /// |
1483 | /// Note that this will drop any excess capacity. |
1484 | pub fn into_boxed_slice(self) -> Box<[A::Item]> { |
1485 | self.into_vec().into_boxed_slice() |
1486 | } |
1487 | |
1488 | /// Convert the SmallVec into an `A` if possible. Otherwise return `Err(Self)`. |
1489 | /// |
1490 | /// This method returns `Err(Self)` if the SmallVec is too short (and the `A` contains uninitialized elements), |
1491 | /// or if the SmallVec is too long (and all the elements were spilled to the heap). |
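    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let full: SmallVec<[i32; 3]> = smallvec![1, 2, 3];
    /// assert_eq!(full.into_inner(), Ok([1, 2, 3]));
    ///
    /// // Too short: the backing array would contain uninitialized elements.
    /// let short: SmallVec<[i32; 3]> = smallvec![1, 2];
    /// assert!(short.into_inner().is_err());
    /// ```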
1492 | pub fn into_inner(self) -> Result<A, Self> { |
1493 | if self.spilled() || self.len() != A::size() { |
1494 | // Note: A::size, not Self::inline_capacity |
1495 | Err(self) |
1496 | } else { |
1497 | unsafe { |
1498 | let data = ptr::read(&self.data); |
1499 | mem::forget(self); |
1500 | Ok(data.into_inline().assume_init()) |
1501 | } |
1502 | } |
1503 | } |
1504 | |
1505 | /// Retains only the elements specified by the predicate. |
1506 | /// |
1507 | /// In other words, remove all elements `e` such that `f(&e)` returns `false`. |
1508 | /// This method operates in place and preserves the order of the retained |
1509 | /// elements. |
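    ///
    /// For example:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 8]> = smallvec![1, 2, 3, 4, 5];
    /// v.retain(|x| *x % 2 == 0);
    /// assert_eq!(&*v, &[2, 4]);
    /// ```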
1510 | pub fn retain<F: FnMut(&mut A::Item) -> bool>(&mut self, mut f: F) { |
1511 | let mut del = 0; |
1512 | let len = self.len(); |
1513 | for i in 0..len { |
1514 | if !f(&mut self[i]) { |
1515 | del += 1; |
1516 | } else if del > 0 { |
1517 | self.swap(i - del, i); |
1518 | } |
1519 | } |
1520 | self.truncate(len - del); |
1521 | } |
1522 | |
1523 | /// Retains only the elements specified by the predicate. |
1524 | /// |
1525 | /// This method is identical in behaviour to [`retain`]; it is included only |
    /// to maintain API compatibility with `std::vec::Vec`, where the methods are
1527 | /// separate for historical reasons. |
1528 | pub fn retain_mut<F: FnMut(&mut A::Item) -> bool>(&mut self, f: F) { |
1529 | self.retain(f) |
1530 | } |
1531 | |
1532 | /// Removes consecutive duplicate elements. |
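    ///
    /// Only *consecutive* duplicates are removed, as shown below:
    ///
    /// ```
    /// # use smallvec::{smallvec, SmallVec};
    /// let mut v: SmallVec<[i32; 8]> = smallvec![1, 1, 2, 2, 2, 3, 1];
    /// v.dedup();
    /// assert_eq!(&*v, &[1, 2, 3, 1]);
    /// ```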
1533 | pub fn dedup(&mut self) |
1534 | where |
1535 | A::Item: PartialEq<A::Item>, |
1536 | { |
1537 | self.dedup_by(|a, b| a == b); |
1538 | } |
1539 | |
1540 | /// Removes consecutive duplicate elements using the given equality relation. |
1541 | pub fn dedup_by<F>(&mut self, mut same_bucket: F) |
1542 | where |
1543 | F: FnMut(&mut A::Item, &mut A::Item) -> bool, |
1544 | { |
1545 | // See the implementation of Vec::dedup_by in the |
1546 | // standard library for an explanation of this algorithm. |
1547 | let len = self.len(); |
1548 | if len <= 1 { |
1549 | return; |
1550 | } |
1551 | |
1552 | let ptr = self.as_mut_ptr(); |
1553 | let mut w: usize = 1; |
1554 | |
1555 | unsafe { |
1556 | for r in 1..len { |
1557 | let p_r = ptr.add(r); |
1558 | let p_wm1 = ptr.add(w - 1); |
1559 | if !same_bucket(&mut *p_r, &mut *p_wm1) { |
1560 | if r != w { |
1561 | let p_w = p_wm1.add(1); |
1562 | mem::swap(&mut *p_r, &mut *p_w); |
1563 | } |
1564 | w += 1; |
1565 | } |
1566 | } |
1567 | } |
1568 | |
1569 | self.truncate(w); |
1570 | } |
1571 | |
1572 | /// Removes consecutive elements that map to the same key. |
1573 | pub fn dedup_by_key<F, K>(&mut self, mut key: F) |
1574 | where |
1575 | F: FnMut(&mut A::Item) -> K, |
1576 | K: PartialEq<K>, |
1577 | { |
1578 | self.dedup_by(|a, b| key(a) == key(b)); |
1579 | } |
1580 | |
1581 | /// Resizes the `SmallVec` in-place so that `len` is equal to `new_len`. |
1582 | /// |
1583 | /// If `new_len` is greater than `len`, the `SmallVec` is extended by the difference, with each |
1584 | /// additional slot filled with the result of calling the closure `f`. The return values from `f` |
/// will end up in the `SmallVec` in the order they have been generated.
1586 | /// |
1587 | /// If `new_len` is less than `len`, the `SmallVec` is simply truncated. |
1588 | /// |
1589 | /// This method uses a closure to create new values on every push. If you'd rather `Clone` a given |
1590 | /// value, use `resize`. If you want to use the `Default` trait to generate values, you can pass |
1591 | /// `Default::default()` as the second argument. |
1592 | /// |
1593 | /// Added for std::vec::Vec compatibility (added in Rust 1.33.0) |
1594 | /// |
1595 | /// ``` |
1596 | /// # use smallvec::{smallvec, SmallVec}; |
1597 | /// let mut vec : SmallVec<[_; 4]> = smallvec![1, 2, 3]; |
1598 | /// vec.resize_with(5, Default::default); |
1599 | /// assert_eq!(&*vec, &[1, 2, 3, 0, 0]); |
1600 | /// |
1601 | /// let mut vec : SmallVec<[_; 4]> = smallvec![]; |
1602 | /// let mut p = 1; |
1603 | /// vec.resize_with(4, || { p *= 2; p }); |
1604 | /// assert_eq!(&*vec, &[2, 4, 8, 16]); |
1605 | /// ``` |
1606 | pub fn resize_with<F>(&mut self, new_len: usize, f: F) |
1607 | where |
1608 | F: FnMut() -> A::Item, |
1609 | { |
1610 | let old_len = self.len(); |
1611 | if old_len < new_len { |
1612 | let mut f = f; |
1613 | let additional = new_len - old_len; |
1614 | self.reserve(additional); |
1615 | for _ in 0..additional { |
1616 | self.push(f()); |
1617 | } |
1618 | } else if old_len > new_len { |
1619 | self.truncate(new_len); |
1620 | } |
1621 | } |
1622 | |
1623 | /// Creates a `SmallVec` directly from the raw components of another |
1624 | /// `SmallVec`. |
1625 | /// |
1626 | /// # Safety |
1627 | /// |
1628 | /// This is highly unsafe, due to the number of invariants that aren't |
1629 | /// checked: |
1630 | /// |
1631 | /// * `ptr` needs to have been previously allocated via `SmallVec` for its |
1632 | /// spilled storage (at least, it's highly likely to be incorrect if it |
1633 | /// wasn't). |
1634 | /// * `ptr`'s `A::Item` type needs to be the same size and alignment that |
1635 | /// it was allocated with |
1636 | /// * `length` needs to be less than or equal to `capacity`. |
1637 | /// * `capacity` needs to be the capacity that the pointer was allocated |
1638 | /// with. |
1639 | /// |
1640 | /// Violating these may cause problems like corrupting the allocator's |
1641 | /// internal data structures. |
1642 | /// |
1643 | /// Additionally, `capacity` must be greater than the amount of inline |
1644 | /// storage `A` has; that is, the new `SmallVec` must need to spill over |
1645 | /// into heap allocated storage. This condition is asserted against. |
1646 | /// |
1647 | /// The ownership of `ptr` is effectively transferred to the |
1648 | /// `SmallVec` which may then deallocate, reallocate or change the |
1649 | /// contents of memory pointed to by the pointer at will. Ensure |
1650 | /// that nothing else uses the pointer after calling this |
1651 | /// function. |
1652 | /// |
1653 | /// # Examples |
1654 | /// |
1655 | /// ``` |
1656 | /// # #[macro_use ] extern crate smallvec; |
1657 | /// # use smallvec::SmallVec; |
1658 | /// use std::mem; |
1659 | /// use std::ptr; |
1660 | /// |
1661 | /// fn main() { |
1662 | /// let mut v: SmallVec<[_; 1]> = smallvec![1, 2, 3]; |
1663 | /// |
1664 | /// // Pull out the important parts of `v`. |
1665 | /// let p = v.as_mut_ptr(); |
1666 | /// let len = v.len(); |
1667 | /// let cap = v.capacity(); |
1668 | /// let spilled = v.spilled(); |
1669 | /// |
1670 | /// unsafe { |
1671 | /// // Forget all about `v`. The heap allocation that stored the |
1672 | /// // three values won't be deallocated. |
1673 | /// mem::forget(v); |
1674 | /// |
1675 | /// // Overwrite memory with [4, 5, 6]. |
1676 | /// // |
1677 | /// // This is only safe if `spilled` is true! Otherwise, we are |
1678 | /// // writing into the old `SmallVec`'s inline storage on the |
1679 | /// // stack. |
1680 | /// assert!(spilled); |
1681 | /// for i in 0..len { |
1682 | /// ptr::write(p.add(i), 4 + i); |
1683 | /// } |
1684 | /// |
1685 | /// // Put everything back together into a SmallVec with a different |
1686 | /// // amount of inline storage, but which is still less than `cap`. |
1687 | /// let rebuilt = SmallVec::<[_; 2]>::from_raw_parts(p, len, cap); |
1688 | /// assert_eq!(&*rebuilt, &[4, 5, 6]); |
1689 | /// } |
1690 | /// } |
1691 | #[inline ] |
1692 | pub unsafe fn from_raw_parts(ptr: *mut A::Item, length: usize, capacity: usize) -> SmallVec<A> { |
1693 | // SAFETY: We require caller to provide same ptr as we alloc |
1694 | // and we never alloc null pointer. |
1695 | let ptr = unsafe { |
debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer.");
1697 | NonNull::new_unchecked(ptr) |
1698 | }; |
1699 | assert!(capacity > Self::inline_capacity()); |
1700 | SmallVec { |
1701 | capacity, |
1702 | data: SmallVecData::from_heap(ptr, length), |
1703 | } |
1704 | } |
1705 | |
1706 | /// Returns a raw pointer to the vector's buffer. |
1707 | pub fn as_ptr(&self) -> *const A::Item { |
1708 | // We shadow the slice method of the same name to avoid going through |
1709 | // `deref`, which creates an intermediate reference that may place |
1710 | // additional safety constraints on the contents of the slice. |
1711 | self.triple().0.as_ptr() |
1712 | } |
1713 | |
1714 | /// Returns a raw mutable pointer to the vector's buffer. |
1715 | pub fn as_mut_ptr(&mut self) -> *mut A::Item { |
1716 | // We shadow the slice method of the same name to avoid going through |
1717 | // `deref_mut`, which creates an intermediate reference that may place |
1718 | // additional safety constraints on the contents of the slice. |
1719 | self.triple_mut().0.as_ptr() |
1720 | } |
1721 | } |
1722 | |
1723 | impl<A: Array> SmallVec<A> |
1724 | where |
1725 | A::Item: Copy, |
1726 | { |
1727 | /// Copy the elements from a slice into a new `SmallVec`. |
1728 | /// |
1729 | /// For slices of `Copy` types, this is more efficient than `SmallVec::from(slice)`. |
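///
/// A short illustrative example:
///
/// ```
/// # use smallvec::SmallVec;
/// let v: SmallVec<[u8; 4]> = SmallVec::from_slice(&[1, 2, 3]);
/// assert_eq!(&*v, &[1, 2, 3]);
/// assert!(!v.spilled()); // fits in the inline buffer
/// ```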
1730 | pub fn from_slice(slice: &[A::Item]) -> Self { |
1731 | let len = slice.len(); |
1732 | if len <= Self::inline_capacity() { |
1733 | SmallVec { |
1734 | capacity: len, |
1735 | data: SmallVecData::from_inline(unsafe { |
1736 | let mut data: MaybeUninit<A> = MaybeUninit::uninit(); |
1737 | ptr::copy_nonoverlapping( |
1738 | slice.as_ptr(), |
1739 | data.as_mut_ptr() as *mut A::Item, |
1740 | len, |
1741 | ); |
1742 | data |
1743 | }), |
1744 | } |
1745 | } else { |
1746 | let mut b = slice.to_vec(); |
1747 | let cap = b.capacity(); |
let ptr = NonNull::new(b.as_mut_ptr()).expect("Vec always contains a non-null pointer.");
1749 | mem::forget(b); |
1750 | SmallVec { |
1751 | capacity: cap, |
1752 | data: SmallVecData::from_heap(ptr, len), |
1753 | } |
1754 | } |
1755 | } |
1756 | |
1757 | /// Copy elements from a slice into the vector at position `index`, shifting any following |
1758 | /// elements toward the back. |
1759 | /// |
1760 | /// For slices of `Copy` types, this is more efficient than `insert`. |
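///
/// For illustration:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[u8; 8]> = smallvec![1, 2, 5];
/// v.insert_from_slice(2, &[3, 4]);
/// assert_eq!(&*v, &[1, 2, 3, 4, 5]);
/// ```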
1761 | pub fn insert_from_slice(&mut self, index: usize, slice: &[A::Item]) { |
1762 | self.reserve(slice.len()); |
1763 | |
1764 | let len = self.len(); |
1765 | assert!(index <= len); |
1766 | |
1767 | unsafe { |
1768 | let slice_ptr = slice.as_ptr(); |
1769 | let ptr = self.as_mut_ptr().add(index); |
1770 | ptr::copy(ptr, ptr.add(slice.len()), len - index); |
1771 | ptr::copy_nonoverlapping(slice_ptr, ptr, slice.len()); |
1772 | self.set_len(len + slice.len()); |
1773 | } |
1774 | } |
1775 | |
1776 | /// Copy elements from a slice and append them to the vector. |
1777 | /// |
1778 | /// For slices of `Copy` types, this is more efficient than `extend`. |
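///
/// A brief illustrative example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[u8; 8]> = smallvec![1, 2, 3];
/// v.extend_from_slice(&[4, 5]);
/// assert_eq!(&*v, &[1, 2, 3, 4, 5]);
/// ```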
1779 | #[inline ] |
1780 | pub fn extend_from_slice(&mut self, slice: &[A::Item]) { |
1781 | let len = self.len(); |
1782 | self.insert_from_slice(len, slice); |
1783 | } |
1784 | } |
1785 | |
1786 | impl<A: Array> SmallVec<A> |
1787 | where |
1788 | A::Item: Clone, |
1789 | { |
1790 | /// Resizes the vector so that its length is equal to `len`. |
1791 | /// |
/// If `len` is less than the current length, the vector is simply truncated.
1793 | /// |
1794 | /// If `len` is greater than the current length, `value` is appended to the |
1795 | /// vector until its length equals `len`. |
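///
/// An illustrative example of both growing and shrinking:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let mut v: SmallVec<[u8; 4]> = smallvec![1, 2, 3];
/// v.resize(5, 0);
/// assert_eq!(&*v, &[1, 2, 3, 0, 0]);
/// v.resize(2, 0);
/// assert_eq!(&*v, &[1, 2]);
/// ```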
1796 | pub fn resize(&mut self, len: usize, value: A::Item) { |
1797 | let old_len = self.len(); |
1798 | |
1799 | if len > old_len { |
1800 | self.extend(repeat(value).take(len - old_len)); |
1801 | } else { |
1802 | self.truncate(len); |
1803 | } |
1804 | } |
1805 | |
1806 | /// Creates a `SmallVec` with `n` copies of `elem`. |
1807 | /// ``` |
1808 | /// use smallvec::SmallVec; |
1809 | /// |
/// let v = SmallVec::<[char; 128]>::from_elem('d', 2);
/// assert_eq!(v, SmallVec::from_buf(['d', 'd']));
1812 | /// ``` |
1813 | pub fn from_elem(elem: A::Item, n: usize) -> Self { |
1814 | if n > Self::inline_capacity() { |
1815 | vec![elem; n].into() |
1816 | } else { |
1817 | let mut v = SmallVec::<A>::new(); |
1818 | unsafe { |
1819 | let (ptr, len_ptr, _) = v.triple_mut(); |
1820 | let ptr = ptr.as_ptr(); |
1821 | let mut local_len = SetLenOnDrop::new(len_ptr); |
1822 | |
1823 | for i in 0..n { |
1824 | ::core::ptr::write(ptr.add(i), elem.clone()); |
1825 | local_len.increment_len(1); |
1826 | } |
1827 | } |
1828 | v |
1829 | } |
1830 | } |
1831 | } |
1832 | |
1833 | impl<A: Array> ops::Deref for SmallVec<A> { |
1834 | type Target = [A::Item]; |
1835 | #[inline ] |
1836 | fn deref(&self) -> &[A::Item] { |
1837 | unsafe { |
let (ptr, len, _) = self.triple();
slice::from_raw_parts(ptr.as_ptr(), len)
1840 | } |
1841 | } |
1842 | } |
1843 | |
1844 | impl<A: Array> ops::DerefMut for SmallVec<A> { |
1845 | #[inline ] |
1846 | fn deref_mut(&mut self) -> &mut [A::Item] { |
1847 | unsafe { |
let (ptr, &mut len, _) = self.triple_mut();
slice::from_raw_parts_mut(ptr.as_ptr(), len)
1850 | } |
1851 | } |
1852 | } |
1853 | |
1854 | impl<A: Array> AsRef<[A::Item]> for SmallVec<A> { |
1855 | #[inline ] |
1856 | fn as_ref(&self) -> &[A::Item] { |
1857 | self |
1858 | } |
1859 | } |
1860 | |
1861 | impl<A: Array> AsMut<[A::Item]> for SmallVec<A> { |
1862 | #[inline ] |
1863 | fn as_mut(&mut self) -> &mut [A::Item] { |
1864 | self |
1865 | } |
1866 | } |
1867 | |
1868 | impl<A: Array> Borrow<[A::Item]> for SmallVec<A> { |
1869 | #[inline ] |
1870 | fn borrow(&self) -> &[A::Item] { |
1871 | self |
1872 | } |
1873 | } |
1874 | |
1875 | impl<A: Array> BorrowMut<[A::Item]> for SmallVec<A> { |
1876 | #[inline ] |
1877 | fn borrow_mut(&mut self) -> &mut [A::Item] { |
1878 | self |
1879 | } |
1880 | } |
1881 | |
1882 | #[cfg (feature = "write" )] |
1883 | #[cfg_attr (docsrs, doc(cfg(feature = "write" )))] |
1884 | impl<A: Array<Item = u8>> io::Write for SmallVec<A> { |
1885 | #[inline ] |
1886 | fn write(&mut self, buf: &[u8]) -> io::Result<usize> { |
1887 | self.extend_from_slice(buf); |
1888 | Ok(buf.len()) |
1889 | } |
1890 | |
1891 | #[inline ] |
1892 | fn write_all(&mut self, buf: &[u8]) -> io::Result<()> { |
1893 | self.extend_from_slice(buf); |
1894 | Ok(()) |
1895 | } |
1896 | |
1897 | #[inline ] |
1898 | fn flush(&mut self) -> io::Result<()> { |
1899 | Ok(()) |
1900 | } |
1901 | } |
1902 | |
1903 | #[cfg (feature = "serde" )] |
1904 | #[cfg_attr (docsrs, doc(cfg(feature = "serde" )))] |
1905 | impl<A: Array> Serialize for SmallVec<A> |
1906 | where |
1907 | A::Item: Serialize, |
1908 | { |
1909 | fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { |
1910 | let mut state = serializer.serialize_seq(Some(self.len()))?; |
1911 | for item in self { |
1912 | state.serialize_element(&item)?; |
1913 | } |
1914 | state.end() |
1915 | } |
1916 | } |
1917 | |
1918 | #[cfg (feature = "serde" )] |
1919 | #[cfg_attr (docsrs, doc(cfg(feature = "serde" )))] |
1920 | impl<'de, A: Array> Deserialize<'de> for SmallVec<A> |
1921 | where |
1922 | A::Item: Deserialize<'de>, |
1923 | { |
1924 | fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { |
1925 | deserializer.deserialize_seq(SmallVecVisitor { |
1926 | phantom: PhantomData, |
1927 | }) |
1928 | } |
1929 | } |
1930 | |
1931 | #[cfg (feature = "serde" )] |
1932 | struct SmallVecVisitor<A> { |
1933 | phantom: PhantomData<A>, |
1934 | } |
1935 | |
1936 | #[cfg (feature = "serde" )] |
1937 | impl<'de, A: Array> Visitor<'de> for SmallVecVisitor<A> |
1938 | where |
1939 | A::Item: Deserialize<'de>, |
1940 | { |
1941 | type Value = SmallVec<A>; |
1942 | |
1943 | fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { |
formatter.write_str("a sequence")
1945 | } |
1946 | |
1947 | fn visit_seq<B>(self, mut seq: B) -> Result<Self::Value, B::Error> |
1948 | where |
1949 | B: SeqAccess<'de>, |
1950 | { |
1951 | use serde::de::Error; |
1952 | let len = seq.size_hint().unwrap_or(0); |
1953 | let mut values = SmallVec::new(); |
1954 | values.try_reserve(len).map_err(B::Error::custom)?; |
1955 | |
1956 | while let Some(value) = seq.next_element()? { |
1957 | values.push(value); |
1958 | } |
1959 | |
1960 | Ok(values) |
1961 | } |
1962 | } |
1963 | |
1964 | #[cfg (feature = "specialization" )] |
1965 | trait SpecFrom<A: Array, S> { |
1966 | fn spec_from(slice: S) -> SmallVec<A>; |
1967 | } |
1968 | |
1969 | #[cfg (feature = "specialization" )] |
1970 | mod specialization; |
1971 | |
1972 | #[cfg (feature = "arbitrary" )] |
1973 | mod arbitrary; |
1974 | |
1975 | #[cfg (feature = "specialization" )] |
1976 | impl<'a, A: Array> SpecFrom<A, &'a [A::Item]> for SmallVec<A> |
1977 | where |
1978 | A::Item: Copy, |
1979 | { |
1980 | #[inline ] |
1981 | fn spec_from(slice: &'a [A::Item]) -> SmallVec<A> { |
1982 | SmallVec::from_slice(slice) |
1983 | } |
1984 | } |
1985 | |
1986 | impl<'a, A: Array> From<&'a [A::Item]> for SmallVec<A> |
1987 | where |
1988 | A::Item: Clone, |
1989 | { |
1990 | #[cfg (not(feature = "specialization" ))] |
1991 | #[inline ] |
1992 | fn from(slice: &'a [A::Item]) -> SmallVec<A> { |
1993 | slice.iter().cloned().collect() |
1994 | } |
1995 | |
1996 | #[cfg (feature = "specialization" )] |
1997 | #[inline ] |
1998 | fn from(slice: &'a [A::Item]) -> SmallVec<A> { |
1999 | SmallVec::spec_from(slice) |
2000 | } |
2001 | } |
2002 | |
2003 | impl<A: Array> From<Vec<A::Item>> for SmallVec<A> { |
2004 | #[inline ] |
2005 | fn from(vec: Vec<A::Item>) -> SmallVec<A> { |
2006 | SmallVec::from_vec(vec) |
2007 | } |
2008 | } |
2009 | |
2010 | impl<A: Array> From<A> for SmallVec<A> { |
2011 | #[inline ] |
2012 | fn from(array: A) -> SmallVec<A> { |
2013 | SmallVec::from_buf(array) |
2014 | } |
2015 | } |
2016 | |
2017 | impl<A: Array, I: SliceIndex<[A::Item]>> ops::Index<I> for SmallVec<A> { |
2018 | type Output = I::Output; |
2019 | |
2020 | fn index(&self, index: I) -> &I::Output { |
2021 | &(**self)[index] |
2022 | } |
2023 | } |
2024 | |
2025 | impl<A: Array, I: SliceIndex<[A::Item]>> ops::IndexMut<I> for SmallVec<A> { |
2026 | fn index_mut(&mut self, index: I) -> &mut I::Output { |
2027 | &mut (&mut **self)[index] |
2028 | } |
2029 | } |
2030 | |
2031 | #[allow (deprecated)] |
2032 | impl<A: Array> ExtendFromSlice<A::Item> for SmallVec<A> |
2033 | where |
2034 | A::Item: Copy, |
2035 | { |
2036 | fn extend_from_slice(&mut self, other: &[A::Item]) { |
SmallVec::extend_from_slice(self, other)
2038 | } |
2039 | } |
2040 | |
2041 | impl<A: Array> FromIterator<A::Item> for SmallVec<A> { |
2042 | #[inline ] |
2043 | fn from_iter<I: IntoIterator<Item = A::Item>>(iterable: I) -> SmallVec<A> { |
let mut v = SmallVec::new();
v.extend(iterable);
2046 | v |
2047 | } |
2048 | } |
2049 | |
2050 | impl<A: Array> Extend<A::Item> for SmallVec<A> { |
2051 | fn extend<I: IntoIterator<Item = A::Item>>(&mut self, iterable: I) { |
2052 | let mut iter = iterable.into_iter(); |
2053 | let (lower_size_bound, _) = iter.size_hint(); |
2054 | self.reserve(lower_size_bound); |
2055 | |
2056 | unsafe { |
2057 | let (ptr, len_ptr, cap) = self.triple_mut(); |
2058 | let ptr = ptr.as_ptr(); |
2059 | let mut len = SetLenOnDrop::new(len_ptr); |
2060 | while len.get() < cap { |
2061 | if let Some(out) = iter.next() { |
2062 | ptr::write(ptr.add(len.get()), out); |
2063 | len.increment_len(1); |
2064 | } else { |
2065 | return; |
2066 | } |
2067 | } |
2068 | } |
2069 | |
2070 | for elem in iter { |
2071 | self.push(elem); |
2072 | } |
2073 | } |
2074 | } |
2075 | |
2076 | impl<A: Array> fmt::Debug for SmallVec<A> |
2077 | where |
2078 | A::Item: fmt::Debug, |
2079 | { |
2080 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
2081 | f.debug_list().entries(self.iter()).finish() |
2082 | } |
2083 | } |
2084 | |
2085 | impl<A: Array> Default for SmallVec<A> { |
2086 | #[inline ] |
2087 | fn default() -> SmallVec<A> { |
2088 | SmallVec::new() |
2089 | } |
2090 | } |
2091 | |
2092 | #[cfg (feature = "may_dangle" )] |
2093 | unsafe impl<#[may_dangle ] A: Array> Drop for SmallVec<A> { |
2094 | fn drop(&mut self) { |
2095 | unsafe { |
2096 | if self.spilled() { |
let (ptr, &mut len) = self.data.heap_mut();
Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity);
2099 | } else { |
2100 | ptr::drop_in_place(&mut self[..]); |
2101 | } |
2102 | } |
2103 | } |
2104 | } |
2105 | |
2106 | #[cfg (not(feature = "may_dangle" ))] |
2107 | impl<A: Array> Drop for SmallVec<A> { |
2108 | fn drop(&mut self) { |
2109 | unsafe { |
2110 | if self.spilled() { |
2111 | let (ptr, &mut len) = self.data.heap_mut(); |
2112 | drop(Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity)); |
2113 | } else { |
2114 | ptr::drop_in_place(&mut self[..]); |
2115 | } |
2116 | } |
2117 | } |
2118 | } |
2119 | |
2120 | impl<A: Array> Clone for SmallVec<A> |
2121 | where |
2122 | A::Item: Clone, |
2123 | { |
2124 | #[inline ] |
2125 | fn clone(&self) -> SmallVec<A> { |
2126 | SmallVec::from(self.as_slice()) |
2127 | } |
2128 | |
2129 | fn clone_from(&mut self, source: &Self) { |
2130 | // Inspired from `impl Clone for Vec`. |
2131 | |
2132 | // drop anything that will not be overwritten |
2133 | self.truncate(source.len()); |
2134 | |
2135 | // self.len <= other.len due to the truncate above, so the |
2136 | // slices here are always in-bounds. |
let (init, tail) = source.split_at(self.len());
2138 | |
2139 | // reuse the contained values' allocations/resources. |
self.clone_from_slice(init);
self.extend(tail.iter().cloned());
2142 | } |
2143 | } |
2144 | |
2145 | impl<A: Array, B: Array> PartialEq<SmallVec<B>> for SmallVec<A> |
2146 | where |
2147 | A::Item: PartialEq<B::Item>, |
2148 | { |
2149 | #[inline ] |
2150 | fn eq(&self, other: &SmallVec<B>) -> bool { |
2151 | self[..] == other[..] |
2152 | } |
2153 | } |
2154 | |
2155 | impl<A: Array> Eq for SmallVec<A> where A::Item: Eq {} |
2156 | |
2157 | impl<A: Array> PartialOrd for SmallVec<A> |
2158 | where |
2159 | A::Item: PartialOrd, |
2160 | { |
2161 | #[inline ] |
2162 | fn partial_cmp(&self, other: &SmallVec<A>) -> Option<cmp::Ordering> { |
2163 | PartialOrd::partial_cmp(&**self, &**other) |
2164 | } |
2165 | } |
2166 | |
2167 | impl<A: Array> Ord for SmallVec<A> |
2168 | where |
2169 | A::Item: Ord, |
2170 | { |
2171 | #[inline ] |
2172 | fn cmp(&self, other: &SmallVec<A>) -> cmp::Ordering { |
2173 | Ord::cmp(&**self, &**other) |
2174 | } |
2175 | } |
2176 | |
2177 | impl<A: Array> Hash for SmallVec<A> |
2178 | where |
2179 | A::Item: Hash, |
2180 | { |
2181 | fn hash<H: Hasher>(&self, state: &mut H) { |
2182 | (**self).hash(state) |
2183 | } |
2184 | } |
2185 | |
2186 | unsafe impl<A: Array> Send for SmallVec<A> where A::Item: Send {} |
2187 | |
2188 | /// An iterator that consumes a `SmallVec` and yields its items by value. |
2189 | /// |
2190 | /// Returned from [`SmallVec::into_iter`][1]. |
2191 | /// |
2192 | /// [1]: struct.SmallVec.html#method.into_iter |
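///
/// A small illustrative example:
///
/// ```
/// # use smallvec::{smallvec, SmallVec};
/// let v: SmallVec<[u8; 4]> = smallvec![1, 2, 3];
/// let mut iter = v.into_iter();
/// assert_eq!(iter.next(), Some(1));
/// assert_eq!(iter.next_back(), Some(3));
/// assert_eq!(iter.next(), Some(2));
/// assert_eq!(iter.next(), None);
/// ```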
2193 | pub struct IntoIter<A: Array> { |
2194 | data: SmallVec<A>, |
2195 | current: usize, |
2196 | end: usize, |
2197 | } |
2198 | |
2199 | impl<A: Array> fmt::Debug for IntoIter<A> |
2200 | where |
2201 | A::Item: fmt::Debug, |
2202 | { |
2203 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
2205 | } |
2206 | } |
2207 | |
2208 | impl<A: Array + Clone> Clone for IntoIter<A> |
2209 | where |
2210 | A::Item: Clone, |
2211 | { |
2212 | fn clone(&self) -> IntoIter<A> { |
2213 | SmallVec::from(self.as_slice()).into_iter() |
2214 | } |
2215 | } |
2216 | |
2217 | impl<A: Array> Drop for IntoIter<A> { |
2218 | fn drop(&mut self) { |
2219 | for _ in self {} |
2220 | } |
2221 | } |
2222 | |
2223 | impl<A: Array> Iterator for IntoIter<A> { |
2224 | type Item = A::Item; |
2225 | |
2226 | #[inline ] |
2227 | fn next(&mut self) -> Option<A::Item> { |
2228 | if self.current == self.end { |
2229 | None |
2230 | } else { |
2231 | unsafe { |
2232 | let current: usize = self.current; |
2233 | self.current += 1; |
Some(ptr::read(self.data.as_ptr().add(current)))
2235 | } |
2236 | } |
2237 | } |
2238 | |
2239 | #[inline ] |
2240 | fn size_hint(&self) -> (usize, Option<usize>) { |
2241 | let size: usize = self.end - self.current; |
2242 | (size, Some(size)) |
2243 | } |
2244 | } |
2245 | |
2246 | impl<A: Array> DoubleEndedIterator for IntoIter<A> { |
2247 | #[inline ] |
2248 | fn next_back(&mut self) -> Option<A::Item> { |
2249 | if self.current == self.end { |
2250 | None |
2251 | } else { |
2252 | unsafe { |
2253 | self.end -= 1; |
2254 | Some(ptr::read(self.data.as_ptr().add(self.end))) |
2255 | } |
2256 | } |
2257 | } |
2258 | } |
2259 | |
2260 | impl<A: Array> ExactSizeIterator for IntoIter<A> {} |
2261 | impl<A: Array> FusedIterator for IntoIter<A> {} |
2262 | |
2263 | impl<A: Array> IntoIter<A> { |
2264 | /// Returns the remaining items of this iterator as a slice. |
2265 | pub fn as_slice(&self) -> &[A::Item] { |
2266 | let len: usize = self.end - self.current; |
2267 | unsafe { core::slice::from_raw_parts(self.data.as_ptr().add(self.current), len) } |
2268 | } |
2269 | |
2270 | /// Returns the remaining items of this iterator as a mutable slice. |
2271 | pub fn as_mut_slice(&mut self) -> &mut [A::Item] { |
2272 | let len: usize = self.end - self.current; |
2273 | unsafe { core::slice::from_raw_parts_mut(self.data.as_mut_ptr().add(self.current), len) } |
2274 | } |
2275 | } |
2276 | |
2277 | impl<A: Array> IntoIterator for SmallVec<A> { |
2278 | type IntoIter = IntoIter<A>; |
2279 | type Item = A::Item; |
2280 | fn into_iter(mut self) -> Self::IntoIter { |
2281 | unsafe { |
2282 | // Set SmallVec len to zero as `IntoIter` drop handles dropping of the elements |
2283 | let len: usize = self.len(); |
self.set_len(0);
2285 | IntoIter { |
2286 | data: self, |
2287 | current: 0, |
2288 | end: len, |
2289 | } |
2290 | } |
2291 | } |
2292 | } |
2293 | |
2294 | impl<'a, A: Array> IntoIterator for &'a SmallVec<A> { |
2295 | type IntoIter = slice::Iter<'a, A::Item>; |
2296 | type Item = &'a A::Item; |
2297 | fn into_iter(self) -> Self::IntoIter { |
2298 | self.iter() |
2299 | } |
2300 | } |
2301 | |
2302 | impl<'a, A: Array> IntoIterator for &'a mut SmallVec<A> { |
2303 | type IntoIter = slice::IterMut<'a, A::Item>; |
2304 | type Item = &'a mut A::Item; |
2305 | fn into_iter(self) -> Self::IntoIter { |
2306 | self.iter_mut() |
2307 | } |
2308 | } |
2309 | |
2310 | /// Types that can be used as the backing store for a SmallVec |
2311 | pub unsafe trait Array { |
2312 | /// The type of the array's elements. |
2313 | type Item; |
2314 | /// Returns the number of items the array can hold. |
2315 | fn size() -> usize; |
2316 | } |
2317 | |
2318 | /// Set the length of the vec when the `SetLenOnDrop` value goes out of scope. |
2319 | /// |
2320 | /// Copied from https://github.com/rust-lang/rust/pull/36355 |
2321 | struct SetLenOnDrop<'a> { |
2322 | len: &'a mut usize, |
2323 | local_len: usize, |
2324 | } |
2325 | |
2326 | impl<'a> SetLenOnDrop<'a> { |
2327 | #[inline ] |
2328 | fn new(len: &'a mut usize) -> Self { |
2329 | SetLenOnDrop { |
2330 | local_len: *len, |
2331 | len, |
2332 | } |
2333 | } |
2334 | |
2335 | #[inline ] |
2336 | fn get(&self) -> usize { |
2337 | self.local_len |
2338 | } |
2339 | |
2340 | #[inline ] |
2341 | fn increment_len(&mut self, increment: usize) { |
2342 | self.local_len += increment; |
2343 | } |
2344 | } |
2345 | |
2346 | impl<'a> Drop for SetLenOnDrop<'a> { |
2347 | #[inline ] |
2348 | fn drop(&mut self) { |
2349 | *self.len = self.local_len; |
2350 | } |
2351 | } |
2352 | |
2353 | #[cfg (feature = "const_new" )] |
2354 | impl<T, const N: usize> SmallVec<[T; N]> { |
2355 | /// Construct an empty vector. |
2356 | /// |
2357 | /// This is a `const` version of [`SmallVec::new`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays. |
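///
/// An illustrative sketch (requires the `const_new` feature):
///
/// ```
/// # use smallvec::SmallVec;
/// const EMPTY: SmallVec<[u8; 4]> = SmallVec::new_const();
/// assert!(EMPTY.is_empty());
/// ```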
2358 | #[cfg_attr (docsrs, doc(cfg(feature = "const_new" )))] |
2359 | #[inline ] |
2360 | pub const fn new_const() -> Self { |
2361 | SmallVec { |
2362 | capacity: 0, |
2363 | data: SmallVecData::from_const(MaybeUninit::uninit()), |
2364 | } |
2365 | } |
2366 | |
2367 | /// The array passed as an argument is moved to be an inline version of `SmallVec`. |
2368 | /// |
2369 | /// This is a `const` version of [`SmallVec::from_buf`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays. |
2370 | #[cfg_attr (docsrs, doc(cfg(feature = "const_new" )))] |
2371 | #[inline ] |
2372 | pub const fn from_const(items: [T; N]) -> Self { |
2373 | SmallVec { |
2374 | capacity: N, |
2375 | data: SmallVecData::from_const(MaybeUninit::new(items)), |
2376 | } |
2377 | } |
2378 | } |
2379 | |
2380 | #[cfg (all(feature = "const_generics" , not(doc)))] |
2381 | #[cfg_attr (docsrs, doc(cfg(feature = "const_generics" )))] |
2382 | unsafe impl<T, const N: usize> Array for [T; N] { |
2383 | type Item = T; |
2384 | #[inline ] |
2385 | fn size() -> usize { |
2386 | N |
2387 | } |
2388 | } |
2389 | |
2390 | #[cfg (any(not(feature = "const_generics" ), doc))] |
2391 | macro_rules! impl_array( |
2392 | ($($size:expr),+) => { |
2393 | $( |
2394 | unsafe impl<T> Array for [T; $size] { |
2395 | type Item = T; |
2396 | #[inline] |
2397 | fn size() -> usize { $size } |
2398 | } |
2399 | )+ |
2400 | } |
2401 | ); |
2402 | |
2403 | #[cfg (any(not(feature = "const_generics" ), doc))] |
2404 | impl_array!( |
2405 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, |
2406 | 26, 27, 28, 29, 30, 31, 32, 36, 0x40, 0x60, 0x80, 0x100, 0x200, 0x400, 0x600, 0x800, 0x1000, |
2407 | 0x2000, 0x4000, 0x6000, 0x8000, 0x10000, 0x20000, 0x40000, 0x60000, 0x80000, 0x10_0000 |
2408 | ); |
2409 | |
2410 | /// Convenience trait for constructing a `SmallVec` |
2411 | pub trait ToSmallVec<A: Array> { |
2412 | /// Construct a new `SmallVec` from a slice. |
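///
/// A short illustrative example:
///
/// ```
/// # use smallvec::{SmallVec, ToSmallVec};
/// let slice: &[u8] = &[1, 2, 3];
/// let v: SmallVec<[u8; 4]> = slice.to_smallvec();
/// assert_eq!(&*v, &[1, 2, 3]);
/// ```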
2413 | fn to_smallvec(&self) -> SmallVec<A>; |
2414 | } |
2415 | |
2416 | impl<A: Array> ToSmallVec<A> for [A::Item] |
2417 | where |
2418 | A::Item: Copy, |
2419 | { |
2420 | #[inline ] |
2421 | fn to_smallvec(&self) -> SmallVec<A> { |
2422 | SmallVec::from_slice(self) |
2423 | } |
2424 | } |
2425 | |
2426 | // Immutable counterpart for `NonNull<T>`. |
2427 | #[repr (transparent)] |
2428 | struct ConstNonNull<T>(NonNull<T>); |
2429 | |
2430 | impl<T> ConstNonNull<T> { |
2431 | #[inline ] |
2432 | fn new(ptr: *const T) -> Option<Self> { |
2433 | NonNull::new(ptr as *mut T).map(Self) |
2434 | } |
2435 | #[inline ] |
2436 | fn as_ptr(self) -> *const T { |
2437 | self.0.as_ptr() |
2438 | } |
2439 | } |
2440 | |
2441 | impl<T> Clone for ConstNonNull<T> { |
2442 | #[inline ] |
2443 | fn clone(&self) -> Self { |
2444 | *self |
2445 | } |
2446 | } |
2447 | |
2448 | impl<T> Copy for ConstNonNull<T> {} |
2449 | |