use core::iter::TrustedLen;
use core::slice;

use super::VecDeque;
use crate::alloc::Allocator;
#[cfg(not(test))]
use crate::vec;

// Specialization trait used for VecDeque::extend
pub(super) trait SpecExtend<T, I> {
    #[track_caller]
    fn spec_extend(&mut self, iter: I);
}
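
// A minimal sketch of the presumed forwarding impl that funnels into this
// trait (it lives elsewhere in this crate, not in this file; shown here only
// to illustrate how specialization picks one of the impls below based on the
// concrete iterator type):
//
//     impl<T, A: Allocator> Extend<T> for VecDeque<T, A> {
//         fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
//             <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
//         }
//     }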

impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
where
    I: Iterator<Item = T>,
{
    #[track_caller]
    default fn spec_extend(&mut self, mut iter: I) {
        // This function should be the moral equivalent of:
        //
        //      for item in iter {
        //          self.push_back(item);
        //      }

        while let Some(element) = iter.next() {
            let (lower, _) = iter.size_hint();
            self.reserve(lower.saturating_add(1));

            // SAFETY: We just reserved space for at least one element.
            unsafe { self.push_unchecked(element) };

            // Inner loop to avoid repeatedly calling `reserve`.
            while self.len < self.capacity() {
                let Some(element) = iter.next() else {
                    return;
                };
                // SAFETY: The loop condition guarantees that `self.len() < self.capacity()`.
                unsafe { self.push_unchecked(element) };
            }
        }
    }
}
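
// Illustrative sketch (not part of this module): an iterator with a
// pessimistic `size_hint` still goes through the fallback impl above, it just
// calls `reserve` more often. `filter` is a typical case, since it reports a
// lower bound of zero:
//
//     let mut deque: VecDeque<u32> = VecDeque::new();
//     // size_hint is (0, Some(100)): the outer loop reserves at least one
//     // slot per iteration, and the inner loop fills whatever spare capacity
//     // the growth policy of `reserve` actually provided.
//     deque.extend((0u32..100).filter(|n| n % 2 == 0));
//     assert_eq!(deque.len(), 50);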

impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
where
    I: TrustedLen<Item = T>,
{
    #[track_caller]
    default fn spec_extend(&mut self, iter: I) {
        // This is the case for a TrustedLen iterator.
        let (low, high) = iter.size_hint();
        if let Some(additional) = high {
            debug_assert_eq!(
                low,
                additional,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );
            self.reserve(additional);

            let written = unsafe {
                self.write_iter_wrapping(self.to_physical_idx(self.len), iter, additional)
            };

            debug_assert_eq!(
                additional, written,
                "The number of items written to VecDeque doesn't match the TrustedLen size hint"
            );
        } else {
            // Per TrustedLen contract a `None` upper bound means that the iterator length
            // truly exceeds usize::MAX, which would eventually lead to a capacity overflow anyway.
            // Since the other branch already panics eagerly (via `reserve()`) we do the same here.
            // This avoids additional codegen for a fallback code path which would eventually
            // panic anyway.
            panic!("capacity overflow");
        }
    }
}
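
// Illustrative sketch (not part of this module): integer ranges implement
// `TrustedLen`, so extending from one takes the impl above: a single exact
// `reserve` followed by one bulk write, with no per-element capacity checks.
//
//     let mut deque: VecDeque<usize> = VecDeque::with_capacity(4);
//     deque.extend(0..1000); // exact length known up front: one reserve(1000)
//     assert_eq!(deque.len(), 1000);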

#[cfg(not(test))]
impl<T, A: Allocator> SpecExtend<T, vec::IntoIter<T>> for VecDeque<T, A> {
    #[track_caller]
    fn spec_extend(&mut self, mut iterator: vec::IntoIter<T>) {
        let slice = iterator.as_slice();
        self.reserve(slice.len());

        // SAFETY: We just reserved space for `slice.len()` more elements.
        unsafe {
            self.copy_slice(self.to_physical_idx(self.len), slice);
            self.len += slice.len();
        }
        // The elements were moved out with a raw copy, so make the iterator
        // forget them; dropping them there as well would be a double drop.
        iterator.forget_remaining_elements();
    }
}
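
// Illustrative sketch (not part of this module): extending straight from a
// `Vec` hits the impl above, which bulk-copies the remaining elements and
// then tells the spent `IntoIter` to forget them, so nothing is dropped twice
// even for non-`Copy` element types:
//
//     let mut deque: VecDeque<String> = VecDeque::new();
//     deque.extend(vec![String::from("a"), String::from("b")]);
//     assert_eq!(deque.len(), 2);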

impl<'a, T: 'a, I, A: Allocator> SpecExtend<&'a T, I> for VecDeque<T, A>
where
    I: Iterator<Item = &'a T>,
    T: Copy,
{
    #[track_caller]
    default fn spec_extend(&mut self, iterator: I) {
        self.spec_extend(iterator.copied())
    }
}

impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque<T, A>
where
    T: Copy,
{
    #[track_caller]
    fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
        let slice = iterator.as_slice();
        self.reserve(slice.len());

        // SAFETY: We just reserved space for `slice.len()` more elements.
        unsafe {
            self.copy_slice(self.to_physical_idx(self.len), slice);
            self.len += slice.len();
        }
    }
}
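
// Illustrative sketch (not part of this module): extending from a slice
// iterator over `Copy` elements hits the impl above and degenerates to one
// `reserve` plus a (possibly wrapping) bulk copy:
//
//     let mut deque: VecDeque<u8> = VecDeque::new();
//     let bytes: &[u8] = b"hello";
//     deque.extend(bytes.iter()); // slice::Iter<'_, u8>: bulk copy, no per-item work
//     assert_eq!(deque.len(), 5);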