1 | #![cfg_attr (not(feature = "std" ), no_std)] |
2 | #![cfg_attr (docsrs, feature(doc_cfg))] |
3 | |
4 | //! # aligned-vec |
5 | //! |
6 | //! This crate provides the `AVec<T>` and `ABox<T>` types, which are intended to have a similar API |
7 | //! to `Vec<T>` and `Box<T>`, but align the data they contain to a runtime alignment value. |
8 | //! |
9 | //! This is useful for situations where the alignment of the data matters, such as when working with |
10 | //! numerical data that can get performance benefits from being aligned to a SIMD-compatible memory address. |
11 | //! |
12 | //! # Features |
13 | //! |
//! - `std` (default feature): Links this crate against the standard library (`std`) instead of `core`.
15 | //! - `serde`: Implements serialization and deserialization features for `ABox` and `AVec`. |
16 | |
17 | use core::{ |
18 | fmt::Debug, |
19 | marker::PhantomData, |
20 | mem::{align_of, size_of, ManuallyDrop}, |
21 | ops::{Deref, DerefMut}, |
22 | ptr::{null_mut, NonNull}, |
23 | }; |
24 | use raw::ARawVec; |
25 | |
26 | mod raw; |
27 | extern crate alloc; |
28 | |
// https://rust-lang.github.io/hashbrown/src/crossbeam_utils/cache_padded.rs.html#128-130
/// Cache line alignment (in bytes) assumed for the target architecture.
///
/// The per-architecture values mirror crossbeam's `CachePadded` table (see the
/// link above). Exactly one of the `#[cfg]`-guarded blocks below survives
/// compilation and becomes the value of the constant.
pub const CACHELINE_ALIGN: usize = {
    #[cfg(any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
    ))]
    {
        128
    }
    #[cfg(any(
        target_arch = "arm",
        target_arch = "mips",
        target_arch = "mips64",
        target_arch = "riscv64",
    ))]
    {
        32
    }
    #[cfg(target_arch = "s390x")]
    {
        256
    }
    // Fallback for every architecture not listed above.
    #[cfg(not(any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "arm",
        target_arch = "mips",
        target_arch = "mips64",
        target_arch = "riscv64",
        target_arch = "s390x",
    )))]
    {
        64
    }
};
66 | |
// Sealed-trait pattern: `Alignment` requires `private::Seal`, and `Seal` is not
// publicly nameable, so no type outside this crate can implement `Alignment`.
mod private {
    pub trait Seal {}
}
70 | |
/// Trait for types that wrap an alignment value.
///
/// Sealed: only [`RuntimeAlign`] and [`ConstAlign`] implement it.
pub trait Alignment: Copy + private::Seal {
    /// Takes an alignment value and a minimum valid alignment,
    /// and returns an alignment wrapper. The wrapped alignment is a power of two
    /// that is at least `minimum_align`, and when possible, at least `align`.
    #[must_use]
    fn new(align: usize, minimum_align: usize) -> Self;
    /// Returns the alignment value held by the wrapper as a `usize`:
    /// a power of two that is at least `minimum_align`.
    /// (Const wrappers compute this on the fly from `minimum_align`; runtime
    /// wrappers return the value that was already fixed up in [`Self::new`].)
    #[must_use]
    fn alignment(self, minimum_align: usize) -> usize;
}
84 | |
/// Type wrapping a runtime alignment value.
#[derive(Copy, Clone)]
pub struct RuntimeAlign {
    // Power-of-two alignment in bytes, fixed up at construction time.
    align: usize,
}
90 | |
/// Type wrapping a compile-time alignment value.
///
/// The alignment lives in the const parameter, so the wrapper itself is zero-sized.
#[derive(Copy, Clone)]
pub struct ConstAlign<const ALIGN: usize>;
94 | |
// The only two types permitted to implement the sealed `Alignment` trait.
impl private::Seal for RuntimeAlign {}
impl<const ALIGN: usize> private::Seal for ConstAlign<ALIGN> {}
97 | |
98 | impl Alignment for RuntimeAlign { |
99 | #[inline ] |
100 | fn new(align: usize, minimum_align: usize) -> Self { |
101 | RuntimeAlign { |
102 | align: fix_alignment(align, base_align:minimum_align), |
103 | } |
104 | } |
105 | |
106 | #[inline ] |
107 | fn alignment(self, minimum_align: usize) -> usize { |
108 | let _ = minimum_align; |
109 | self.align |
110 | } |
111 | } |
112 | impl<const ALIGN: usize> Alignment for ConstAlign<ALIGN> { |
113 | #[inline ] |
114 | fn new(align: usize, minimum_align: usize) -> Self { |
115 | let _ = align; |
116 | let _ = minimum_align; |
117 | ConstAlign::<ALIGN> |
118 | } |
119 | |
120 | #[inline ] |
121 | fn alignment(self, minimum_align: usize) -> usize { |
122 | fix_alignment(ALIGN, base_align:minimum_align) |
123 | } |
124 | } |
125 | |
/// Aligned vector. See [`Vec`] for more info.
pub struct AVec<T, A: Alignment = ConstAlign<CACHELINE_ALIGN>> {
    // Raw aligned buffer: owns the pointer, capacity and alignment.
    buf: ARawVec<T, A>,
    // Number of initialized elements at the front of `buf`.
    len: usize,
}
131 | |
/// Aligned box. See [`Box`] for more info.
pub struct ABox<T: ?Sized, A: Alignment = ConstAlign<CACHELINE_ALIGN>> {
    // Owned pointer to the (aligned) boxed value.
    ptr: NonNull<T>,
    // Alignment wrapper, used to reconstruct the allocation layout on drop.
    align: A,
    // Marks logical ownership of a `T` for variance/drop-check purposes.
    _marker: PhantomData<T>,
}
138 | |
139 | impl<T: ?Sized, A: Alignment> Deref for ABox<T, A> { |
140 | type Target = T; |
141 | |
142 | #[inline ] |
143 | fn deref(&self) -> &Self::Target { |
144 | unsafe { &*self.ptr.as_ptr() } |
145 | } |
146 | } |
147 | |
148 | impl<T: ?Sized, A: Alignment> DerefMut for ABox<T, A> { |
149 | #[inline ] |
150 | fn deref_mut(&mut self) -> &mut Self::Target { |
151 | unsafe { &mut *self.ptr.as_ptr() } |
152 | } |
153 | } |
154 | |
155 | impl<T: ?Sized, A: Alignment> AsRef<T> for ABox<T, A> { |
156 | #[inline ] |
157 | fn as_ref(&self) -> &T { |
158 | &**self |
159 | } |
160 | } |
161 | |
162 | impl<T: ?Sized, A: Alignment> AsMut<T> for ABox<T, A> { |
163 | #[inline ] |
164 | fn as_mut(&mut self) -> &mut T { |
165 | &mut **self |
166 | } |
167 | } |
168 | |
// Drop guard that frees a raw allocation when it goes out of scope. Used so
// the memory is released even if dropping the contained value panics.
struct AllocDrop {
    ptr: *mut u8,
    size_bytes: usize,
    align: usize,
}
impl Drop for AllocDrop {
    #[inline]
    fn drop(&mut self) {
        // A zero-size value has no backing allocation to free.
        if self.size_bytes > 0 {
            // SAFETY: `ptr` was allocated with exactly this size and alignment,
            // so the layout round-trips and `dealloc` is valid.
            unsafe {
                alloc::alloc::dealloc(
                    self.ptr,
                    alloc::alloc::Layout::from_size_align_unchecked(self.size_bytes, self.align),
                )
            }
        }
    }
}
187 | |
188 | impl<T: ?Sized, A: Alignment> Drop for ABox<T, A> { |
189 | #[inline ] |
190 | fn drop(&mut self) { |
191 | let size_bytes: usize = core::mem::size_of_val(self.deref_mut()); |
192 | let align_bytes: usize = core::mem::align_of_val(self.deref_mut()); |
193 | let ptr: *mut T = self.deref_mut() as *mut T; |
194 | let _alloc_drop: AllocDrop = AllocDrop { |
195 | ptr: ptr as *mut u8, |
196 | size_bytes, |
197 | align: self.align.alignment(minimum_align:align_bytes), |
198 | }; |
199 | unsafe { ptr.drop_in_place() }; |
200 | } |
201 | } |
202 | |
impl<T, A: Alignment> Deref for AVec<T, A> {
    type Target = [T];

    // Deref to the initialized prefix as a slice, like `Vec<T>`.
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T, A: Alignment> DerefMut for AVec<T, A> {
    // Mutable counterpart of `Deref`: the initialized prefix as a slice.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}
217 | |
218 | impl<T, A: Alignment> AsRef<[T]> for AVec<T, A> { |
219 | #[inline ] |
220 | fn as_ref(&self) -> &[T] { |
221 | &**self |
222 | } |
223 | } |
224 | |
225 | impl<T, A: Alignment> AsMut<[T]> for AVec<T, A> { |
226 | #[inline ] |
227 | fn as_mut(&mut self) -> &mut [T] { |
228 | &mut **self |
229 | } |
230 | } |
231 | |
232 | impl<T, A: Alignment> ABox<T, A> { |
233 | /// Creates a new [`ABox<T>`] containing `value` at an address aligned to `align` bytes. |
234 | #[inline ] |
235 | pub fn new(align: usize, value: T) -> Self { |
236 | let align: usize = A::new(align, align_of::<T>()).alignment(minimum_align:align_of::<T>()); |
237 | let ptr: *mut T = if size_of::<T>() == 0 { |
238 | null_mut::<u8>().wrapping_add(count:align) as *mut T |
239 | } else { |
240 | unsafe { raw::with_capacity_unchecked(capacity:1, align, size_of::<T>()) as *mut T } |
241 | }; |
242 | unsafe { ptr.write(val:value) }; |
243 | unsafe { Self::from_raw_parts(align, ptr) } |
244 | } |
245 | |
246 | /// Returns the alignment of the box. |
247 | #[inline ] |
248 | pub fn alignment(&self) -> usize { |
249 | self.align.alignment(minimum_align:align_of::<T>()) |
250 | } |
251 | } |
252 | |
253 | impl<T: ?Sized, A: Alignment> ABox<T, A> { |
254 | /// Creates a new [`ABox<T>`] from its raw parts. |
255 | /// |
256 | /// # Safety |
257 | /// |
258 | /// The arguments to this function must be acquired from a previous call to |
259 | /// [`Self::into_raw_parts`]. |
260 | #[inline ] |
261 | pub unsafe fn from_raw_parts(align: usize, ptr: *mut T) -> Self { |
262 | Self { |
263 | ptr: NonNull::<T>::new_unchecked(ptr), |
264 | align: A::new(align, minimum_align:core::mem::align_of_val(&*ptr)), |
265 | _marker: PhantomData, |
266 | } |
267 | } |
268 | |
269 | /// Decomposes a [`ABox<T>`] into its raw parts: `(ptr, alignment)`. |
270 | #[inline ] |
271 | pub fn into_raw_parts(self) -> (*mut T, usize) { |
272 | let this: ManuallyDrop> = ManuallyDrop::new(self); |
273 | let align: usize = core::mem::align_of_val(unsafe { &*this.ptr.as_ptr() }); |
274 | (this.ptr.as_ptr(), this.align.alignment(align)) |
275 | } |
276 | } |
277 | |
278 | impl<T, A: Alignment> Drop for AVec<T, A> { |
279 | #[inline ] |
280 | fn drop(&mut self) { |
281 | // SAFETY: dropping initialized elements |
282 | unsafe { (self.as_mut_slice() as *mut [T]).drop_in_place() } |
283 | } |
284 | } |
285 | |
/// Rounds `align` up to the next power of two, then clamps it from below by
/// `base_align` (expected to itself be a valid power-of-two alignment).
///
/// If rounding up would overflow `usize`, `checked_next_power_of_two` yields
/// `None`; substituting `0` makes the result saturate to `base_align`.
#[inline]
fn fix_alignment(align: usize, base_align: usize) -> usize {
    align
        .checked_next_power_of_two()
        .unwrap_or(0)
        .max(base_align)
}
293 | |
294 | impl<T, A: Alignment> AVec<T, A> { |
    /// Returns a new [`AVec<T>`] with the provided alignment.
    #[inline]
    #[must_use]
    pub fn new(align: usize) -> Self {
        unsafe {
            Self {
                // SAFETY: the alignment handed to the raw buffer has been
                // fixed up to a power of two that is at least align_of::<T>().
                buf: ARawVec::new_unchecked(
                    A::new(align, align_of::<T>()).alignment(align_of::<T>()),
                ),
                len: 0,
            }
        }
    }
308 | |
    /// Creates a new empty vector with enough capacity for at least `capacity` elements to
    /// be inserted in the vector. If `capacity` is 0, the vector will not allocate.
    ///
    /// # Panics
    ///
    /// Panics if the capacity exceeds `isize::MAX` bytes.
    #[inline]
    #[must_use]
    pub fn with_capacity(align: usize, capacity: usize) -> Self {
        unsafe {
            Self {
                // SAFETY: the alignment handed to the raw buffer has been
                // fixed up to a power of two that is at least align_of::<T>().
                buf: ARawVec::with_capacity_unchecked(
                    capacity,
                    A::new(align, align_of::<T>()).alignment(align_of::<T>()),
                ),
                len: 0,
            }
        }
    }
328 | |
    /// Returns a new [`AVec<T>`] from its raw parts.
    ///
    /// # Safety
    ///
    /// The arguments to this function must be acquired from a previous call to
    /// [`Self::into_raw_parts`].
    #[inline]
    #[must_use]
    pub unsafe fn from_raw_parts(ptr: *mut T, align: usize, len: usize, capacity: usize) -> Self {
        Self {
            // Note: `ARawVec` takes (ptr, capacity, align) while this function
            // takes (ptr, align, len, capacity) — mirroring `into_raw_parts`.
            buf: ARawVec::from_raw_parts(ptr, capacity, align),
            len,
        }
    }
343 | |
344 | /// Decomposes an [`AVec<T>`] into its raw parts: `(ptr, alignment, length, capacity)`. |
345 | #[inline ] |
346 | pub fn into_raw_parts(self) -> (*mut T, usize, usize, usize) { |
347 | let mut this = ManuallyDrop::new(self); |
348 | let len = this.len(); |
349 | let cap = this.capacity(); |
350 | let align = this.alignment(); |
351 | let ptr = this.as_mut_ptr(); |
352 | (ptr, align, len, cap) |
353 | } |
354 | |
    /// Returns the length of the vector.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector's length is equal to `0`, and false otherwise.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the number of elements the vector can hold without needing to reallocate.
    #[inline]
    #[must_use]
    pub fn capacity(&self) -> usize {
        // Delegates to the raw buffer, which tracks the allocated capacity.
        self.buf.capacity()
    }
375 | |
    /// Reserves enough capacity for at least `additional` more elements to be inserted in the
    /// vector. After this call to `reserve`, capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `isize::MAX` bytes.
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        // `wrapping_sub` is fine: `len <= capacity` is an invariant of this
        // type, so the subtraction can never actually wrap.
        if additional > self.capacity().wrapping_sub(self.len) {
            unsafe { self.buf.grow_amortized(self.len, additional) };
        }
    }

    /// Reserves enough capacity for exactly `additional` more elements to be inserted in the
    /// vector. After this call to `reserve`, capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity exceeds `isize::MAX` bytes.
    #[inline]
    pub fn reserve_exact(&mut self, additional: usize) {
        // Same wrapping_sub reasoning as `reserve`; grows exactly, not amortized.
        if additional > self.capacity().wrapping_sub(self.len) {
            unsafe { self.buf.grow_exact(self.len, additional) };
        }
    }
403 | |
    /// Returns the alignment of the vector.
    #[inline]
    #[must_use]
    pub fn alignment(&self) -> usize {
        self.buf.align()
    }

    /// Returns a pointer to the objects held by the vector.
    #[inline]
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        self.buf.as_ptr()
    }

    /// Returns a mutable pointer to the objects held by the vector.
    #[inline]
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.buf.as_mut_ptr()
    }
424 | |
425 | /// Returns a reference to a slice over the objects held by the vector. |
426 | #[inline ] |
427 | #[must_use ] |
428 | pub fn as_slice(&self) -> &[T] { |
429 | let len = self.len(); |
430 | let ptr = self.as_ptr(); |
431 | |
432 | // ptr points to `len` initialized elements and is properly aligned since |
433 | // self.align is at least `align_of::<T>()` |
434 | unsafe { core::slice::from_raw_parts(ptr, len) } |
435 | } |
436 | |
437 | /// Returns a mutable reference to a slice over the objects held by the vector. |
438 | #[inline ] |
439 | #[must_use ] |
440 | pub fn as_mut_slice(&mut self) -> &mut [T] { |
441 | let len = self.len(); |
442 | let ptr = self.as_mut_ptr(); |
443 | |
444 | // ptr points to `len` initialized elements and is properly aligned since |
445 | // self.align is at least `align_of::<T>()` |
446 | unsafe { core::slice::from_raw_parts_mut(ptr, len) } |
447 | } |
448 | |
    /// Push the given value to the end of the vector, reallocating if needed.
    #[inline]
    pub fn push(&mut self, value: T) {
        // Grow amortized (geometric) when full so repeated pushes stay O(1) amortized.
        if self.len == self.capacity() {
            unsafe { self.buf.grow_amortized(self.len, 1) };
        }

        // SAFETY: self.capacity is greater than self.len so the write is valid
        unsafe {
            let past_the_end = self.as_mut_ptr().add(self.len);
            past_the_end.write(value);
            self.len += 1;
        }
    }
463 | |
464 | /// Remove the last value from the vector if it exists, otherwise returns `None`. |
465 | #[inline ] |
466 | pub fn pop(&mut self) -> Option<T> { |
467 | if self.len == 0 { |
468 | None |
469 | } else { |
470 | self.len -= 1; |
471 | // SAFETY: the len was greater than one so we had one valid element at the last address |
472 | Some(unsafe { self.as_mut_ptr().add(self.len()).read() }) |
473 | } |
474 | } |
475 | |
476 | /// Shrinks the capacity of the vector with a lower bound. |
477 | /// The capacity will remain at least as large as both the length and the supplied value. |
478 | /// If the current capacity is less than the lower limit, this is a no-op. |
479 | #[inline ] |
480 | pub fn shrink_to(&mut self, min_capacity: usize) { |
481 | let min_capacity = min_capacity.max(self.len()); |
482 | if self.capacity() > min_capacity { |
483 | unsafe { self.buf.shrink_to(min_capacity) }; |
484 | } |
485 | } |
486 | |
487 | /// Shrinks the capacity of the vector as much as possible without dropping any elements. |
488 | #[inline ] |
489 | pub fn shrink_to_fit(&mut self) { |
490 | if self.capacity() > self.len { |
491 | unsafe { self.buf.shrink_to(self.len) }; |
492 | } |
493 | } |
494 | |
    /// Drops the last elements of the vector until its length is equal to `len`.
    /// If `len` is greater than or equal to `self.len()`, this is a no-op.
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        if len < self.len {
            let old_len = self.len;
            // Shrink the length *before* dropping, so a panicking destructor
            // cannot cause the same tail elements to be dropped again.
            self.len = len;
            unsafe {
                let ptr = self.as_mut_ptr();
                core::ptr::slice_from_raw_parts_mut(ptr.add(len), old_len - len).drop_in_place()
            }
        }
    }

    /// Drops the all the elements of the vector, setting its length to `0`.
    #[inline]
    pub fn clear(&mut self) {
        let old_len = self.len;
        // Length goes to zero first, for the same panic-safety reason as `truncate`.
        self.len = 0;
        unsafe {
            let ptr = self.as_mut_ptr();
            core::ptr::slice_from_raw_parts_mut(ptr, old_len).drop_in_place()
        }
    }
519 | |
520 | /// Converts the vector into [`ABox<T>`]. |
521 | /// This will drop any excess capacity. |
522 | #[inline ] |
523 | pub fn into_boxed_slice(self) -> ABox<[T], A> { |
524 | let mut this = self; |
525 | this.shrink_to_fit(); |
526 | let (ptr, align, len, _) = this.into_raw_parts(); |
527 | unsafe { |
528 | ABox::<[T], A>::from_raw_parts(align, core::ptr::slice_from_raw_parts_mut(ptr, len)) |
529 | } |
530 | } |
531 | |
    /// Collects an iterator into an [`AVec<T>`] with the provided alignment.
    #[inline]
    pub fn from_iter<I: IntoIterator<Item = T>>(align: usize, iter: I) -> Self {
        Self::from_iter_impl(iter.into_iter(), align)
    }

    /// Collects a slice into an [`AVec<T>`] with the provided alignment.
    #[inline]
    pub fn from_slice(align: usize, slice: &[T]) -> Self
    where
        T: Clone,
    {
        let len = slice.len();
        let mut vec = AVec::with_capacity(align, len);
        {
            let len = &mut vec.len;
            let ptr: *mut T = vec.buf.ptr.as_ptr();

            // Bump the length after every write so that if `clone` panics,
            // the elements cloned so far are still dropped by `vec`.
            for (i, item) in slice.iter().enumerate() {
                unsafe { ptr.add(i).write(item.clone()) };
                *len += 1;
            }
        }
        vec
    }
557 | |
    // Builds the vector from an iterator; `align` is fixed up by `with_capacity`.
    fn from_iter_impl<I: Iterator<Item = T>>(mut iter: I, align: usize) -> Self {
        let (lower_bound, upper_bound) = iter.size_hint();
        let mut this = Self::with_capacity(align, lower_bound);

        if upper_bound == Some(lower_bound) {
            // Exact size hint: write every item straight into the reserved buffer.
            let len = &mut this.len;
            let ptr = this.buf.ptr.as_ptr();

            let first_chunk = iter.take(lower_bound);
            first_chunk.enumerate().for_each(|(i, item)| {
                unsafe { ptr.add(i).write(item) };
                // Incrementing per item keeps already-written elements owned by
                // `this` if the iterator panics mid-way.
                *len += 1;
            });
        } else {
            // Inexact hint: fill the reserved prefix directly, then fall back
            // to `push` (which grows as needed) for the remainder.
            let len = &mut this.len;
            let ptr = this.buf.ptr.as_ptr();

            let first_chunk = (&mut iter).take(lower_bound);
            first_chunk.enumerate().for_each(|(i, item)| {
                unsafe { ptr.add(i).write(item) };
                *len += 1;
            });
            iter.for_each(|item| {
                this.push(item);
            });
        }

        this
    }
587 | |
    // Support for the `avec!`/`avec_rt!` `[elem; count]` form.
    #[inline(always)]
    #[doc(hidden)]
    pub fn __from_elem(align: usize, elem: T, count: usize) -> Self
    where
        T: Clone,
    {
        Self::from_iter(align, core::iter::repeat(elem).take(count))
    }

    #[inline(always)]
    #[doc(hidden)]
    /// this is unsafe do not call this in user code
    pub fn __copy_from_ptr(align: usize, src: *const T, len: usize) -> Self {
        // NOTE(review): despite not being declared `unsafe fn`, this requires
        // `src` to be valid for reads of `len` initialized `T`s whose ownership
        // transfers to the result (the macros pass a `ManuallyDrop` array).
        let mut v = Self::with_capacity(align, len);
        let dst = v.as_mut_ptr();
        unsafe { core::ptr::copy_nonoverlapping(src, dst, len) };
        v.len = len;
        v
    }
607 | } |
608 | |
609 | impl<T: Debug, A: Alignment> Debug for AVec<T, A> { |
610 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { |
611 | f.debug_list().entries(self.iter()).finish() |
612 | } |
613 | } |
614 | |
615 | impl<T: Debug + ?Sized, A: Alignment> Debug for ABox<T, A> { |
616 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { |
617 | (&**self).fmt(f) |
618 | } |
619 | } |
620 | |
621 | impl<T: Clone, A: Alignment> Clone for AVec<T, A> { |
622 | fn clone(&self) -> Self { |
623 | Self::from_slice(self.alignment(), self.deref()) |
624 | } |
625 | } |
626 | |
627 | impl<T: Clone, A: Alignment> Clone for ABox<T, A> { |
628 | fn clone(&self) -> Self { |
629 | ABox::new(self.align.alignment(align_of::<T>()), self.deref().clone()) |
630 | } |
631 | } |
632 | |
633 | impl<T: Clone, A: Alignment> Clone for ABox<[T], A> { |
634 | fn clone(&self) -> Self { |
635 | AVec::from_slice(self.align.alignment(align_of::<T>()), self.deref()).into_boxed_slice() |
636 | } |
637 | } |
638 | |
639 | impl<T: PartialEq, A: Alignment> PartialEq for AVec<T, A> { |
640 | fn eq(&self, other: &Self) -> bool { |
641 | self.as_slice().eq(other.as_slice()) |
642 | } |
643 | } |
644 | impl<T: Eq, A: Alignment> Eq for AVec<T, A> {} |
645 | impl<T: PartialOrd, A: Alignment> PartialOrd for AVec<T, A> { |
646 | fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { |
647 | self.as_slice().partial_cmp(other.as_slice()) |
648 | } |
649 | } |
650 | impl<T: Ord, A: Alignment> Ord for AVec<T, A> { |
651 | fn cmp(&self, other: &Self) -> core::cmp::Ordering { |
652 | self.as_slice().cmp(other.as_slice()) |
653 | } |
654 | } |
655 | |
656 | impl<T: PartialEq + ?Sized, A: Alignment> PartialEq for ABox<T, A> { |
657 | fn eq(&self, other: &Self) -> bool { |
658 | (&**self).eq(&**other) |
659 | } |
660 | } |
661 | impl<T: Eq + ?Sized, A: Alignment> Eq for ABox<T, A> {} |
662 | impl<T: PartialOrd + ?Sized, A: Alignment> PartialOrd for ABox<T, A> { |
663 | fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { |
664 | (&**self).partial_cmp(&**other) |
665 | } |
666 | } |
667 | impl<T: Ord + ?Sized, A: Alignment> Ord for ABox<T, A> { |
668 | fn cmp(&self, other: &Self) -> core::cmp::Ordering { |
669 | (&**self).cmp(&**other) |
670 | } |
671 | } |
// SAFETY: AVec/ABox uniquely own their contents through a raw pointer, so
// sending/sharing them is sound exactly when both the element type and the
// alignment wrapper may be sent/shared.
unsafe impl<T: Sync, A: Alignment + Sync> Sync for AVec<T, A> {}
unsafe impl<T: Send, A: Alignment + Send> Send for AVec<T, A> {}
unsafe impl<T: ?Sized + Sync, A: Alignment + Sync> Sync for ABox<T, A> {}
unsafe impl<T: ?Sized + Send, A: Alignment + Send> Send for ABox<T, A> {}
676 | |
#[cfg(feature = "serde")]
mod serde {
    use super::*;
    use ::serde::{Deserialize, Serialize};

    // Serialization delegates to the pointee, so the wire format of
    // `ABox<T>`/`AVec<T>` matches that of `T`/`Vec<T>`. Alignment is not
    // serialized; on deserialization it is re-derived from the const parameter,
    // which is why `Deserialize` is only provided for `ConstAlign`.
    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<T: ?Sized + Serialize, A: Alignment> Serialize for ABox<T, A> {
        #[inline]
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: ::serde::Serializer,
        {
            (&**self).serialize(serializer)
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<T: Serialize, A: Alignment> Serialize for AVec<T, A> {
        #[inline]
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: ::serde::Serializer,
        {
            (&**self).serialize(serializer)
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<'de, T: Deserialize<'de>, const N: usize> Deserialize<'de> for ABox<T, ConstAlign<N>> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: ::serde::Deserializer<'de>,
        {
            Ok(ABox::<T, ConstAlign<N>>::new(
                N,
                T::deserialize(deserializer)?,
            ))
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<'de, T: Deserialize<'de>, const N: usize> Deserialize<'de> for ABox<[T], ConstAlign<N>> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: ::serde::Deserializer<'de>,
        {
            // Deserialize into an AVec first, then drop the excess capacity.
            Ok(AVec::<T, ConstAlign<N>>::deserialize(deserializer)?.into_boxed_slice())
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<'de, T: Deserialize<'de>, const N: usize> Deserialize<'de> for AVec<T, ConstAlign<N>> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: ::serde::Deserializer<'de>,
        {
            // Visitor that pushes every sequence element into an `AVec`.
            struct AVecVisitor<T, const N: usize> {
                _marker: PhantomData<fn() -> AVec<T, ConstAlign<N>>>,
            }

            impl<'de, T: Deserialize<'de>, const N: usize> ::serde::de::Visitor<'de> for AVecVisitor<T, N> {
                type Value = AVec<T, ConstAlign<N>>;

                fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
                    formatter.write_str("a sequence")
                }

                fn visit_seq<S>(self, mut seq: S) -> Result<Self::Value, S::Error>
                where
                    S: ::serde::de::SeqAccess<'de>,
                {
                    // Pre-reserve using the sequence's size hint when available.
                    let mut vec =
                        AVec::<T, ConstAlign<N>>::with_capacity(N, seq.size_hint().unwrap_or(0));

                    while let Some(elem) = seq.next_element::<T>()? {
                        vec.push(elem)
                    }

                    Ok(vec)
                }
            }

            deserializer.deserialize_seq(AVecVisitor {
                _marker: PhantomData,
            })
        }
    }
}
765 | |
/// Create a vector that is aligned to a cache line boundary.
///
/// Passing `0` as the requested alignment lets the default
/// `ConstAlign<CACHELINE_ALIGN>` parameter decide the actual value.
#[macro_export]
macro_rules! avec {
    () => {
        $crate::AVec::<_>::new(0)
    };
    ($elem: expr; $count: expr) => {
        $crate::AVec::<_>::__from_elem(0, $elem, $count)
    };
    ($($elem: expr),*) => {
        {
            // Move the elements into a ManuallyDrop array so they are not
            // dropped here; ownership transfers via the bitwise copy below.
            let __data = ::core::mem::ManuallyDrop::new([$($elem,)*]);
            let __len = __data.len();
            let __ptr = __data.as_ptr();
            // Return the expression directly; the previous `let mut` binding
            // triggered an `unused_mut` warning at every call site.
            $crate::AVec::<_>::__copy_from_ptr(0, __ptr, __len)
        }
    };
}
785 | |
/// Create a vector that is aligned to a runtime alignment value.
#[macro_export]
macro_rules! avec_rt {
    ([$align: expr]$(|)?) => {
        $crate::AVec::<_, $crate::RuntimeAlign>::new($align)
    };
    ([$align: expr]| $elem: expr; $count: expr) => {
        $crate::AVec::<_, $crate::RuntimeAlign>::__from_elem($align, $elem, $count)
    };
    ([$align: expr]| $($elem: expr),*) => {
        {
            let __data = ::core::mem::ManuallyDrop::new([$($elem,)*]);
            let __len = __data.len();
            let __ptr = __data.as_ptr();
            // BUG FIX: this arm must use `RuntimeAlign` like the others; with
            // the default `ConstAlign` parameter the runtime `$align` argument
            // was silently ignored (masked in tests because CACHELINE_ALIGN is
            // a multiple of the alignments they request).
            $crate::AVec::<_, $crate::RuntimeAlign>::__copy_from_ptr($align, __ptr, __len)
        }
    };
}
805 | |
#[cfg(test)]
mod tests {
    use core::iter::repeat;

    use super::*;
    use alloc::vec;

    #[test]
    fn new() {
        // A non-power-of-two request (15) is overridden by the default
        // `ConstAlign<CACHELINE_ALIGN>` parameter.
        let v = AVec::<i32>::new(15);
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), 0);
        assert_eq!(v.alignment(), CACHELINE_ALIGN);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);
        // Zero-sized element types report unbounded capacity.
        let v = AVec::<()>::new(15);
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), usize::MAX);
        assert_eq!(v.alignment(), CACHELINE_ALIGN);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);

        #[repr(align(4096))]
        struct OverAligned;
        // An over-aligned element type raises the effective alignment.
        let v = AVec::<OverAligned>::new(15);
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), usize::MAX);
        assert_eq!(v.alignment(), 4096);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);
        assert_eq!(v.as_ptr().align_offset(4096), 0);
    }

    #[test]
    fn collect() {
        // Exercises both branches of `from_iter_impl` element layout (sized / ZST).
        let v = AVec::<_>::from_iter(64, 0..4);
        assert_eq!(&*v, &[0, 1, 2, 3]);
        let v = AVec::<_>::from_iter(64, repeat(()).take(4));
        assert_eq!(&*v, &[(), (), (), ()]);
    }

    #[test]
    fn push() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(&*v, &[0, 1, 2, 3]);

        // Pushing past the initial capacity forces reallocation.
        let mut v = AVec::<_>::from_iter(64, 0..4);
        v.push(4);
        v.push(5);
        v.push(6);
        v.push(7);
        assert_eq!(&*v, &[0, 1, 2, 3, 4, 5, 6, 7]);

        let mut v = AVec::<_>::from_iter(64, repeat(()).take(4));
        v.push(());
        v.push(());
        v.push(());
        v.push(());
        assert_eq!(&*v, &[(), (), (), (), (), (), (), ()]);
    }

    #[test]
    fn pop() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);
        v.push(3);
        // Elements come back in LIFO order; popping empty yields None.
        assert_eq!(v.pop(), Some(3));
        assert_eq!(v.pop(), Some(2));
        assert_eq!(v.pop(), Some(1));
        assert_eq!(v.pop(), Some(0));
        assert_eq!(v.pop(), None);
        assert_eq!(v.pop(), None);
        assert_eq!(&*v, &[]);
        assert!(v.is_empty());

        let mut v = AVec::<()>::new(16);
        v.push(());
        v.push(());
        v.push(());
        v.push(());
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), None);
        assert_eq!(v.pop(), None);
        assert_eq!(&*v, &[]);
        assert!(v.is_empty());
    }

    #[test]
    fn shrink() {
        let mut v = AVec::<i32>::with_capacity(16, 10);
        v.push(0);
        v.push(1);
        v.push(2);

        assert_eq!(v.capacity(), 10);
        v.shrink_to_fit();
        assert_eq!(v.len(), 3);
        assert_eq!(v.capacity(), 3);

        let mut v = AVec::<i32>::with_capacity(16, 10);
        v.push(0);
        v.push(1);
        v.push(2);

        assert_eq!(v.capacity(), 10);
        // `shrink_to(0)` is clamped by the length, so capacity becomes 3.
        v.shrink_to(0);
        assert_eq!(v.len(), 3);
        assert_eq!(v.capacity(), 3);
    }

    #[test]
    fn truncate() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);

        v.truncate(1);
        assert_eq!(v.len(), 1);
        assert_eq!(&*v, &[0]);

        v.clear();
        assert_eq!(v.len(), 0);
        assert_eq!(&*v, &[]);

        // Same sequence for a zero-sized element type.
        let mut v = AVec::<()>::new(16);
        v.push(());
        v.push(());
        v.push(());

        v.truncate(1);
        assert_eq!(v.len(), 1);
        assert_eq!(&*v, &[()]);

        v.clear();
        assert_eq!(v.len(), 0);
        assert_eq!(&*v, &[]);
    }

    #[test]
    fn into_boxed_slice() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);

        let boxed = v.into_boxed_slice();
        assert_eq!(&*boxed, &[0, 1, 2]);
    }

    #[test]
    fn box_new() {
        let boxed = ABox::<_>::new(64, 3);
        assert_eq!(&*boxed, &3);
    }

    #[test]
    fn box_clone() {
        let boxed = ABox::<_>::new(64, 3);
        assert_eq!(boxed, boxed.clone());
    }

    #[test]
    fn box_slice_clone() {
        let boxed = AVec::<_>::from_iter(64, 0..123).into_boxed_slice();
        assert_eq!(boxed, boxed.clone());
    }

    #[test]
    fn macros() {
        let u: AVec<()> = avec![];
        assert_eq!(u.len(), 0);
        assert_eq!(u.as_ptr().align_offset(CACHELINE_ALIGN), 0);

        let v = avec![0; 4];
        assert_eq!(v.len(), 4);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);

        // Element expressions with heap contents are moved, not copied element-wise.
        let mut w = avec![vec![0, 1], vec![3, 4], vec![5, 6], vec![7, 8]];
        w[0].push(2);
        w[3].pop();
        assert_eq!(w.len(), 4);
        assert_eq!(w.as_ptr().align_offset(CACHELINE_ALIGN), 0);
        assert_eq!(w[0], vec![0, 1, 2]);
        assert_eq!(w[1], vec![3, 4]);
        assert_eq!(w[2], vec![5, 6]);
        assert_eq!(w[3], vec![7]);
    }

    #[test]
    fn macros_rt() {
        let u: AVec<(), _> = avec_rt![[32]];
        assert_eq!(u.len(), 0);
        assert_eq!(u.as_ptr().align_offset(32), 0);

        let v = avec_rt![[32]| 0; 4];
        assert_eq!(v.len(), 4);
        assert_eq!(v.as_ptr().align_offset(32), 0);

        // NOTE(review): these alignments divide CACHELINE_ALIGN, so this test
        // would pass even if the macro ignored the runtime alignment.
        let mut w = avec_rt![[64] | vec![0, 1], vec![3, 4], vec![5, 6], vec![7, 8]];
        w[0].push(2);
        w[3].pop();
        assert_eq!(w.len(), 4);
        assert_eq!(w.as_ptr().align_offset(64), 0);
        assert_eq!(w[0], vec![0, 1, 2]);
        assert_eq!(w[1], vec![3, 4]);
        assert_eq!(w[2], vec![5, 6]);
        assert_eq!(w[3], vec![7]);
    }
}
1022 | |