1 | use std::cell::UnsafeCell; |
2 | use std::fmt; |
3 | use std::ops; |
4 | |
5 | /// `AtomicUsize` providing an additional `unsync_load` function. |
/// `AtomicUsize` providing an additional `unsync_load` function.
pub(crate) struct AtomicUsize {
    // The atomic is kept in an `UnsafeCell` so that `unsync_load` can
    // perform a plain (non-atomic) read of the underlying value via a
    // raw pointer when the caller guarantees no concurrent mutation.
    inner: UnsafeCell<std::sync::atomic::AtomicUsize>,
}
9 | |
// SAFETY: `UnsafeCell` removes the auto-derived `Sync` impl, but the wrapped
// `std::sync::atomic::AtomicUsize` is itself `Send + Sync`; the only
// non-atomic access (`unsync_load`) places the burden of avoiding data races
// on its (unsafe) callers, so sharing the wrapper across threads is sound.
unsafe impl Send for AtomicUsize {}
unsafe impl Sync for AtomicUsize {}
12 | |
13 | impl AtomicUsize { |
14 | pub(crate) const fn new(val: usize) -> AtomicUsize { |
15 | let inner: UnsafeCell = UnsafeCell::new(std::sync::atomic::AtomicUsize::new(val)); |
16 | AtomicUsize { inner } |
17 | } |
18 | |
19 | /// Performs an unsynchronized load. |
20 | /// |
21 | /// # Safety |
22 | /// |
23 | /// All mutations must have happened before the unsynchronized load. |
24 | /// Additionally, there must be no concurrent mutations. |
25 | pub(crate) unsafe fn unsync_load(&self) -> usize { |
26 | core::ptr::read(self.inner.get() as *const usize) |
27 | } |
28 | |
29 | pub(crate) fn with_mut<R>(&mut self, f: impl FnOnce(&mut usize) -> R) -> R { |
30 | // safety: we have mutable access |
31 | f(unsafe { (*self.inner.get()).get_mut() }) |
32 | } |
33 | } |
34 | |
35 | impl ops::Deref for AtomicUsize { |
36 | type Target = std::sync::atomic::AtomicUsize; |
37 | |
38 | fn deref(&self) -> &Self::Target { |
39 | // safety: it is always safe to access `&self` fns on the inner value as |
40 | // we never perform unsafe mutations. |
41 | unsafe { &*self.inner.get() } |
42 | } |
43 | } |
44 | |
45 | impl ops::DerefMut for AtomicUsize { |
46 | fn deref_mut(&mut self) -> &mut Self::Target { |
47 | // safety: we hold `&mut self` |
48 | unsafe { &mut *self.inner.get() } |
49 | } |
50 | } |
51 | |
52 | impl fmt::Debug for AtomicUsize { |
53 | fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { |
54 | (**self).fmt(fmt) |
55 | } |
56 | } |
57 | |