//! Synchronization primitive for initializing a value once, allowing others to get a reference to the value.

use core::cell::UnsafeCell;
use core::mem::ManuallyDrop;
use core::sync::atomic::{AtomicBool, Ordering};

/// The `LazyLock` is a synchronization primitive that allows a value
/// to be initialized exactly once and lets others obtain a reference
/// to that value. This is useful for lazy initialization of a static
/// value.
///
/// # Example
/// ```
/// use embassy_sync::lazy_lock::LazyLock;
///
/// // Define a static value that will be lazily initialized
/// // at runtime on the first access.
/// static VALUE: LazyLock<u32> = LazyLock::new(|| 20);
///
/// let reference = VALUE.get();
/// assert_eq!(reference, &20);
/// ```
pub struct LazyLock<T, F = fn() -> T> {
    init: AtomicBool,
    data: UnsafeCell<Data<T, F>>,
}

union Data<T, F> {
    value: ManuallyDrop<T>,
    f: ManuallyDrop<F>,
}

unsafe impl<T, F> Sync for LazyLock<T, F> {}

impl<T, F: FnOnce() -> T> LazyLock<T, F> {
    /// Create a new uninitialized `LazyLock`.
    pub const fn new(init_fn: F) -> Self {
        Self {
            init: AtomicBool::new(false),
            data: UnsafeCell::new(Data {
                f: ManuallyDrop::new(init_fn),
            }),
        }
    }

    /// Get a reference to the underlying value, initializing it if it
    /// has not been done already.
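    ///
    /// # Example
    /// A minimal usage sketch:
    /// ```
    /// use embassy_sync::lazy_lock::LazyLock;
    ///
    /// static VALUE: LazyLock<u32> = LazyLock::new(|| 20);
    ///
    /// // The first call runs the initializer; later calls reuse the value.
    /// assert_eq!(VALUE.get(), &20);
    /// assert_eq!(VALUE.get(), &20);
    /// ```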
    #[inline]
    pub fn get(&self) -> &T {
        self.ensure_init_fast();
        unsafe { &(*self.data.get()).value }
    }

    /// Consume the `LazyLock`, returning the underlying value. The
    /// initialization function will be called if it has not been
    /// called already.
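    ///
    /// # Example
    /// A minimal usage sketch:
    /// ```
    /// use embassy_sync::lazy_lock::LazyLock;
    ///
    /// let lazy: LazyLock<u32> = LazyLock::new(|| 20);
    /// // The initializer runs here, since `get` was never called.
    /// assert_eq!(lazy.into_inner(), 20);
    /// ```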
    #[inline]
    pub fn into_inner(self) -> T {
        self.ensure_init_fast();
        let this = ManuallyDrop::new(self);
        let data = unsafe { core::ptr::read(&this.data) }.into_inner();

        ManuallyDrop::into_inner(unsafe { data.value })
    }

    /// Initialize the `LazyLock` if it has not been initialized yet.
    /// This function is a fast path to [`Self::ensure_init`] that
    /// avoids taking a critical section in the common case where the
    /// value has already been initialized.
    /// When this function returns, `self.data` is guaranteed to be
    /// initialized and visible on the current core.
    #[inline]
    fn ensure_init_fast(&self) {
        if !self.init.load(Ordering::Acquire) {
            self.ensure_init();
        }
    }

    /// Initialize the `LazyLock` if it has not been initialized yet.
    /// When this function returns, `self.data` is guaranteed to be
    /// initialized and visible on the current core.
    fn ensure_init(&self) {
        critical_section::with(|_| {
            if !self.init.load(Ordering::Acquire) {
                let data = unsafe { &mut *self.data.get() };
                let f = unsafe { ManuallyDrop::take(&mut data.f) };
                let value = f();
                data.value = ManuallyDrop::new(value);

                self.init.store(true, Ordering::Release);
            }
        });
    }
}

impl<T, F> Drop for LazyLock<T, F> {
    fn drop(&mut self) {
        if self.init.load(Ordering::Acquire) {
            unsafe { ManuallyDrop::drop(&mut self.data.get_mut().value) };
        } else {
            unsafe { ManuallyDrop::drop(&mut self.data.get_mut().f) };
        }
    }
}

#[cfg(test)]
mod tests {
    use core::sync::atomic::{AtomicU32, Ordering};

    use super::*;

    #[test]
    fn test_lazy_lock() {
        static VALUE: LazyLock<u32> = LazyLock::new(|| 20);
        let reference = VALUE.get();
        assert_eq!(reference, &20);
    }

    #[test]
    fn test_lazy_lock_into_inner() {
        let lazy: LazyLock<u32> = LazyLock::new(|| 20);
        let value = lazy.into_inner();
        assert_eq!(value, 20);
    }
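
    // Additional sketch (uses only the API defined above): the
    // initializer should run exactly once, no matter how many
    // times `get` is called.
    #[test]
    fn test_lazy_lock_init_runs_once() {
        static INIT_COUNT: AtomicU32 = AtomicU32::new(0);
        static VALUE: LazyLock<u32> = LazyLock::new(|| {
            INIT_COUNT.fetch_add(1, Ordering::Relaxed);
            20
        });

        assert_eq!(VALUE.get(), &20);
        assert_eq!(VALUE.get(), &20);
        assert_eq!(INIT_COUNT.load(Ordering::Relaxed), 1);
    }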

    static DROP_CHECKER: AtomicU32 = AtomicU32::new(0);
    struct DropCheck;

    impl Drop for DropCheck {
        fn drop(&mut self) {
            DROP_CHECKER.fetch_add(1, Ordering::Acquire);
        }
    }

    #[test]
    fn test_lazy_drop() {
        let lazy: LazyLock<DropCheck> = LazyLock::new(|| DropCheck);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 0);
        lazy.get();
        drop(lazy);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 1);

        let dropper = DropCheck;
        let lazy_fn: LazyLock<u32, _> = LazyLock::new(move || {
            let _a = dropper;
            20
        });
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 1);
        drop(lazy_fn);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 2);
    }
}