1use super::FreeList;
2use crate::sync::{
3 atomic::{AtomicUsize, Ordering},
4 hint, UnsafeCell,
5};
6use crate::{cfg, clear::Clear, Pack, Tid};
7use std::{fmt, marker::PhantomData, mem, ptr, thread};
8
/// A single slot in a page.
///
/// A slot either sits on the page's free list (in which case `next` is
/// meaningful) or holds a live value whose access is coordinated through the
/// packed `lifecycle` word (state bits, reference count, and generation).
pub(crate) struct Slot<T, C> {
    /// Packed word holding the slot's lifecycle state bits, reference count,
    /// and generation. All concurrent coordination goes through this atomic.
    lifecycle: AtomicUsize,
    /// The offset of the next item on the free list.
    next: UnsafeCell<usize>,
    /// The data stored in the slot.
    item: UnsafeCell<T>,
    /// Ties the slot to a `cfg::Config` without storing a value of it.
    _cfg: PhantomData<fn(C)>,
}
17
/// A reference-counted borrow of a slot's value.
///
/// Releasing the guard decrements the slot's reference count (see
/// `Slot::release`).
#[derive(Debug)]
pub(crate) struct Guard<T, C: cfg::Config = cfg::DefaultConfig> {
    /// Raw pointer to the guarded slot; may dangle if the guard outlives the
    /// slab (see the safety notes on the accessor methods).
    slot: ptr::NonNull<Slot<T, C>>,
}
22
/// A guard granting exclusive access to a slot while it is being initialized.
#[derive(Debug)]
pub(crate) struct InitGuard<T, C: cfg::Config = cfg::DefaultConfig> {
    /// Raw pointer to the slot being initialized.
    slot: ptr::NonNull<Slot<T, C>>,
    /// The packed lifecycle word observed when the guard was created.
    curr_lifecycle: usize,
    /// Set once the guard has been released, making `release2` idempotent.
    released: bool,
}
29
/// The generation counter portion of a slot's packed lifecycle word.
///
/// Accesses at a stale generation are rejected (see `Slot::get` and
/// `Slot::mark_release`), guarding reuse of slot indices.
#[repr(transparent)]
pub(crate) struct Generation<C = cfg::DefaultConfig> {
    value: usize,
    /// Ties the generation to a `cfg::Config` without storing a value of it.
    _cfg: PhantomData<fn(C)>,
}
35
/// The reference-count portion of a slot's packed lifecycle word.
#[repr(transparent)]
pub(crate) struct RefCount<C = cfg::DefaultConfig> {
    value: usize,
    /// Ties the ref count to a `cfg::Config` without storing a value of it.
    _cfg: PhantomData<fn(C)>,
}
41
/// The state-bit portion of a slot's packed lifecycle word.
pub(crate) struct Lifecycle<C> {
    state: State,
    /// Ties the lifecycle to a `cfg::Config` without storing a value of it.
    _cfg: PhantomData<fn(C)>,
}
/// Newtype for packing/unpacking a `Generation` at its position inside the
/// lifecycle word (above the ref count; see the `Pack` impl below), as
/// opposed to its position inside an index word.
struct LifecycleGen<C>(Generation<C>);
47
/// The possible lifecycle states of a slot, stored in the two
/// least-significant bits of the lifecycle word.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
#[repr(usize)]
enum State {
    /// The slot holds a live value.
    Present = 0b00,
    /// The slot is marked for removal once all references are dropped.
    Marked = 0b01,
    /// The slot is being (or has been) removed.
    Removing = 0b11,
}
55
impl<C: cfg::Config> Pack<C> for Generation<C> {
    /// Use all the remaining bits in the word for the generation counter, minus
    /// any bits reserved by the user.
    const LEN: usize = (cfg::WIDTH - C::RESERVED_BITS) - Self::SHIFT;

    type Prev = Tid<C>;

    #[inline(always)]
    fn from_usize(u: usize) -> Self {
        // The unpacked value must fit in the bits allotted to the generation.
        debug_assert!(u <= Self::BITS);
        Self::new(u)
    }

    #[inline(always)]
    fn as_usize(&self) -> usize {
        self.value
    }
}
74
impl<C: cfg::Config> Generation<C> {
    /// Creates a new generation from a raw counter value.
    fn new(value: usize) -> Self {
        Self {
            value,
            _cfg: PhantomData,
        }
    }
}
83
// Slot methods which should work across all trait bounds
impl<T, C> Slot<T, C>
where
    C: cfg::Config,
{
    /// Returns the offset of the next slot in the free list.
    ///
    /// This is only meaningful while the slot is linked into the free list.
    #[inline(always)]
    pub(super) fn next(&self) -> usize {
        self.next.with(|next| unsafe { *next })
    }

    /// Returns a reference to the value stored in the slot.
    #[inline(always)]
    pub(crate) fn value(&self) -> &T {
        self.item.with(|item| unsafe { &*item })
    }

    /// Sets the offset of the next slot in the free list.
    #[inline(always)]
    pub(super) fn set_next(&self, next: usize) {
        self.next.with_mut(|n| unsafe {
            (*n) = next;
        })
    }

    /// Tries to acquire a reference-counted [`Guard`] for this slot's value.
    ///
    /// Returns `None` if the slot is no longer accessible at `gen`: the
    /// generation has changed, the slot is marked or being removed, or the
    /// reference count is saturated (see `RefCount::incr`).
    #[inline(always)]
    pub(crate) fn get(&self, gen: Generation<C>) -> Option<Guard<T, C>> {
        let mut lifecycle = self.lifecycle.load(Ordering::Acquire);
        loop {
            // Unpack the current state.
            let state = Lifecycle::<C>::from_packed(lifecycle);
            let current_gen = LifecycleGen::<C>::from_packed(lifecycle).0;
            let refs = RefCount::<C>::from_packed(lifecycle);

            test_println!(
                "-> get {:?}; current_gen={:?}; lifecycle={:#x}; state={:?}; refs={:?};",
                gen,
                current_gen,
                lifecycle,
                state,
                refs,
            );

            // Is it okay to access this slot? The accessed generation must be
            // current, and the slot must not be in the process of being
            // removed. If we can no longer access the slot at the given
            // generation, return `None`.
            if gen != current_gen || state != Lifecycle::PRESENT {
                test_println!("-> get: no longer exists!");
                return None;
            }

            // Try to increment the slot's ref count by one.
            let new_refs = refs.incr()?;
            match self.lifecycle.compare_exchange(
                lifecycle,
                new_refs.pack(current_gen.pack(state.pack(0))),
                Ordering::AcqRel,
                Ordering::Acquire,
            ) {
                Ok(_) => {
                    test_println!("-> {:?}", new_refs);
                    return Some(Guard {
                        slot: ptr::NonNull::from(self),
                    });
                }
                Err(actual) => {
                    // Another thread modified the slot's state before us! We
                    // need to retry with the new state.
                    //
                    // Since the new state may mean that the accessed generation
                    // is no longer valid, we'll check again on the next
                    // iteration of the loop.
                    test_println!("-> get: retrying; lifecycle={:#x};", actual);
                    lifecycle = actual;
                }
            };
        }
    }

    /// Marks this slot to be released, returning `true` if the slot can be
    /// mutated *now* and `false` otherwise.
    ///
    /// This method checks if there are any references to this slot. If there _are_ valid
    /// references, it just marks them for modification and returns and the next thread calling
    /// either `clear_storage` or `remove_value` will try and modify the storage.
    ///
    /// Returns `None` if the slot is no longer at generation `gen`, or is
    /// already being removed.
    fn mark_release(&self, gen: Generation<C>) -> Option<bool> {
        let mut lifecycle = self.lifecycle.load(Ordering::Acquire);
        let mut curr_gen;

        // Try to advance the slot's state to "MARKED", which indicates that it
        // should be removed when it is no longer concurrently accessed.
        loop {
            curr_gen = LifecycleGen::from_packed(lifecycle).0;
            test_println!(
                "-> mark_release; gen={:?}; current_gen={:?};",
                gen,
                curr_gen
            );

            // Is the slot still at the generation we are trying to remove?
            if gen != curr_gen {
                return None;
            }

            let state = Lifecycle::<C>::from_packed(lifecycle).state;
            test_println!("-> mark_release; state={:?};", state);
            match state {
                State::Removing => {
                    test_println!("--> mark_release; cannot release (already removed!)");
                    return None;
                }
                State::Marked => {
                    test_println!("--> mark_release; already marked;");
                    break;
                }
                State::Present => {}
            };

            // Set the new state to `MARKED`.
            let new_lifecycle = Lifecycle::<C>::MARKED.pack(lifecycle);
            test_println!(
                "-> mark_release; old_lifecycle={:#x}; new_lifecycle={:#x};",
                lifecycle,
                new_lifecycle
            );

            match self.lifecycle.compare_exchange(
                lifecycle,
                new_lifecycle,
                Ordering::AcqRel,
                Ordering::Acquire,
            ) {
                Ok(_) => break,
                Err(actual) => {
                    test_println!("-> mark_release; retrying");
                    lifecycle = actual;
                }
            }
        }

        // Unpack the current reference count to see if we can remove the slot now.
        let refs = RefCount::<C>::from_packed(lifecycle);
        test_println!("-> mark_release: marked; refs={:?};", refs);

        // Are there currently outstanding references to the slot? If so, it
        // will have to be removed when those references are dropped.
        Some(refs.value == 0)
    }

    /// Mutates this slot.
    ///
    /// This method spins until no references to this slot are left, and calls the mutator.
    /// The mutator receives `None` when the value has already been removed at
    /// generation `gen` by another thread.
    fn release_with<F, M, R>(&self, gen: Generation<C>, offset: usize, free: &F, mutator: M) -> R
    where
        F: FreeList<C>,
        M: FnOnce(Option<&mut T>) -> R,
    {
        let mut lifecycle = self.lifecycle.load(Ordering::Acquire);
        let mut advanced = false;
        // Exponential spin backoff while waiting for the slot to be released.
        let mut spin_exp = 0;
        let next_gen = gen.advance();
        loop {
            let current_gen = Generation::from_packed(lifecycle);
            test_println!("-> release_with; lifecycle={:#x}; expected_gen={:?}; current_gen={:?}; next_gen={:?};",
                lifecycle,
                gen,
                current_gen,
                next_gen
            );

            // First, make sure we are actually able to remove the value.
            // If we're going to remove the value, the generation has to match
            // the value that `remove_value` was called with...unless we've
            // already stored the new generation.
            if (!advanced) && gen != current_gen {
                test_println!("-> already removed!");
                return mutator(None);
            }

            match self.lifecycle.compare_exchange(
                lifecycle,
                next_gen.pack(lifecycle),
                Ordering::AcqRel,
                Ordering::Acquire,
            ) {
                Ok(actual) => {
                    // If we're in this state, we have successfully advanced to
                    // the next generation.
                    advanced = true;

                    // Make sure that there are no outstanding references.
                    let refs = RefCount::<C>::from_packed(actual);
                    test_println!("-> advanced gen; lifecycle={:#x}; refs={:?};", actual, refs);
                    if refs.value == 0 {
                        test_println!("-> ok to remove!");
                        // safety: we've modified the generation of this slot and any other thread
                        // calling this method will exit out at the generation check above in the
                        // next iteration of the loop.
                        let value = self
                            .item
                            .with_mut(|item| mutator(Some(unsafe { &mut *item })));
                        free.push(offset, self);
                        return value;
                    }

                    // Otherwise, a reference must be dropped before we can
                    // remove the value. Spin here until there are no refs remaining...
                    test_println!("-> refs={:?}; spin...", refs);

                    // Back off, spinning and possibly yielding.
                    exponential_backoff(&mut spin_exp);
                }
                Err(actual) => {
                    test_println!("-> retrying; lifecycle={:#x};", actual);
                    lifecycle = actual;
                    // The state changed; reset the spin backoff.
                    spin_exp = 0;
                }
            }
        }
    }

    /// Initialize a slot
    ///
    /// This method initializes and sets up the state for a slot. When being used in `Pool`, we
    /// only need to ensure that the `Slot` is in the right `state`, while when being used in a
    /// `Slab` we want to insert a value into it, as the memory is not initialized.
    ///
    /// Returns `None` if the slot is still referenced by other threads.
    pub(crate) fn init(&self) -> Option<InitGuard<T, C>> {
        // Load the current lifecycle state.
        let lifecycle = self.lifecycle.load(Ordering::Acquire);
        let gen = LifecycleGen::<C>::from_packed(lifecycle).0;
        let refs = RefCount::<C>::from_packed(lifecycle);

        test_println!(
            "-> initialize_state; state={:?}; gen={:?}; refs={:?};",
            Lifecycle::<C>::from_packed(lifecycle),
            gen,
            refs,
        );

        if refs.value != 0 {
            test_println!("-> initialize while referenced! cancelling");
            return None;
        }

        Some(InitGuard {
            slot: ptr::NonNull::from(self),
            curr_lifecycle: lifecycle,
            released: false,
        })
    }
}
335
// Slot impl which _needs_ an `Option` for self.item, this is for `Slab` to use.
impl<T, C> Slot<Option<T>, C>
where
    C: cfg::Config,
{
    /// Returns `true` if this slot currently holds no value.
    fn is_empty(&self) -> bool {
        self.item.with(|item| unsafe { (*item).is_none() })
    }

    /// Insert a value into a slot
    ///
    /// We first initialize the state and then insert the passed in value into the slot.
    ///
    /// Returns the slot's generation on success, or `None` if the slot could
    /// not be initialized (it was still referenced).
    #[inline]
    pub(crate) fn insert(&self, value: &mut Option<T>) -> Option<Generation<C>> {
        debug_assert!(self.is_empty(), "inserted into full slot");
        debug_assert!(value.is_some(), "inserted twice");

        let mut guard = self.init()?;
        let gen = guard.generation();
        unsafe {
            // Safety: Accessing the value of an `InitGuard` is unsafe because
            // it has a pointer to a slot which may dangle. Here, we know the
            // pointed slot is alive because we have a reference to it in scope,
            // and the `InitGuard` will be dropped when this function returns.
            mem::swap(guard.value_mut(), value);
            guard.release();
        };
        test_println!("-> inserted at {:?}", gen);

        Some(gen)
    }

    /// Tries to remove the value in the slot, returning `true` if the value was
    /// removed.
    ///
    /// This method tries to remove the value in the slot. If there are existing references, then
    /// the slot is marked for removal and the next thread calling either this method or
    /// `remove_value` will do the work instead.
    #[inline]
    pub(super) fn try_remove_value<F: FreeList<C>>(
        &self,
        gen: Generation<C>,
        offset: usize,
        free: &F,
    ) -> bool {
        let should_remove = match self.mark_release(gen) {
            // If `mark_release` returns `Some`, a value exists at this
            // generation. The bool inside this option indicates whether or not
            // _we're_ allowed to remove the value.
            Some(should_remove) => should_remove,
            // Otherwise, the generation we tried to remove has already expired,
            // and we did not mark anything for removal.
            None => {
                test_println!(
                    "-> try_remove_value; nothing exists at generation={:?}",
                    gen
                );
                return false;
            }
        };

        test_println!("-> try_remove_value; marked!");

        if should_remove {
            // We're allowed to remove the slot now!
            test_println!("-> try_remove_value; can remove now");
            self.remove_value(gen, offset, free);
        }

        true
    }

    /// Removes the value from the slot (blocking until all references are
    /// dropped) and pushes the slot back onto the free list.
    ///
    /// Returns the removed value, or `None` if it was already removed.
    #[inline]
    pub(super) fn remove_value<F: FreeList<C>>(
        &self,
        gen: Generation<C>,
        offset: usize,
        free: &F,
    ) -> Option<T> {
        self.release_with(gen, offset, free, |item| item.and_then(Option::take))
    }
}
418
// These impls are specific to `Pool`
impl<T, C> Slot<T, C>
where
    T: Default + Clear,
    C: cfg::Config,
{
    /// Creates a new slot linked to free-list offset `next`, with its state
    /// initialized to `REMOVING` and a default-constructed value.
    pub(in crate::page) fn new(next: usize) -> Self {
        Self {
            lifecycle: AtomicUsize::new(Lifecycle::<C>::REMOVING.as_usize()),
            item: UnsafeCell::new(T::default()),
            next: UnsafeCell::new(next),
            _cfg: PhantomData,
        }
    }

    /// Try to clear this slot's storage
    ///
    /// If there are references to this slot, then we mark this slot for clearing and let the last
    /// thread do the work for us.
    #[inline]
    pub(super) fn try_clear_storage<F: FreeList<C>>(
        &self,
        gen: Generation<C>,
        offset: usize,
        free: &F,
    ) -> bool {
        let should_clear = match self.mark_release(gen) {
            // If `mark_release` returns `Some`, a value exists at this
            // generation. The bool inside this option indicates whether or not
            // _we're_ allowed to clear the value.
            Some(should_clear) => should_clear,
            // Otherwise, the generation we tried to remove has already expired,
            // and we did not mark anything for removal.
            None => {
                test_println!(
                    "-> try_clear_storage; nothing exists at generation={:?}",
                    gen
                );
                return false;
            }
        };

        test_println!("-> try_clear_storage; marked!");

        if should_clear {
            // We're allowed to remove the slot now!
            test_println!("-> try_remove_value; can clear now");
            return self.clear_storage(gen, offset, free);
        }

        true
    }

    /// Clear this slot's storage
    ///
    /// This method blocks until all references have been dropped and clears the storage.
    pub(super) fn clear_storage<F: FreeList<C>>(
        &self,
        gen: Generation<C>,
        offset: usize,
        free: &F,
    ) -> bool {
        // release_with will _always_ wait until it can release the slot or just return if the slot
        // has already been released.
        self.release_with(gen, offset, free, |item| {
            let cleared = item.map(|inner| Clear::clear(inner)).is_some();
            test_println!("-> cleared: {}", cleared);
            cleared
        })
    }
}
490
impl<T, C: cfg::Config> Slot<T, C> {
    /// Releases one reference to this slot.
    ///
    /// Returns `true` if this was the last reference to a slot that was
    /// marked for removal, meaning the caller is now responsible for
    /// clearing/removing the slot's value.
    fn release(&self) -> bool {
        let mut lifecycle = self.lifecycle.load(Ordering::Acquire);
        loop {
            let refs = RefCount::<C>::from_packed(lifecycle);
            let state = Lifecycle::<C>::from_packed(lifecycle).state;
            let gen = LifecycleGen::<C>::from_packed(lifecycle).0;

            // Are we the last guard, and is the slot marked for removal?
            let dropping = refs.value == 1 && state == State::Marked;
            let new_lifecycle = if dropping {
                // If so, we want to advance the state to "removing".
                gen.pack(State::Removing as usize)
            } else {
                // Otherwise, just subtract 1 from the ref count.
                refs.decr().pack(lifecycle)
            };

            test_println!(
                "-> drop guard: state={:?}; gen={:?}; refs={:?}; lifecycle={:#x}; new_lifecycle={:#x}; dropping={:?}",
                state,
                gen,
                refs,
                lifecycle,
                new_lifecycle,
                dropping
            );
            match self.lifecycle.compare_exchange(
                lifecycle,
                new_lifecycle,
                Ordering::AcqRel,
                Ordering::Acquire,
            ) {
                Ok(_) => {
                    test_println!("-> drop guard: done; dropping={:?}", dropping);
                    return dropping;
                }
                Err(actual) => {
                    // Lost the race; retry with the freshly observed word.
                    test_println!("-> drop guard; retry, actual={:#x}", actual);
                    lifecycle = actual;
                }
            }
        }
    }
}
536
537impl<T, C: cfg::Config> fmt::Debug for Slot<T, C> {
538 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
539 let lifecycle: usize = self.lifecycle.load(order:Ordering::Relaxed);
540 f&mut DebugStruct<'_, '_>.debug_struct("Slot")
541 .field("lifecycle", &format_args!("{:#x}", lifecycle))
542 .field("state", &Lifecycle::<C>::from_packed(lifecycle).state)
543 .field("gen", &LifecycleGen::<C>::from_packed(lifecycle).0)
544 .field("refs", &RefCount::<C>::from_packed(lifecycle))
545 .field(name:"next", &self.next())
546 .finish()
547 }
548}
549
550// === impl Generation ===
551
552impl<C> fmt::Debug for Generation<C> {
553 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
554 f.debug_tuple(name:"Generation").field(&self.value).finish()
555 }
556}
557
impl<C: cfg::Config> Generation<C> {
    /// Advances to the next generation, wrapping back around (via the
    /// modulo) once the counter would exceed the bits allotted to it.
    fn advance(self) -> Self {
        Self::from_usize((self.value + 1) % Self::BITS)
    }
}
563
564impl<C: cfg::Config> PartialEq for Generation<C> {
565 fn eq(&self, other: &Self) -> bool {
566 self.value == other.value
567 }
568}
569
570impl<C: cfg::Config> Eq for Generation<C> {}
571
572impl<C: cfg::Config> PartialOrd for Generation<C> {
573 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
574 self.value.partial_cmp(&other.value)
575 }
576}
577
578impl<C: cfg::Config> Ord for Generation<C> {
579 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
580 self.value.cmp(&other.value)
581 }
582}
583
584impl<C: cfg::Config> Clone for Generation<C> {
585 fn clone(&self) -> Self {
586 Self::new(self.value)
587 }
588}
589
590impl<C: cfg::Config> Copy for Generation<C> {}
591
592// === impl Guard ===
593
impl<T, C: cfg::Config> Guard<T, C> {
    /// Releases the guard, returning `true` if the slot should be cleared.
    ///
    /// This forwards to `Slot::release`, decrementing the slot's ref count.
    ///
    /// ## Safety
    ///
    /// This dereferences a raw pointer to the slot. The caller is responsible
    /// for ensuring that the `Guard` does not outlive the slab that contains
    /// the pointed slot. Failure to do so means this pointer may dangle.
    #[inline]
    pub(crate) unsafe fn release(&self) -> bool {
        self.slot().release()
    }

    /// Returns a borrowed reference to the slot.
    ///
    /// ## Safety
    ///
    /// This dereferences a raw pointer to the slot. The caller is responsible
    /// for ensuring that the `Guard` does not outlive the slab that contains
    /// the pointed slot. Failure to do so means this pointer may dangle.
    #[inline]
    pub(crate) unsafe fn slot(&self) -> &Slot<T, C> {
        self.slot.as_ref()
    }

    /// Returns a borrowed reference to the slot's value.
    ///
    /// ## Safety
    ///
    /// This dereferences a raw pointer to the slot. The caller is responsible
    /// for ensuring that the `Guard` does not outlive the slab that contains
    /// the pointed slot. Failure to do so means this pointer may dangle.
    #[inline(always)]
    pub(crate) unsafe fn value(&self) -> &T {
        self.slot().item.with(|item| &*item)
    }
}
631
632// === impl Lifecycle ===
633
impl<C: cfg::Config> Lifecycle<C> {
    /// The slot is marked for removal once all references are dropped.
    const MARKED: Self = Self {
        state: State::Marked,
        _cfg: PhantomData,
    };
    /// The slot is being (or has been) removed.
    const REMOVING: Self = Self {
        state: State::Removing,
        _cfg: PhantomData,
    };
    /// The slot holds a live value.
    const PRESENT: Self = Self {
        state: State::Present,
        _cfg: PhantomData,
    };
}
648
649impl<C: cfg::Config> Pack<C> for Lifecycle<C> {
650 const LEN: usize = 2;
651 type Prev = ();
652
653 fn from_usize(u: usize) -> Self {
654 Self {
655 state: match u & Self::MASK {
656 0b00 => State::Present,
657 0b01 => State::Marked,
658 0b11 => State::Removing,
659 bad: usize => unreachable!("weird lifecycle {:#b}", bad),
660 },
661 _cfg: PhantomData,
662 }
663 }
664
665 fn as_usize(&self) -> usize {
666 self.state as usize
667 }
668}
669
// Manual impls (rather than derives) so equality does not require any
// bounds on the `C` type parameter.
impl<C> PartialEq for Lifecycle<C> {
    fn eq(&self, other: &Self) -> bool {
        self.state == other.state
    }
}

impl<C> Eq for Lifecycle<C> {}
677
678impl<C> fmt::Debug for Lifecycle<C> {
679 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
680 f.debug_tuple(name:"Lifecycle").field(&self.state).finish()
681 }
682}
683
684// === impl RefCount ===
685
impl<C: cfg::Config> Pack<C> for RefCount<C> {
    /// The ref count occupies every bit of the word not used by the
    /// lifecycle state and the generation.
    const LEN: usize = cfg::WIDTH - (Lifecycle::<C>::LEN + Generation::<C>::LEN);
    type Prev = Lifecycle<C>;

    fn from_usize(value: usize) -> Self {
        // The unpacked value must fit in the bits allotted to the ref count.
        debug_assert!(value <= Self::BITS);
        Self {
            value,
            _cfg: PhantomData,
        }
    }

    fn as_usize(&self) -> usize {
        self.value
    }
}
702
impl<C: cfg::Config> RefCount<C> {
    /// The maximum number of concurrent references to a single slot.
    pub(crate) const MAX: usize = Self::BITS - 1;

    /// Increments the ref count, returning `None` if it is saturated at
    /// `MAX` (in which case the access must be refused).
    #[inline]
    fn incr(self) -> Option<Self> {
        if self.value >= Self::MAX {
            test_println!("-> get: {}; MAX={}", self.value, RefCount::<C>::MAX);
            return None;
        }

        Some(Self::from_usize(self.value + 1))
    }

    /// Decrements the ref count.
    ///
    /// Callers must ensure the count is non-zero; the subtraction panics on
    /// underflow in debug builds.
    #[inline]
    fn decr(self) -> Self {
        Self::from_usize(self.value - 1)
    }
}
721
722impl<C> fmt::Debug for RefCount<C> {
723 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
724 f.debug_tuple(name:"RefCount").field(&self.value).finish()
725 }
726}
727
728impl<C: cfg::Config> PartialEq for RefCount<C> {
729 fn eq(&self, other: &Self) -> bool {
730 self.value == other.value
731 }
732}
733
734impl<C: cfg::Config> Eq for RefCount<C> {}
735
736impl<C: cfg::Config> PartialOrd for RefCount<C> {
737 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
738 self.value.partial_cmp(&other.value)
739 }
740}
741
742impl<C: cfg::Config> Ord for RefCount<C> {
743 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
744 self.value.cmp(&other.value)
745 }
746}
747
748impl<C: cfg::Config> Clone for RefCount<C> {
749 fn clone(&self) -> Self {
750 Self::from_usize(self.value)
751 }
752}
753
754impl<C: cfg::Config> Copy for RefCount<C> {}
755
756// === impl LifecycleGen ===
757
758impl<C: cfg::Config> Pack<C> for LifecycleGen<C> {
759 const LEN: usize = Generation::<C>::LEN;
760 type Prev = RefCount<C>;
761
762 fn from_usize(value: usize) -> Self {
763 Self(Generation::from_usize(val:value))
764 }
765
766 fn as_usize(&self) -> usize {
767 self.0.as_usize()
768 }
769}
770
impl<T, C: cfg::Config> InitGuard<T, C> {
    /// Returns the generation of the slot, as observed when this guard was
    /// created.
    pub(crate) fn generation(&self) -> Generation<C> {
        LifecycleGen::<C>::from_packed(self.curr_lifecycle).0
    }

    /// Returns a borrowed reference to the slot's value.
    ///
    /// ## Safety
    ///
    /// This dereferences a raw pointer to the slot. The caller is responsible
    /// for ensuring that the `InitGuard` does not outlive the slab that
    /// contains the pointed slot. Failure to do so means this pointer may
    /// dangle.
    pub(crate) unsafe fn value(&self) -> &T {
        self.slot.as_ref().item.with(|val| &*val)
    }

    /// Returns a mutably borrowed reference to the slot's value.
    ///
    /// ## Safety
    ///
    /// This dereferences a raw pointer to the slot. The caller is responsible
    /// for ensuring that the `InitGuard` does not outlive the slab that
    /// contains the pointed slot. Failure to do so means this pointer may
    /// dangle.
    ///
    /// It's safe to reference the slot mutably, though, because creating an
    /// `InitGuard` ensures there are no outstanding immutable references.
    pub(crate) unsafe fn value_mut(&mut self) -> &mut T {
        self.slot.as_ref().item.with_mut(|val| &mut *val)
    }

    /// Releases the guard, returning `true` if the slot should be cleared.
    ///
    /// ## Safety
    ///
    /// This dereferences a raw pointer to the slot. The caller is responsible
    /// for ensuring that the `InitGuard` does not outlive the slab that
    /// contains the pointed slot. Failure to do so means this pointer may
    /// dangle.
    pub(crate) unsafe fn release(&mut self) -> bool {
        // Release with a ref count of zero (not downgrading to a `Guard`).
        self.release2(0)
    }

    /// Downgrades the guard to an immutable guard
    ///
    /// ## Safety
    ///
    /// This dereferences a raw pointer to the slot. The caller is responsible
    /// for ensuring that the `InitGuard` does not outlive the slab that
    /// contains the pointed slot. Failure to do so means this pointer may
    /// dangle.
    pub(crate) unsafe fn downgrade(&mut self) -> Guard<T, C> {
        // Release with a ref count of one: the returned `Guard` holds it.
        let _ = self.release2(RefCount::<C>::from_usize(1).pack(0));
        Guard { slot: self.slot }
    }

    /// Shared release logic: transitions the slot to `PRESENT` with
    /// `new_refs` packed as its ref count, returning `true` if the slot
    /// should instead be removed (it was marked while we held the guard).
    unsafe fn release2(&mut self, new_refs: usize) -> bool {
        test_println!(
            "InitGuard::release; curr_lifecycle={:?}; downgrading={}",
            Lifecycle::<C>::from_packed(self.curr_lifecycle),
            new_refs != 0,
        );
        // A guard may be released at most once.
        if self.released {
            test_println!("-> already released!");
            return false;
        }
        self.released = true;
        let mut curr_lifecycle = self.curr_lifecycle;
        let slot = self.slot.as_ref();
        let new_lifecycle = LifecycleGen::<C>::from_packed(self.curr_lifecycle)
            .pack(Lifecycle::<C>::PRESENT.pack(new_refs));

        match slot.lifecycle.compare_exchange(
            curr_lifecycle,
            new_lifecycle,
            Ordering::AcqRel,
            Ordering::Acquire,
        ) {
            Ok(_) => {
                test_println!("--> advanced to PRESENT; done");
                return false;
            }
            Err(actual) => {
                test_println!(
                    "--> lifecycle changed; actual={:?}",
                    Lifecycle::<C>::from_packed(actual)
                );
                curr_lifecycle = actual;
            }
        }

        // if the state was no longer the prior state, we are now responsible
        // for releasing the slot.
        loop {
            let refs = RefCount::<C>::from_packed(curr_lifecycle);
            let state = Lifecycle::<C>::from_packed(curr_lifecycle).state;

            test_println!(
                "-> InitGuard::release; lifecycle={:#x}; state={:?}; refs={:?};",
                curr_lifecycle,
                state,
                refs,
            );

            // While we hold the `InitGuard`, we have exclusive access, so the
            // only legal concurrent change is a transition to `Marked`.
            debug_assert!(state == State::Marked || thread::panicking(), "state was not MARKED; someone else has removed the slot while we have exclusive access!\nactual={:?}", state);
            debug_assert!(refs.value == 0 || thread::panicking(), "ref count was not 0; someone else has referenced the slot while we have exclusive access!\nactual={:?}", refs);
            let new_lifecycle = self.generation().pack(State::Removing as usize);

            match slot.lifecycle.compare_exchange(
                curr_lifecycle,
                new_lifecycle,
                Ordering::AcqRel,
                Ordering::Acquire,
            ) {
                Ok(_) => {
                    test_println!("-> InitGuard::RELEASE: done!");
                    return true;
                }
                Err(actual) => {
                    debug_assert!(thread::panicking(), "we should not have to retry this CAS!");
                    test_println!("-> InitGuard::release; retry, actual={:#x}", actual);
                    curr_lifecycle = actual;
                }
            }
        }
    }
}
899
900// === helpers ===
901
902#[inline(always)]
903fn exponential_backoff(exp: &mut usize) {
904 /// Maximum exponent we can back off to.
905 const MAX_EXPONENT: usize = 8;
906
907 // Issue 2^exp pause instructions.
908 for _ in 0..(1 << *exp) {
909 hint::spin_loop();
910 }
911
912 if *exp >= MAX_EXPONENT {
913 // If we have reached the max backoff, also yield to the scheduler
914 // explicitly.
915 crate::sync::yield_now();
916 } else {
917 // Otherwise, increment the exponent.
918 *exp += 1;
919 }
920}
921