| 1 | use super::{Container, Rb, RbBase, RbRead, RbWrite, SharedStorage}; |
| 2 | use crate::{consumer::Consumer, producer::Producer}; |
| 3 | use core::{ |
| 4 | cell::Cell, |
| 5 | mem::{ManuallyDrop, MaybeUninit}, |
| 6 | num::NonZeroUsize, |
| 7 | ptr, |
| 8 | }; |
| 9 | |
| 10 | #[cfg (feature = "alloc" )] |
| 11 | use alloc::rc::Rc; |
| 12 | |
/// Ring buffer for use in a single thread.
///
/// Does *not* implement [`Sync`]. And its [`Producer`] and [`Consumer`] do *not* implement [`Send`].
///
#[cfg_attr (
    feature = "std" ,
    doc = r##"
This code must fail to compile:

```compile_fail
use std::{thread, vec::Vec};
use ringbuf::LocalRb;

let (mut prod, mut cons) = LocalRb::<i32, Vec<_>>::new(256).split();
thread::spawn(move || {
    prod.push(123).unwrap();
})
.join();
thread::spawn(move || {
    assert_eq!(cons.pop().unwrap(), 123);
})
.join();
```
"##
)]
pub struct LocalRb<T, C: Container<T>> {
    // Underlying item storage, shared between the producer and consumer halves.
    storage: SharedStorage<T, C>,
    // Read position counter. `Cell` provides single-threaded interior
    // mutability, so no atomics are needed (hence `!Sync`).
    head: Cell<usize>,
    // Write position counter, also mutated through `&self` via `Cell`.
    tail: Cell<usize>,
}
| 43 | |
| 44 | impl<T, C: Container<T>> RbBase<T> for LocalRb<T, C> { |
| 45 | #[inline ] |
| 46 | unsafe fn slices( |
| 47 | &self, |
| 48 | head: usize, |
| 49 | tail: usize, |
| 50 | ) -> (&mut [MaybeUninit<T>], &mut [MaybeUninit<T>]) { |
| 51 | self.storage.as_mut_slices(head, tail) |
| 52 | } |
| 53 | |
| 54 | #[inline ] |
| 55 | fn capacity_nonzero(&self) -> NonZeroUsize { |
| 56 | self.storage.len() |
| 57 | } |
| 58 | |
| 59 | #[inline ] |
| 60 | fn head(&self) -> usize { |
| 61 | self.head.get() |
| 62 | } |
| 63 | |
| 64 | #[inline ] |
| 65 | fn tail(&self) -> usize { |
| 66 | self.tail.get() |
| 67 | } |
| 68 | } |
| 69 | |
| 70 | impl<T, C: Container<T>> RbRead<T> for LocalRb<T, C> { |
| 71 | #[inline ] |
| 72 | unsafe fn set_head(&self, value: usize) { |
| 73 | self.head.set(val:value); |
| 74 | } |
| 75 | } |
| 76 | |
| 77 | impl<T, C: Container<T>> RbWrite<T> for LocalRb<T, C> { |
| 78 | #[inline ] |
| 79 | unsafe fn set_tail(&self, value: usize) { |
| 80 | self.tail.set(val:value); |
| 81 | } |
| 82 | } |
| 83 | |
| 84 | impl<T, C: Container<T>> Rb<T> for LocalRb<T, C> {} |
| 85 | |
impl<T, C: Container<T>> Drop for LocalRb<T, C> {
    fn drop(&mut self) {
        // Drop all initialized items still in the buffer. The container frees
        // its memory on its own, but stores `MaybeUninit<T>` and therefore
        // would never run the destructors of the occupied slots.
        self.clear();
    }
}
| 91 | |
| 92 | impl<T, C: Container<T>> LocalRb<T, C> { |
| 93 | /// Constructs ring buffer from container and counters. |
| 94 | /// |
| 95 | /// # Safety |
| 96 | /// |
| 97 | /// The items in container inside `head..tail` range must be initialized, items outside this range must be uninitialized. |
| 98 | /// `head` and `tail` values must be valid (see [`RbBase`](`crate::ring_buffer::RbBase`)). |
| 99 | pub unsafe fn from_raw_parts(container: C, head: usize, tail: usize) -> Self { |
| 100 | Self { |
| 101 | storage: SharedStorage::new(container), |
| 102 | head: Cell::new(head), |
| 103 | tail: Cell::new(tail), |
| 104 | } |
| 105 | } |
| 106 | |
| 107 | /// Destructures ring buffer into underlying container and `head` and `tail` counters. |
| 108 | /// |
| 109 | /// # Safety |
| 110 | /// |
| 111 | /// Initialized contents of the container must be properly dropped. |
| 112 | pub unsafe fn into_raw_parts(self) -> (C, usize, usize) { |
| 113 | let (head, tail) = (self.head(), self.tail()); |
| 114 | let self_ = ManuallyDrop::new(self); |
| 115 | (ptr::read(&self_.storage).into_inner(), head, tail) |
| 116 | } |
| 117 | |
| 118 | /// Splits ring buffer into producer and consumer. |
| 119 | /// |
| 120 | /// This method consumes the ring buffer and puts it on heap in [`Rc`]. If you don't want to use heap the see [`Self::split_ref`]. |
| 121 | #[cfg (feature = "alloc" )] |
| 122 | pub fn split(self) -> (Producer<T, Rc<Self>>, Consumer<T, Rc<Self>>) |
| 123 | where |
| 124 | Self: Sized, |
| 125 | { |
| 126 | let rc = Rc::new(self); |
| 127 | unsafe { (Producer::new(rc.clone()), Consumer::new(rc)) } |
| 128 | } |
| 129 | |
| 130 | /// Splits ring buffer into producer and consumer without using the heap. |
| 131 | /// |
| 132 | /// In this case producer and consumer stores a reference to the ring buffer, so you also need to store the buffer somewhere. |
| 133 | pub fn split_ref(&mut self) -> (Producer<T, &Self>, Consumer<T, &Self>) |
| 134 | where |
| 135 | Self: Sized, |
| 136 | { |
| 137 | unsafe { (Producer::new(self), Consumer::new(self)) } |
| 138 | } |
| 139 | } |
| 140 | |