1use std::sync::atomic::{AtomicUsize, Ordering};
2use std::sync::mpsc::channel;
3use std::sync::{Arc, TryLockError};
4use std::thread;
5
6use crossbeam_utils::sync::ShardedLock;
7use rand::Rng;
8
/// Non-`Copy` payload used to exercise the move-out APIs
/// (`into_inner`, `get_mut`) below.
#[derive(Debug, PartialEq, Eq)]
struct NonCopy(i32);
11
#[test]
fn smoke() {
    // Basic sanity: read and write guards can be acquired in sequence,
    // and two simultaneous readers are allowed.
    let lock = ShardedLock::new(());
    {
        let _r = lock.read().unwrap();
    }
    {
        let _w = lock.write().unwrap();
    }
    {
        let _r1 = lock.read().unwrap();
        let _r2 = lock.read().unwrap();
    }
    {
        let _w = lock.write().unwrap();
    }
}
20
#[test]
fn frob() {
    // Number of spawned threads.
    const N: u32 = 10;
    // Iterations per thread (reduced under Miri to keep the test fast).
    #[cfg(miri)]
    const M: usize = 50;
    #[cfg(not(miri))]
    const M: usize = 1000;

    let lock = Arc::new(ShardedLock::new(()));

    // The channel is used purely as a completion barrier: every thread owns
    // a clone of `tx`, and `recv` returns (with an error) only after all of
    // the senders have been dropped, i.e. all threads have finished.
    let (tx, rx) = channel::<()>();
    for _ in 0..N {
        let tx = tx.clone();
        let lock = lock.clone();
        thread::spawn(move || {
            let mut rng = rand::thread_rng();
            for _ in 0..M {
                // Roughly one write per N operations; the rest are reads.
                if rng.gen_bool(1.0 / (N as f64)) {
                    drop(lock.write().unwrap());
                } else {
                    drop(lock.read().unwrap());
                }
            }
            drop(tx);
        });
    }
    drop(tx);
    // Block until every worker has dropped its sender.
    let _ = rx.recv();
}
50
#[test]
fn arc_poison_wr() {
    // A panic while a write guard is held must poison the lock,
    // causing subsequent reads to fail.
    let lock = Arc::new(ShardedLock::new(1));
    let lock2 = Arc::clone(&lock);
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = lock2.write().unwrap();
        panic!();
    })
    .join();
    assert!(lock.read().is_err());
}
62
#[test]
fn arc_poison_ww() {
    // A panic while a write guard is held must poison the lock,
    // causing subsequent writes to fail and `is_poisoned` to report true.
    let lock = Arc::new(ShardedLock::new(1));
    assert!(!lock.is_poisoned());
    let lock2 = Arc::clone(&lock);
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = lock2.write().unwrap();
        panic!();
    })
    .join();
    assert!(lock.write().is_err());
    assert!(lock.is_poisoned());
}
76
#[test]
fn arc_no_poison_rr() {
    // Panicking while holding only a *read* guard must not poison the lock:
    // a later read still succeeds and sees the original value.
    let lock = Arc::new(ShardedLock::new(1));
    let lock2 = Arc::clone(&lock);
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = lock2.read().unwrap();
        panic!();
    })
    .join();
    let guard = lock.read().unwrap();
    assert_eq!(*guard, 1);
}
#[test]
fn arc_no_poison_sl() {
    // Panicking while holding only a *read* guard must not poison the lock:
    // a later write still succeeds and sees the original value.
    let lock = Arc::new(ShardedLock::new(1));
    let lock2 = Arc::clone(&lock);
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = lock2.read().unwrap();
        panic!()
    })
    .join();
    let guard = lock.write().unwrap();
    assert_eq!(*guard, 1);
}
101
#[test]
fn arc() {
    let counter = Arc::new(ShardedLock::new(0));
    let writer_counter = counter.clone();
    let (tx, rx) = channel();

    // Writer: increments the counter ten times, briefly exposing a
    // transient value of -1 while the write lock is held.
    thread::spawn(move || {
        let mut guard = writer_counter.write().unwrap();
        for _ in 0..10 {
            let saved = *guard;
            *guard = -1;
            thread::yield_now();
            *guard = saved + 1;
        }
        tx.send(()).unwrap();
    });

    // Readers try to catch the writer in the act: the transient -1 must
    // never be observable under a read guard.
    let mut readers = Vec::new();
    for _ in 0..5 {
        let reader_counter = counter.clone();
        readers.push(thread::spawn(move || {
            let guard = reader_counter.read().unwrap();
            assert!(*guard >= 0);
        }));
    }

    // Wait for the readers to pass their asserts.
    for handle in readers {
        assert!(handle.join().is_ok());
    }

    // Wait for the writer to finish, then verify the final count.
    rx.recv().unwrap();
    let guard = counter.read().unwrap();
    assert_eq!(*guard, 10);
}
139
#[test]
fn arc_access_in_unwind() {
    let lock = Arc::new(ShardedLock::new(1));
    let lock2 = Arc::clone(&lock);
    let _ = thread::spawn(move || {
        // The destructor runs during unwinding and takes the write lock
        // *after* the panic has begun; the final `read().unwrap()` below
        // checks that this does not leave the lock poisoned.
        struct Unwinder {
            i: Arc<ShardedLock<isize>>,
        }
        impl Drop for Unwinder {
            fn drop(&mut self) {
                let mut guard = self.i.write().unwrap();
                *guard += 1;
            }
        }
        let _u = Unwinder { i: lock2 };
        panic!();
    })
    .join();
    let guard = lock.read().unwrap();
    assert_eq!(*guard, 2);
}
161
#[test]
fn unsized_type() {
    // ShardedLock supports unsized contents such as slices.
    let sl: &ShardedLock<[i32]> = &ShardedLock::new([1, 2, 3]);
    {
        let mut guard = sl.write().unwrap();
        guard[0] = 4;
        guard[2] = 5;
    }
    let expected: &[i32] = &[4, 2, 5];
    assert_eq!(&*sl.read().unwrap(), expected);
}
173
#[test]
fn try_write() {
    // While a read guard is alive, `try_write` must fail with `WouldBlock`
    // rather than blocking or succeeding.
    let lock = ShardedLock::new(0isize);
    let read_guard = lock.read().unwrap();

    match lock.try_write() {
        Err(TryLockError::WouldBlock) => {}
        Ok(_) => panic!("try_write should not succeed while read_guard is in scope"),
        Err(_) => panic!("unexpected error"),
    }

    drop(read_guard);
}
188
#[test]
fn test_into_inner() {
    // Consuming an unpoisoned lock yields the inner value unchanged.
    let lock = ShardedLock::new(NonCopy(10));
    assert_eq!(lock.into_inner().unwrap(), NonCopy(10));
}
194
#[test]
fn test_into_inner_drop() {
    // Wrapper that bumps a shared counter when dropped.
    struct DropCounter(Arc<AtomicUsize>);
    impl Drop for DropCounter {
        fn drop(&mut self) {
            self.0.fetch_add(1, Ordering::SeqCst);
        }
    }

    let drops = Arc::new(AtomicUsize::new(0));
    let lock = ShardedLock::new(DropCounter(drops.clone()));
    assert_eq!(drops.load(Ordering::SeqCst), 0);
    {
        // `into_inner` must move the value out without dropping it.
        let _inner = lock.into_inner().unwrap();
        assert_eq!(drops.load(Ordering::SeqCst), 0);
    }
    // The value is dropped exactly once, when `_inner` leaves scope.
    assert_eq!(drops.load(Ordering::SeqCst), 1);
}
212
#[test]
fn test_into_inner_poison() {
    // Poison the lock by panicking while holding a write guard, then check
    // that `into_inner` reports the poison yet still surrenders the value.
    let lock = Arc::new(ShardedLock::new(NonCopy(10)));
    let lock2 = lock.clone();
    let _ = thread::spawn(move || {
        let _guard = lock2.write().unwrap();
        panic!("test panic in inner thread to poison ShardedLock");
    })
    .join();

    assert!(lock.is_poisoned());
    match Arc::try_unwrap(lock).unwrap().into_inner() {
        Err(e) => assert_eq!(e.into_inner(), NonCopy(10)),
        Ok(x) => panic!("into_inner of poisoned ShardedLock is Ok: {:?}", x),
    }
}
229
#[test]
fn test_get_mut() {
    // With exclusive access, `get_mut` allows mutating the contents
    // without taking the lock.
    let mut lock = ShardedLock::new(NonCopy(10));
    *lock.get_mut().unwrap() = NonCopy(20);
    assert_eq!(lock.into_inner().unwrap(), NonCopy(20));
}
236
#[test]
fn test_get_mut_poison() {
    // Poison the lock by panicking while holding a write guard, then check
    // that `get_mut` reports the poison yet still grants access.
    let lock = Arc::new(ShardedLock::new(NonCopy(10)));
    let lock2 = lock.clone();
    let _ = thread::spawn(move || {
        let _guard = lock2.write().unwrap();
        panic!("test panic in inner thread to poison ShardedLock");
    })
    .join();

    assert!(lock.is_poisoned());
    match Arc::try_unwrap(lock).unwrap().get_mut() {
        Err(e) => assert_eq!(*e.into_inner(), NonCopy(10)),
        Ok(x) => panic!("get_mut of poisoned ShardedLock is Ok: {:?}", x),
    }
}
253