1 | //! **This module is experimental** |
2 | //! |
3 | //! This module provides threadsafe versions of FrozenMap and FrozenVec, |
4 | //! ideal for use as a cache. |
5 | //! |
//! These lock internally; each lock is held only for the duration of a single method call.
7 | //! |
8 | |
9 | use stable_deref_trait::StableDeref; |
10 | use std::alloc::Layout; |
11 | use std::borrow::Borrow; |
12 | use std::collections::BTreeMap; |
13 | use std::collections::HashMap; |
14 | use std::hash::Hash; |
15 | use std::iter::{FromIterator, IntoIterator}; |
16 | use std::ops::Index; |
17 | |
18 | use std::sync::atomic::AtomicPtr; |
19 | use std::sync::atomic::AtomicUsize; |
20 | use std::sync::atomic::Ordering; |
21 | use std::sync::RwLock; |
22 | |
/// Append-only threadsafe version of `std::collections::HashMap` where
/// insertion does not require mutable access
pub struct FrozenMap<K, V> {
    // Interior lock, taken only for the duration of each method call.
    // Values are `StableDeref` (see the impl below), so `&V::Target`
    // references handed out remain valid even as the map grows.
    map: RwLock<HashMap<K, V>>,
}
28 | |
29 | impl<K, V> Default for FrozenMap<K, V> { |
30 | fn default() -> Self { |
31 | Self { |
32 | map: Default::default(), |
33 | } |
34 | } |
35 | } |
36 | |
37 | impl<K: Eq + Hash, V: StableDeref> FrozenMap<K, V> { |
38 | // these should never return &K or &V |
39 | // these should never delete any entries |
40 | |
41 | pub fn new() -> Self { |
42 | Self::default() |
43 | } |
44 | |
45 | /// If the key exists in the map, returns a reference |
46 | /// to the corresponding value, otherwise inserts a |
47 | /// new entry in the map for that key and returns a |
48 | /// reference to the given value. |
49 | /// |
50 | /// Existing values are never overwritten. |
51 | /// |
52 | /// The key may be any borrowed form of the map's key type, but |
53 | /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for |
54 | /// the key type. |
55 | /// |
56 | /// # Examples |
57 | /// |
58 | /// ``` |
59 | /// use elsa::sync::FrozenMap; |
60 | /// |
61 | /// let map = FrozenMap::new(); |
62 | /// assert_eq!(map.insert(1, Box::new("a" )), &"a" ); |
63 | /// assert_eq!(map.insert(1, Box::new("b" )), &"a" ); |
64 | /// ``` |
65 | pub fn insert(&self, k: K, v: V) -> &V::Target { |
66 | let mut map = self.map.write().unwrap(); |
67 | let ret = unsafe { |
68 | let inserted = &**map.entry(k).or_insert(v); |
69 | &*(inserted as *const _) |
70 | }; |
71 | ret |
72 | } |
73 | |
74 | /// If the key exists in the map, returns a reference to the corresponding |
75 | /// value, otherwise inserts a new entry in the map for that key and the |
76 | /// value returned by the creation function, and returns a reference to the |
77 | /// generated value. |
78 | /// |
79 | /// Existing values are never overwritten. |
80 | /// |
81 | /// The key may be any borrowed form of the map's key type, but [`Hash`] and |
82 | /// [`Eq`] on the borrowed form *must* match those for the key type. |
83 | /// |
84 | /// **Note** that the write lock is held for the duration of this function’s |
85 | /// execution, even while the value creation function is executing (if |
86 | /// needed). This will block any concurrent `get` or `insert` calls. |
87 | /// |
88 | /// # Examples |
89 | /// |
90 | /// ``` |
91 | /// use elsa::sync::FrozenMap; |
92 | /// |
93 | /// let map = FrozenMap::new(); |
94 | /// assert_eq!(map.insert_with(1, || Box::new("a" )), &"a" ); |
95 | /// assert_eq!(map.insert_with(1, || unreachable!()), &"a" ); |
96 | /// ``` |
97 | pub fn insert_with(&self, k: K, f: impl FnOnce() -> V) -> &V::Target { |
98 | let mut map = self.map.write().unwrap(); |
99 | let ret = unsafe { |
100 | let inserted = &**map.entry(k).or_insert_with(f); |
101 | &*(inserted as *const _) |
102 | }; |
103 | ret |
104 | } |
105 | |
106 | /// If the key exists in the map, returns a reference to the corresponding |
107 | /// value, otherwise inserts a new entry in the map for that key and the |
108 | /// value returned by the creation function, and returns a reference to the |
109 | /// generated value. |
110 | /// |
111 | /// Existing values are never overwritten. |
112 | /// |
113 | /// The key may be any borrowed form of the map's key type, but [`Hash`] and |
114 | /// [`Eq`] on the borrowed form *must* match those for the key type. |
115 | /// |
116 | /// **Note** that the write lock is held for the duration of this function’s |
117 | /// execution, even while the value creation function is executing (if |
118 | /// needed). This will block any concurrent `get` or `insert` calls. |
119 | /// |
120 | /// # Examples |
121 | /// |
122 | /// ``` |
123 | /// use elsa::sync::FrozenMap; |
124 | /// |
125 | /// let map = FrozenMap::new(); |
126 | /// assert_eq!(map.insert_with_key(1, |_| Box::new("a" )), &"a" ); |
127 | /// assert_eq!(map.insert_with_key(1, |_| unreachable!()), &"a" ); |
128 | /// ``` |
129 | pub fn insert_with_key(&self, k: K, f: impl FnOnce(&K) -> V) -> &V::Target { |
130 | let mut map = self.map.write().unwrap(); |
131 | let ret = unsafe { |
132 | let inserted = &**map.entry(k).or_insert_with_key(f); |
133 | &*(inserted as *const _) |
134 | }; |
135 | ret |
136 | } |
137 | |
138 | /// Returns a reference to the value corresponding to the key. |
139 | /// |
140 | /// The key may be any borrowed form of the map's key type, but |
141 | /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for |
142 | /// the key type. |
143 | /// |
144 | /// # Examples |
145 | /// |
146 | /// ``` |
147 | /// use elsa::sync::FrozenMap; |
148 | /// |
149 | /// let map = FrozenMap::new(); |
150 | /// map.insert(1, Box::new("a" )); |
151 | /// assert_eq!(map.get(&1), Some(&"a" )); |
152 | /// assert_eq!(map.get(&2), None); |
153 | /// ``` |
154 | pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target> |
155 | where |
156 | K: Borrow<Q>, |
157 | Q: Hash + Eq, |
158 | { |
159 | let map = self.map.read().unwrap(); |
160 | let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) }; |
161 | ret |
162 | } |
163 | |
164 | /// Applies a function to the owner of the value corresponding to the key (if any). |
165 | /// |
166 | /// The key may be any borrowed form of the map's key type, but |
167 | /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for |
168 | /// the key type. |
169 | /// |
170 | /// # Examples |
171 | /// |
172 | /// ``` |
173 | /// use elsa::sync::FrozenMap; |
174 | /// |
175 | /// let map = FrozenMap::new(); |
176 | /// map.insert(1, Box::new("a" )); |
177 | /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a" ))); |
178 | /// assert_eq!(map.map_get(&2, Clone::clone), None); |
179 | /// ``` |
180 | pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T> |
181 | where |
182 | K: Borrow<Q>, |
183 | Q: Hash + Eq, |
184 | F: FnOnce(&V) -> T, |
185 | { |
186 | let map = self.map.read().unwrap(); |
187 | let ret = map.get(k).map(f); |
188 | ret |
189 | } |
190 | |
191 | /// # Examples |
192 | /// |
193 | /// ``` |
194 | /// use elsa::sync::FrozenMap; |
195 | /// |
196 | /// let map = FrozenMap::new(); |
197 | /// assert_eq!(map.len(), 0); |
198 | /// map.insert(1, Box::new("a" )); |
199 | /// assert_eq!(map.len(), 1); |
200 | /// ``` |
201 | pub fn len(&self) -> usize { |
202 | let map = self.map.read().unwrap(); |
203 | map.len() |
204 | } |
205 | |
206 | /// # Examples |
207 | /// |
208 | /// ``` |
209 | /// use elsa::sync::FrozenMap; |
210 | /// |
211 | /// let map = FrozenMap::new(); |
212 | /// assert_eq!(map.is_empty(), true); |
213 | /// map.insert(1, Box::new("a" )); |
214 | /// assert_eq!(map.is_empty(), false); |
215 | /// ``` |
216 | pub fn is_empty(&self) -> bool { |
217 | let map = self.map.read().unwrap(); |
218 | map.is_empty() |
219 | } |
220 | |
221 | // TODO add more |
222 | } |
223 | |
/// Append-only threadsafe version of `std::vec::Vec` where
/// insertion does not require mutable access
pub struct FrozenVec<T> {
    // Interior lock, taken only per method call. `StableDeref` elements
    // (see the impl below) keep `&T::Target` references valid across
    // reallocation of the backing storage.
    vec: RwLock<Vec<T>>,
}
229 | |
230 | impl<T> Default for FrozenVec<T> { |
231 | fn default() -> Self { |
232 | Self { |
233 | vec: Default::default(), |
234 | } |
235 | } |
236 | } |
237 | |
238 | impl<T: StableDeref> FrozenVec<T> { |
239 | pub fn new() -> Self { |
240 | Default::default() |
241 | } |
242 | |
243 | // these should never return &T |
244 | // these should never delete any entries |
245 | |
246 | pub fn push(&self, val: T) { |
247 | let mut vec = self.vec.write().unwrap(); |
248 | vec.push(val); |
249 | } |
250 | |
251 | /// Push, immediately getting a reference to the element |
252 | pub fn push_get(&self, val: T) -> &T::Target { |
253 | let mut vec = self.vec.write().unwrap(); |
254 | vec.push(val); |
255 | unsafe { &*(&**vec.get_unchecked(vec.len() - 1) as *const T::Target) } |
256 | } |
257 | |
258 | /// Push, immediately getting a an index of the element |
259 | /// |
260 | /// Index can then be used with the `get` method |
261 | /// |
262 | /// # Examples |
263 | /// |
264 | /// ``` |
265 | /// use elsa::sync::FrozenVec; |
266 | /// |
267 | /// let map = FrozenVec::new(); |
268 | /// let idx = map.push_get_index(String::from("a" )); |
269 | /// assert_eq!(map.get(idx), Some("a" )); |
270 | /// assert_eq!(idx, 0); |
271 | /// assert_eq!(map.push_get_index(String::from("b" )), 1); |
272 | /// ``` |
273 | pub fn push_get_index(&self, val: T) -> usize { |
274 | let mut vec = self.vec.write().unwrap(); |
275 | let index = vec.len(); |
276 | vec.push(val); |
277 | return index; |
278 | } |
279 | |
280 | pub fn get(&self, index: usize) -> Option<&T::Target> { |
281 | let vec = self.vec.read().unwrap(); |
282 | unsafe { vec.get(index).map(|x| &*(&**x as *const T::Target)) } |
283 | } |
284 | |
285 | // TODO add more |
286 | } |
287 | |
/// Append-only threadsafe version of `std::vec::Vec` where
/// insertion does not require mutable access.
/// Does not have locks, only allows `Copy` types and will
/// spinlock on contention. The spinlocks are really rare as
/// they only happen on reallocation due to a push going over
/// the capacity.
pub struct LockFreeFrozenVec<T: Copy> {
    /// Doubles as the spinlock: `lock` swaps this to null while a caller
    /// holds exclusive access, so a null pointer means "locked".
    data: AtomicPtr<T>,
    /// Number of initialized elements. Only grows.
    len: AtomicUsize,
    /// Allocated capacity in elements; `0` means no allocation exists.
    cap: AtomicUsize,
}

impl<T: Copy> Drop for LockFreeFrozenVec<T> {
    fn drop(&mut self) {
        // `&mut self` guarantees no other thread holds the spinlock here.
        let cap: usize = *self.cap.get_mut();
        if cap != 0 {
            // SAFETY: `cap != 0` implies the buffer was allocated with exactly
            // this layout (by `with_capacity` or the growth path in `push`),
            // and both of those reject zero-size layouts.
            unsafe {
                std::alloc::dealloc((*self.data.get_mut()).cast(), Self::layout(cap));
            }
        }
    }
}

impl<T: Copy> Default for LockFreeFrozenVec<T> {
    fn default() -> Self {
        Self {
            // Dangling-but-aligned sentinel; never dereferenced while `cap == 0`.
            // FIXME: use `std::ptr::invalid_mut()` once that is stable.
            data: AtomicPtr::new(std::mem::align_of::<T>() as *mut T),
            len: AtomicUsize::new(0),
            cap: AtomicUsize::new(0),
        }
    }
}

impl<T: Copy> LockFreeFrozenVec<T> {
    /// Creates an empty vector with no allocation.
    pub fn new() -> Self {
        Default::default()
    }

    /// Creates an empty vector with room for `cap` elements pre-allocated.
    ///
    /// A zero-size request (`cap == 0`, or a zero-sized `T`) performs no
    /// allocation and behaves like [`Self::new`]: calling `std::alloc::alloc`
    /// with a zero-size layout would be undefined behavior.
    pub fn with_capacity(cap: usize) -> Self {
        let layout = Self::layout(cap);
        if layout.size() == 0 {
            // Covers both `cap == 0` and ZSTs (which `push` rejects anyway).
            return Self::new();
        }
        // SAFETY: the layout has non-zero size (checked above).
        let ptr = unsafe { std::alloc::alloc(layout) };
        if ptr.is_null() {
            // Allocation failure: report through the global handler rather
            // than handing out a null "locked" data pointer.
            std::alloc::handle_alloc_error(layout);
        }
        Self {
            data: AtomicPtr::new(ptr.cast()),
            len: AtomicUsize::new(0),
            cap: AtomicUsize::new(cap),
        }
    }

    /// Runs `f` with exclusive access to the data pointer.
    ///
    /// Exclusivity is obtained by swapping the pointer out for null (spinning
    /// while another caller holds it) and storing it back — possibly updated
    /// by `f` — once done.
    fn lock<U>(&self, f: impl FnOnce(&mut *mut T) -> U) -> U {
        let mut ptr;
        loop {
            ptr = self.data.swap(std::ptr::null_mut(), Ordering::Acquire);
            if !ptr.is_null() {
                // Wheeeee spinlock
                break;
            }
        }

        let ret = f(&mut ptr);
        self.data.store(ptr, Ordering::Release);
        ret
    }

    /// Layout of an allocation holding `cap` elements of `T`.
    fn layout(cap: usize) -> Layout {
        let num_bytes = std::mem::size_of::<T>() * cap;
        let align = std::mem::align_of::<T>();
        Layout::from_size_align(num_bytes, align).unwrap()
    }

    // these should never return &T
    // these should never delete any entries

    // Evaluating this constant fails at compile time for zero-sized `T`,
    // whose elements could not be stored in the raw buffer.
    const NOT_ZST: () = if std::mem::size_of::<T>() == 0 {
        panic!("`LockFreeFrozenVec` cannot be used with ZSTs");
    };

    /// Appends `val`, returning the index it was stored at.
    ///
    /// Grows the backing buffer (initially to 128 elements, then doubling)
    /// while holding the pointer spinlock.
    pub fn push(&self, val: T) -> usize {
        // This statement actually does something: it evaluates a constant.
        #[allow(path_statements)]
        {
            Self::NOT_ZST
        }
        self.lock(|ptr| {
            // These values must be consistent with the pointer we got.
            let len = self.len.load(Ordering::Acquire);
            let cap = self.cap.load(Ordering::Acquire);
            if len >= cap {
                if cap == 0 {
                    // No memory allocated yet
                    let layout = Self::layout(128);
                    // SAFETY: `LockFreeFrozenVec` statically rejects ZSTs,
                    // so the layout has non-zero size.
                    unsafe {
                        *ptr = std::alloc::alloc(layout).cast::<T>();
                    }
                    // This is written before the end of the `lock` closure, so no one will observe this
                    // until the data pointer has been updated anyway.
                    self.cap.store(128, Ordering::Release);
                } else {
                    // Out of capacity, realloc with double the capacity
                    let layout = Self::layout(cap);
                    let new_size = layout.size() * 2;
                    // SAFETY: `LockFreeFrozenVec` statically rejects ZSTs and the input `ptr` has always been
                    // allocated at the size stated in `cap`.
                    unsafe {
                        *ptr = std::alloc::realloc((*ptr).cast(), layout, new_size).cast::<T>();
                    }
                    // This is written before the end of the `lock` closure, so no one will observe this
                    // until the data pointer has been updated anyway.
                    self.cap.store(cap * 2, Ordering::Release);
                }
                if ptr.is_null() {
                    // Allocation failed. Panicking here would leave the data
                    // pointer null, deadlocking every other spinning thread,
                    // so divert to the global allocation-error handler.
                    std::alloc::handle_alloc_error(Self::layout(
                        self.cap.load(Ordering::Acquire),
                    ));
                }
            }
            // SAFETY: `len < cap` now holds, and the buffer is valid for
            // `cap` elements.
            unsafe {
                ptr.add(len).write(val);
            }
            // This is written before updating the data pointer. Other `push` calls cannot observe this,
            // because they are blocked on acquiring the data pointer before they ever read the `len`.
            // `get` may read the length before actually acquiring the data pointer lock, but that is fine,
            // as once it is able to acquire the lock, there will be actually the right number of elements
            // stored.
            self.len.store(len + 1, Ordering::Release);
            len
        })
    }

    /// Returns a copy of the element at `index`, if it has been pushed.
    pub fn get(&self, index: usize) -> Option<T> {
        // The length can only grow, so just doing the length check
        // independently of the `lock` and read is fine. Worst case we
        // read an old length value and end up returning `None` even if
        // another thread already inserted the value.
        let len = self.len.load(Ordering::Relaxed);
        if index >= len {
            return None;
        }
        self.lock(|ptr| Some(unsafe { ptr.add(index).read() }))
    }
}
425 | |
// Exercises `LockFreeFrozenVec` (sequentially and from two writer threads),
// despite the historical name.
#[test]
fn test_non_lockfree() {
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct Moo(i32);

    for vec in [
        LockFreeFrozenVec::new(),
        LockFreeFrozenVec::with_capacity(1),
        LockFreeFrozenVec::with_capacity(2),
        LockFreeFrozenVec::with_capacity(1000),
    ] {
        // Fresh vec: out-of-bounds lookups return `None`.
        assert_eq!(vec.get(1), None);

        vec.push(Moo(1));
        let middle = vec.push(Moo(2));
        vec.push(Moo(3));

        assert_eq!(vec.get(middle), Some(Moo(2)));

        // Hammer the vec from two writer threads while the main thread polls
        // until all 2000 newly pushed elements become visible.
        std::thread::scope(|scope| {
            for _ in 0..2 {
                scope.spawn(|| {
                    for value in 0..1000 {
                        vec.push(Moo(value));
                    }
                });
            }
            for index in 0..2000 {
                while vec.get(index).is_none() {}
            }
        });
    }

    // Dropping empty (never-pushed) vecs must not crash.
    for _ in [
        LockFreeFrozenVec::<()>::new(),
        LockFreeFrozenVec::with_capacity(1),
        LockFreeFrozenVec::with_capacity(2),
        LockFreeFrozenVec::with_capacity(1000),
    ] {}
}
469 | |
/// Append-only threadsafe version of `std::collections::BTreeMap` where
/// insertion does not require mutable access
// Newtype over an interior lock; the lock is held only for the duration of
// each method call.
#[derive(Debug)]
pub struct FrozenBTreeMap<K, V>(RwLock<BTreeMap<K, V>>);
474 | |
475 | impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> { |
476 | pub fn new() -> Self { |
477 | Self(RwLock::new(BTreeMap::new())) |
478 | } |
479 | |
480 | // these should never return &K or &V |
481 | // these should never delete any entries |
482 | |
483 | /// Returns a reference to the value corresponding to the key. |
484 | /// |
485 | /// The key may be any borrowed form of the map's key type, but |
486 | /// [`Ord`] on the borrowed form *must* match those for |
487 | /// the key type. |
488 | /// |
489 | /// # Examples |
490 | /// |
491 | /// ``` |
492 | /// use elsa::sync::FrozenBTreeMap; |
493 | /// |
494 | /// let map = FrozenBTreeMap::new(); |
495 | /// map.insert(1, Box::new("a" )); |
496 | /// assert_eq!(map.get(&1), Some(&"a" )); |
497 | /// assert_eq!(map.get(&2), None); |
498 | /// ``` |
499 | pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target> |
500 | where |
501 | K: Borrow<Q>, |
502 | Q: Ord, |
503 | { |
504 | let map = self.0.read().unwrap(); |
505 | let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) }; |
506 | ret |
507 | } |
508 | |
509 | /// Insert a new value into the map. Does nothing if the key is already occupied. |
510 | /// |
511 | /// # Examples |
512 | /// |
513 | /// ``` |
514 | /// use elsa::sync::FrozenBTreeMap; |
515 | /// |
516 | /// let map = FrozenBTreeMap::new(); |
517 | /// map.insert(1, Box::new("a" )); |
518 | /// assert_eq!(map.get(&1), Some(&"a" )); |
519 | /// ``` |
520 | pub fn insert(&self, k: K, v: V) -> &V::Target { |
521 | let mut map = self.0.write().unwrap(); |
522 | let ret = unsafe { |
523 | let inserted = &**map.entry(k).or_insert(v); |
524 | &*(inserted as *const _) |
525 | }; |
526 | ret |
527 | } |
528 | |
529 | /// Applies a function to the owner of the value corresponding to the key (if any). |
530 | /// |
531 | /// The key may be any borrowed form of the map's key type, but |
532 | /// [`Ord`] on the borrowed form *must* match those for |
533 | /// the key type. |
534 | /// |
535 | /// # Examples |
536 | /// |
537 | /// ``` |
538 | /// use elsa::sync::FrozenBTreeMap; |
539 | /// |
540 | /// let map = FrozenBTreeMap::new(); |
541 | /// map.insert(1, Box::new("a" )); |
542 | /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a" ))); |
543 | /// assert_eq!(map.map_get(&2, Clone::clone), None); |
544 | /// ``` |
545 | pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T> |
546 | where |
547 | K: Borrow<Q>, |
548 | Q: Ord, |
549 | F: FnOnce(&V) -> T, |
550 | { |
551 | let map = self.0.read().unwrap(); |
552 | let ret = map.get(k).map(f); |
553 | ret |
554 | } |
555 | |
556 | /// # Examples |
557 | /// |
558 | /// ``` |
559 | /// use elsa::sync::FrozenBTreeMap; |
560 | /// |
561 | /// let map = FrozenBTreeMap::new(); |
562 | /// assert_eq!(map.len(), 0); |
563 | /// map.insert(1, Box::new("a" )); |
564 | /// assert_eq!(map.len(), 1); |
565 | /// ``` |
566 | pub fn len(&self) -> usize { |
567 | let map = self.0.read().unwrap(); |
568 | map.len() |
569 | } |
570 | |
571 | /// # Examples |
572 | /// |
573 | /// ``` |
574 | /// use elsa::sync::FrozenBTreeMap; |
575 | /// |
576 | /// let map = FrozenBTreeMap::new(); |
577 | /// assert_eq!(map.is_empty(), true); |
578 | /// map.insert(1, Box::new("a" )); |
579 | /// assert_eq!(map.is_empty(), false); |
580 | /// ``` |
581 | pub fn is_empty(&self) -> bool { |
582 | let map = self.0.read().unwrap(); |
583 | map.is_empty() |
584 | } |
585 | } |
586 | |
587 | impl<K: Clone + Ord, V: StableDeref> From<BTreeMap<K, V>> for FrozenBTreeMap<K, V> { |
588 | fn from(map: BTreeMap<K, V>) -> Self { |
589 | Self(RwLock::new(map)) |
590 | } |
591 | } |
592 | |
593 | impl<Q: ?Sized, K, V> Index<&Q> for FrozenBTreeMap<K, V> |
594 | where |
595 | Q: Ord, |
596 | K: Clone + Ord + Borrow<Q>, |
597 | V: StableDeref, |
598 | { |
599 | type Output = V::Target; |
600 | |
601 | /// # Examples |
602 | /// |
603 | /// ``` |
604 | /// use elsa::sync::FrozenBTreeMap; |
605 | /// |
606 | /// let map = FrozenBTreeMap::new(); |
607 | /// map.insert(1, Box::new("a" )); |
608 | /// assert_eq!(map[&1], "a" ); |
609 | /// ``` |
610 | fn index(&self, idx: &Q) -> &V::Target { |
611 | self.get(idx) |
612 | .expect(msg:"attempted to index FrozenBTreeMap with unknown key" ) |
613 | } |
614 | } |
615 | |
616 | impl<K: Clone + Ord, V: StableDeref> FromIterator<(K, V)> for FrozenBTreeMap<K, V> { |
617 | fn from_iter<T>(iter: T) -> Self |
618 | where |
619 | T: IntoIterator<Item = (K, V)>, |
620 | { |
621 | let map: BTreeMap<_, _> = iter.into_iter().collect(); |
622 | map.into() |
623 | } |
624 | } |
625 | |
626 | impl<K: Clone + Ord, V: StableDeref> Default for FrozenBTreeMap<K, V> { |
627 | fn default() -> Self { |
628 | Self::new() |
629 | } |
630 | } |
631 | |