1 | // MIT License |
2 | |
3 | // Copyright (c) 2016 Jerome Froelich |
4 | |
5 | // Permission is hereby granted, free of charge, to any person obtaining a copy |
6 | // of this software and associated documentation files (the "Software"), to deal |
7 | // in the Software without restriction, including without limitation the rights |
8 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
9 | // copies of the Software, and to permit persons to whom the Software is |
10 | // furnished to do so, subject to the following conditions: |
11 | |
12 | // The above copyright notice and this permission notice shall be included in all |
13 | // copies or substantial portions of the Software. |
14 | |
15 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
16 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
17 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
18 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
19 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
20 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
21 | // SOFTWARE. |
22 | |
//! An implementation of an LRU cache. The cache supports `get`, `get_mut`, `put`,
//! and `pop` operations, all of which are O(1). This crate was heavily influenced
//! by the [LRU Cache implementation in an earlier version of Rust's std::collections crate](https://doc.rust-lang.org/0.12.0/std/collections/lru_cache/struct.LruCache.html).
26 | //! |
27 | //! ## Example |
28 | //! |
29 | //! ```rust |
30 | //! extern crate lru; |
31 | //! |
32 | //! use lru::LruCache; |
33 | //! use std::num::NonZeroUsize; |
34 | //! |
35 | //! fn main() { |
36 | //! let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
37 | //! cache.put("apple" , 3); |
38 | //! cache.put("banana" , 2); |
39 | //! |
40 | //! assert_eq!(*cache.get(&"apple" ).unwrap(), 3); |
41 | //! assert_eq!(*cache.get(&"banana" ).unwrap(), 2); |
42 | //! assert!(cache.get(&"pear" ).is_none()); |
43 | //! |
44 | //! assert_eq!(cache.put("banana" , 4), Some(2)); |
45 | //! assert_eq!(cache.put("pear" , 5), None); |
46 | //! |
47 | //! assert_eq!(*cache.get(&"pear" ).unwrap(), 5); |
48 | //! assert_eq!(*cache.get(&"banana" ).unwrap(), 4); |
49 | //! assert!(cache.get(&"apple" ).is_none()); |
50 | //! |
51 | //! { |
52 | //! let v = cache.get_mut(&"banana" ).unwrap(); |
53 | //! *v = 6; |
54 | //! } |
55 | //! |
56 | //! assert_eq!(*cache.get(&"banana" ).unwrap(), 6); |
57 | //! } |
58 | //! ``` |
59 | |
60 | #![no_std ] |
61 | |
62 | #[cfg (feature = "hashbrown" )] |
63 | extern crate hashbrown; |
64 | |
65 | #[cfg (test)] |
66 | extern crate scoped_threadpool; |
67 | |
68 | use alloc::borrow::Borrow; |
69 | use alloc::boxed::Box; |
70 | use core::fmt; |
71 | use core::hash::{BuildHasher, Hash, Hasher}; |
72 | use core::iter::FusedIterator; |
73 | use core::marker::PhantomData; |
74 | use core::mem; |
75 | use core::num::NonZeroUsize; |
76 | use core::ptr::{self, NonNull}; |
77 | use core::usize; |
78 | |
79 | #[cfg (any(test, not(feature = "hashbrown" )))] |
80 | extern crate std; |
81 | |
82 | #[cfg (feature = "hashbrown" )] |
83 | use hashbrown::HashMap; |
84 | #[cfg (not(feature = "hashbrown" ))] |
85 | use std::collections::HashMap; |
86 | |
87 | extern crate alloc; |
88 | |
// Struct used to hold a reference to a key. The pointer targets the key stored
// inside the corresponding `LruEntry`, so the map's key borrows from the entry
// instead of storing a second copy of K.
struct KeyRef<K> {
    // Raw pointer to the key owned by an LruEntry; must stay valid for as long
    // as this KeyRef is present in the map.
    k: *const K,
}
93 | |
impl<K: Hash> Hash for KeyRef<K> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // SAFETY: `k` always points at the live key inside an entry owned by
        // the same cache, so dereferencing it is sound. Hashes exactly like K
        // so lookups through KeyWrapper<Q> land in the same bucket.
        unsafe { (*self.k).hash(state) }
    }
}
99 | |
impl<K: PartialEq> PartialEq for KeyRef<K> {
    // NB: The unconditional_recursion lint was added in 1.76.0 and can be removed
    // once the current stable version of Rust is 1.76.0 or higher.
    #![allow(unknown_lints)]
    #[allow(clippy::unconditional_recursion)]
    fn eq(&self, other: &KeyRef<K>) -> bool {
        // SAFETY: both pointers target live keys owned by cache entries.
        // Delegates to K's PartialEq; not actually recursive despite the lint.
        unsafe { (*self.k).eq(&*other.k) }
    }
}
109 | |
// Equality delegates to K, so K: Eq guarantees the reflexivity Eq requires.
impl<K: Eq> Eq for KeyRef<K> {}
111 | |
// This type exists to allow a "blanket" Borrow impl for KeyRef without conflicting with the
// stdlib blanket impl
#[repr(transparent)] // same layout as K, so &K can be reinterpreted as &KeyWrapper<K>
struct KeyWrapper<K: ?Sized>(K);
116 | |
impl<K: ?Sized> KeyWrapper<K> {
    // Reinterprets a borrowed key as a borrowed wrapper (zero-cost cast).
    fn from_ref(key: &K) -> &Self {
        // safety: KeyWrapper is transparent, so casting the ref like this is allowable
        unsafe { &*(key as *const K as *const KeyWrapper<K>) }
    }
}
123 | |
// Hashes exactly like the wrapped key, so a KeyWrapper<Q> lookup produces the
// same hash as the KeyRef<K> stored in the map.
impl<K: ?Sized + Hash> Hash for KeyWrapper<K> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.hash(state)
    }
}
129 | |
impl<K: ?Sized + PartialEq> PartialEq for KeyWrapper<K> {
    // NB: The unconditional_recursion lint was added in 1.76.0 and can be removed
    // once the current stable version of Rust is 1.76.0 or higher.
    #![allow(unknown_lints)]
    #[allow(clippy::unconditional_recursion)]
    fn eq(&self, other: &Self) -> bool {
        // Delegates to the wrapped key's PartialEq; not actually recursive.
        self.0.eq(&other.0)
    }
}
139 | |
// Equality delegates to the wrapped key, so K: Eq upholds reflexivity.
impl<K: ?Sized + Eq> Eq for KeyWrapper<K> {}
141 | |
// Lets the map be queried with any Q that K can be borrowed as (e.g. &str for
// String keys), mirroring std HashMap's borrow-based lookup API.
impl<K, Q> Borrow<KeyWrapper<Q>> for KeyRef<K>
where
    K: Borrow<Q>,
    Q: ?Sized,
{
    fn borrow(&self) -> &KeyWrapper<Q> {
        // SAFETY: `self.k` points at the live key stored in a cache entry.
        let key: &Q = unsafe { &*self.k }.borrow();
        KeyWrapper::from_ref(key)
    }
}
152 | |
// Struct used to hold a key value pair. Also contains references to previous and next entries
// so we can maintain the entries in a linked list ordered by their use.
struct LruEntry<K, V> {
    // MaybeUninit because the head/tail sigil nodes carry no key or value;
    // real entries always have both initialized.
    key: mem::MaybeUninit<K>,
    val: mem::MaybeUninit<V>,
    prev: *mut LruEntry<K, V>,
    next: *mut LruEntry<K, V>,
}
161 | |
162 | impl<K, V> LruEntry<K, V> { |
163 | fn new(key: K, val: V) -> Self { |
164 | LruEntry { |
165 | key: mem::MaybeUninit::new(val:key), |
166 | val: mem::MaybeUninit::new(val), |
167 | prev: ptr::null_mut(), |
168 | next: ptr::null_mut(), |
169 | } |
170 | } |
171 | |
172 | fn new_sigil() -> Self { |
173 | LruEntry { |
174 | key: mem::MaybeUninit::uninit(), |
175 | val: mem::MaybeUninit::uninit(), |
176 | prev: ptr::null_mut(), |
177 | next: ptr::null_mut(), |
178 | } |
179 | } |
180 | } |
181 | |
// The default hasher tracks the backing map implementation: hashbrown's
// default build hasher when the "hashbrown" feature is enabled, otherwise
// std's RandomState.
#[cfg(feature = "hashbrown")]
pub type DefaultHasher = hashbrown::hash_map::DefaultHashBuilder;
#[cfg(not(feature = "hashbrown"))]
pub type DefaultHasher = std::collections::hash_map::RandomState;
186 | |
/// An LRU Cache
pub struct LruCache<K, V, S = DefaultHasher> {
    // Maps a borrowed view of each entry's key to the entry itself; the
    // KeyRef points into the LruEntry, so each key is stored only once.
    map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
    // Maximum number of entries before inserts evict the least-recently-used.
    cap: NonZeroUsize,

    // head and tail are sigil nodes to facilitate inserting entries
    head: *mut LruEntry<K, V>,
    tail: *mut LruEntry<K, V>,
}
196 | |
197 | impl<K, V> Clone for LruCache<K, V> |
198 | where |
199 | K: Hash + PartialEq + Eq + Clone, |
200 | V: Clone, |
201 | { |
202 | fn clone(&self) -> Self { |
203 | let mut new_lru: LruCache = LruCache::new(self.cap()); |
204 | |
205 | for (key: &K, value: &V) in self.iter().rev() { |
206 | new_lru.push(k:key.clone(), v:value.clone()); |
207 | } |
208 | |
209 | new_lru |
210 | } |
211 | } |
212 | |
213 | impl<K: Hash + Eq, V> LruCache<K, V> { |
214 | /// Creates a new LRU Cache that holds at most `cap` items. |
215 | /// |
216 | /// # Example |
217 | /// |
218 | /// ``` |
219 | /// use lru::LruCache; |
220 | /// use std::num::NonZeroUsize; |
221 | /// let mut cache: LruCache<isize, &str> = LruCache::new(NonZeroUsize::new(10).unwrap()); |
222 | /// ``` |
223 | pub fn new(cap: NonZeroUsize) -> LruCache<K, V> { |
224 | LruCache::construct(cap, HashMap::with_capacity(cap.get())) |
225 | } |
226 | |
227 | /// Creates a new LRU Cache that never automatically evicts items. |
228 | /// |
229 | /// # Example |
230 | /// |
231 | /// ``` |
232 | /// use lru::LruCache; |
233 | /// use std::num::NonZeroUsize; |
234 | /// let mut cache: LruCache<isize, &str> = LruCache::unbounded(); |
235 | /// ``` |
236 | pub fn unbounded() -> LruCache<K, V> { |
237 | LruCache::construct(NonZeroUsize::new(usize::MAX).unwrap(), HashMap::default()) |
238 | } |
239 | } |
240 | |
241 | impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> { |
242 | /// Creates a new LRU Cache that holds at most `cap` items and |
243 | /// uses the provided hash builder to hash keys. |
244 | /// |
245 | /// # Example |
246 | /// |
247 | /// ``` |
248 | /// use lru::{LruCache, DefaultHasher}; |
249 | /// use std::num::NonZeroUsize; |
250 | /// |
251 | /// let s = DefaultHasher::default(); |
252 | /// let mut cache: LruCache<isize, &str> = LruCache::with_hasher(NonZeroUsize::new(10).unwrap(), s); |
253 | /// ``` |
254 | pub fn with_hasher(cap: NonZeroUsize, hash_builder: S) -> LruCache<K, V, S> { |
255 | LruCache::construct( |
256 | cap, |
257 | HashMap::with_capacity_and_hasher(cap.into(), hash_builder), |
258 | ) |
259 | } |
260 | |
261 | /// Creates a new LRU Cache that never automatically evicts items and |
262 | /// uses the provided hash builder to hash keys. |
263 | /// |
264 | /// # Example |
265 | /// |
266 | /// ``` |
267 | /// use lru::{LruCache, DefaultHasher}; |
268 | /// |
269 | /// let s = DefaultHasher::default(); |
270 | /// let mut cache: LruCache<isize, &str> = LruCache::unbounded_with_hasher(s); |
271 | /// ``` |
272 | pub fn unbounded_with_hasher(hash_builder: S) -> LruCache<K, V, S> { |
273 | LruCache::construct( |
274 | NonZeroUsize::new(usize::MAX).unwrap(), |
275 | HashMap::with_hasher(hash_builder), |
276 | ) |
277 | } |
278 | |
    /// Creates a new LRU Cache with the given capacity.
    fn construct(
        cap: NonZeroUsize,
        map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
    ) -> LruCache<K, V, S> {
        // NB: The compiler warns that cache does not need to be marked as mutable if we
        // declare it as such since we only mutate it inside the unsafe block.
        let cache = LruCache {
            map,
            cap,
            // The sigil nodes are heap-allocated and leaked to raw pointers;
            // presumably reclaimed in the cache's Drop impl (not visible here).
            head: Box::into_raw(Box::new(LruEntry::new_sigil())),
            tail: Box::into_raw(Box::new(LruEntry::new_sigil())),
        };

        // SAFETY: head and tail were just produced from valid Boxes above.
        // Link them so the list starts out empty: head <-> tail.
        unsafe {
            (*cache.head).next = cache.tail;
            (*cache.tail).prev = cache.head;
        }

        cache
    }
300 | |
301 | /// Puts a key-value pair into cache. If the key already exists in the cache, then it updates |
302 | /// the key's value and returns the old value. Otherwise, `None` is returned. |
303 | /// |
304 | /// # Example |
305 | /// |
306 | /// ``` |
307 | /// use lru::LruCache; |
308 | /// use std::num::NonZeroUsize; |
309 | /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
310 | /// |
311 | /// assert_eq!(None, cache.put(1, "a" )); |
312 | /// assert_eq!(None, cache.put(2, "b" )); |
313 | /// assert_eq!(Some("b" ), cache.put(2, "beta" )); |
314 | /// |
315 | /// assert_eq!(cache.get(&1), Some(&"a" )); |
316 | /// assert_eq!(cache.get(&2), Some(&"beta" )); |
317 | /// ``` |
318 | pub fn put(&mut self, k: K, v: V) -> Option<V> { |
319 | self.capturing_put(k, v, false).map(|(_, v)| v) |
320 | } |
321 | |
    /// Pushes a key-value pair into the cache. If an entry with key `k` already exists in
    /// the cache or another cache entry is removed (due to the lru's capacity),
    /// then it returns the old entry's key-value pair. Otherwise, returns `None`.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// assert_eq!(None, cache.push(1, "a"));
    /// assert_eq!(None, cache.push(2, "b"));
    ///
    /// // This push call returns (2, "b") because that was previously 2's entry in the cache.
    /// assert_eq!(Some((2, "b")), cache.push(2, "beta"));
    ///
    /// // This push call returns (1, "a") because the cache is at capacity and 1's entry was the lru entry.
    /// assert_eq!(Some((1, "a")), cache.push(3, "alpha"));
    ///
    /// assert_eq!(cache.get(&1), None);
    /// assert_eq!(cache.get(&2), Some(&"beta"));
    /// assert_eq!(cache.get(&3), Some(&"alpha"));
    /// ```
    pub fn push(&mut self, k: K, v: V) -> Option<(K, V)> {
        // Same insert path as `put`, but with `capture` set so an evicted
        // LRU pair is returned to the caller instead of being dropped.
        self.capturing_put(k, v, true)
    }
349 | |
    // Used internally by `put` and `push` to add a new entry to the lru.
    // Takes ownership of and returns entries replaced due to the cache's capacity
    // when `capture` is true.
    fn capturing_put(&mut self, k: K, mut v: V, capture: bool) -> Option<(K, V)> {
        let node_ref = self.map.get_mut(&KeyRef { k: &k });

        match node_ref {
            Some(node_ref) => {
                // if the key is already in the cache just update its value and move it to the
                // front of the list
                let node_ptr: *mut LruEntry<K, V> = node_ref.as_ptr();

                // gets a reference to the node to perform a swap and drops it right after
                let node_ref = unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) };
                mem::swap(&mut v, node_ref);
                let _ = node_ref;

                // Promote the entry to most-recently-used.
                self.detach(node_ptr);
                self.attach(node_ptr);
                // `v` now holds the previous value after the swap above.
                Some((k, v))
            }
            None => {
                // Either reuse the evicted LRU node (cache full) or allocate.
                let (replaced, node) = self.replace_or_create_node(k, v);
                let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

                self.attach(node_ptr);

                // The map key borrows the key stored inside the node itself.
                let keyref = unsafe { (*node_ptr).key.as_ptr() };
                self.map.insert(KeyRef { k: keyref }, node);

                // `put` discards the evicted pair; `push` returns it.
                replaced.filter(|_| capture)
            }
        }
    }
384 | |
    // Used internally to swap out a node if the cache is full or to create a new node if space
    // is available. Shared between `put`, `push`, `get_or_insert`, and `get_or_insert_mut`.
    #[allow(clippy::type_complexity)]
    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, NonNull<LruEntry<K, V>>) {
        if self.len() == self.cap().get() {
            // if the cache is full, remove the last entry so we can use it for the new key
            // (tail.prev is the least-recently-used real entry).
            let old_key = KeyRef {
                k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
            };
            let old_node = self.map.remove(&old_key).unwrap();
            let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();

            // read out the node's old key and value and then replace it
            // SAFETY: real entries always hold initialized key/value, so
            // assume_init on the replaced MaybeUninits is sound.
            let replaced = unsafe {
                (
                    mem::replace(&mut (*node_ptr).key, mem::MaybeUninit::new(k)).assume_init(),
                    mem::replace(&mut (*node_ptr).val, mem::MaybeUninit::new(v)).assume_init(),
                )
            };

            // Unlink the recycled node; the caller re-attaches it at the head.
            self.detach(node_ptr);

            (Some(replaced), old_node)
        } else {
            // if the cache is not full allocate a new LruEntry
            // Safety: We allocate, turn into raw, and get NonNull all in one step.
            (None, unsafe {
                NonNull::new_unchecked(Box::into_raw(Box::new(LruEntry::new(k, v))))
            })
        }
    }
416 | |
    /// Returns a reference to the value of the key in the cache or `None` if it is not
    /// present in the cache. Moves the key to the head of the LRU list if it exists.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    /// cache.put(2, "c");
    /// cache.put(3, "d");
    ///
    /// assert_eq!(cache.get(&1), None);
    /// assert_eq!(cache.get(&2), Some(&"c"));
    /// assert_eq!(cache.get(&3), Some(&"d"));
    /// ```
    pub fn get<'a, Q>(&'a mut self, k: &Q) -> Option<&'a V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Promote the entry to most-recently-used.
            self.detach(node_ptr);
            self.attach(node_ptr);

            // SAFETY: entries reachable through the map always have an
            // initialized value.
            Some(unsafe { &*(*node_ptr).val.as_ptr() })
        } else {
            None
        }
    }
452 | |
    /// Returns a mutable reference to the value of the key in the cache or `None` if it
    /// is not present in the cache. Moves the key to the head of the LRU list if it exists.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put("apple", 8);
    /// cache.put("banana", 4);
    /// cache.put("banana", 6);
    /// cache.put("pear", 2);
    ///
    /// assert_eq!(cache.get_mut(&"apple"), None);
    /// assert_eq!(cache.get_mut(&"banana"), Some(&mut 6));
    /// assert_eq!(cache.get_mut(&"pear"), Some(&mut 2));
    /// ```
    pub fn get_mut<'a, Q>(&'a mut self, k: &Q) -> Option<&'a mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Promote the entry to most-recently-used.
            self.detach(node_ptr);
            self.attach(node_ptr);

            // SAFETY: entries reachable through the map always have an
            // initialized value; &mut self guarantees exclusive access.
            Some(unsafe { &mut *(*node_ptr).val.as_mut_ptr() })
        } else {
            None
        }
    }
488 | |
    /// Returns a key-value references pair of the key in the cache or `None` if it is not
    /// present in the cache. Moves the key to the head of the LRU list if it exists.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(String::from("1"), "a");
    /// cache.put(String::from("2"), "b");
    /// cache.put(String::from("2"), "c");
    /// cache.put(String::from("3"), "d");
    ///
    /// assert_eq!(cache.get_key_value("1"), None);
    /// assert_eq!(cache.get_key_value("2"), Some((&String::from("2"), &"c")));
    /// assert_eq!(cache.get_key_value("3"), Some((&String::from("3"), &"d")));
    /// ```
    pub fn get_key_value<'a, Q>(&'a mut self, k: &Q) -> Option<(&'a K, &'a V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Promote the entry to most-recently-used.
            self.detach(node_ptr);
            self.attach(node_ptr);

            // SAFETY: entries reachable through the map always have an
            // initialized key and value.
            Some(unsafe { (&*(*node_ptr).key.as_ptr(), &*(*node_ptr).val.as_ptr()) })
        } else {
            None
        }
    }
524 | |
    /// Returns a key-value references pair of the key in the cache or `None` if it is not
    /// present in the cache. The reference to the value of the key is mutable. Moves the key to
    /// the head of the LRU list if it exists.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    /// let (k, v) = cache.get_key_value_mut(&1).unwrap();
    /// assert_eq!(k, &1);
    /// assert_eq!(v, &mut "a");
    /// *v = "aa";
    /// cache.put(3, "c");
    /// assert_eq!(cache.get_key_value(&2), None);
    /// assert_eq!(cache.get_key_value(&1), Some((&1, &"aa")));
    /// assert_eq!(cache.get_key_value(&3), Some((&3, &"c")));
    /// ```
    pub fn get_key_value_mut<'a, Q>(&'a mut self, k: &Q) -> Option<(&'a K, &'a mut V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Promote the entry to most-recently-used.
            self.detach(node_ptr);
            self.attach(node_ptr);

            // SAFETY: entries reachable through the map always have an
            // initialized key and value. The key stays shared (mutating it
            // would corrupt the map), only the value is handed out mutably.
            Some(unsafe {
                (
                    &*(*node_ptr).key.as_ptr(),
                    &mut *(*node_ptr).val.as_mut_ptr(),
                )
            })
        } else {
            None
        }
    }
568 | |
    /// Returns a reference to the value of the key in the cache if it is
    /// present in the cache and moves the key to the head of the LRU list.
    /// If the key does not exist the provided `FnOnce` is used to populate
    /// the list and a reference is returned.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    /// cache.put(2, "c");
    /// cache.put(3, "d");
    ///
    /// assert_eq!(cache.get_or_insert(2, ||"a"), &"c");
    /// assert_eq!(cache.get_or_insert(3, ||"a"), &"d");
    /// assert_eq!(cache.get_or_insert(1, ||"a"), &"a");
    /// assert_eq!(cache.get_or_insert(1, ||"b"), &"a");
    /// ```
    pub fn get_or_insert<F>(&mut self, k: K, f: F) -> &V
    where
        F: FnOnce() -> V,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Hit: promote to most-recently-used; `f` is never called.
            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { &*(*node_ptr).val.as_ptr() }
        } else {
            // Miss: compute the value, then insert (possibly evicting the LRU
            // entry, whose pair is silently dropped here).
            let v = f();
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            // The map key borrows the key stored inside the node itself.
            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            unsafe { &*(*node_ptr).val.as_ptr() }
        }
    }
614 | |
    /// Returns a reference to the value of the key in the cache if it is
    /// present in the cache and moves the key to the head of the LRU list.
    /// If the key does not exist the provided `FnOnce` is used to populate
    /// the list and a reference is returned. If `FnOnce` returns `Err`,
    /// returns the `Err`.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    /// cache.put(2, "c");
    /// cache.put(3, "d");
    ///
    /// let f = ||->Result<&str, String> {Err("failed".to_owned())};
    /// let a = ||->Result<&str, String> {Ok("a")};
    /// let b = ||->Result<&str, String> {Ok("b")};
    /// assert_eq!(cache.try_get_or_insert(2, a), Ok(&"c"));
    /// assert_eq!(cache.try_get_or_insert(3, a), Ok(&"d"));
    /// assert_eq!(cache.try_get_or_insert(4, f), Err("failed".to_owned()));
    /// assert_eq!(cache.try_get_or_insert(5, b), Ok(&"b"));
    /// assert_eq!(cache.try_get_or_insert(5, a), Ok(&"b"));
    /// ```
    pub fn try_get_or_insert<F, E>(&mut self, k: K, f: F) -> Result<&V, E>
    where
        F: FnOnce() -> Result<V, E>,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Hit: promote to most-recently-used; `f` is never called.
            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { Ok(&*(*node_ptr).val.as_ptr()) }
        } else {
            // Miss: `?` propagates the error before anything is modified.
            let v = f()?;
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            // The map key borrows the key stored inside the node itself.
            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            Ok(unsafe { &*(*node_ptr).val.as_ptr() })
        }
    }
665 | |
    /// Returns a mutable reference to the value of the key in the cache if it is
    /// present in the cache and moves the key to the head of the LRU list.
    /// If the key does not exist the provided `FnOnce` is used to populate
    /// the list and a mutable reference is returned.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    ///
    /// let v = cache.get_or_insert_mut(2, ||"c");
    /// assert_eq!(v, &"b");
    /// *v = "d";
    /// assert_eq!(cache.get_or_insert_mut(2, ||"e"), &mut "d");
    /// assert_eq!(cache.get_or_insert_mut(3, ||"f"), &mut "f");
    /// assert_eq!(cache.get_or_insert_mut(3, ||"e"), &mut "f");
    /// ```
    pub fn get_or_insert_mut<F>(&mut self, k: K, f: F) -> &mut V
    where
        F: FnOnce() -> V,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Hit: promote to most-recently-used; `f` is never called.
            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
        } else {
            // Miss: compute the value, then insert (possibly evicting the LRU
            // entry, whose pair is silently dropped here).
            let v = f();
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            // The map key borrows the key stored inside the node itself.
            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
        }
    }
711 | |
    /// Returns a mutable reference to the value of the key in the cache if it is
    /// present in the cache and moves the key to the head of the LRU list.
    /// If the key does not exist the provided `FnOnce` is used to populate
    /// the list and a mutable reference is returned. If `FnOnce` returns `Err`,
    /// returns the `Err`.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    /// cache.put(2, "c");
    ///
    /// let f = ||->Result<&str, String> {Err("failed".to_owned())};
    /// let a = ||->Result<&str, String> {Ok("a")};
    /// let b = ||->Result<&str, String> {Ok("b")};
    /// if let Ok(v) = cache.try_get_or_insert_mut(2, a) {
    ///     *v = "d";
    /// }
    /// assert_eq!(cache.try_get_or_insert_mut(2, a), Ok(&mut "d"));
    /// assert_eq!(cache.try_get_or_insert_mut(3, f), Err("failed".to_owned()));
    /// assert_eq!(cache.try_get_or_insert_mut(4, b), Ok(&mut "b"));
    /// assert_eq!(cache.try_get_or_insert_mut(4, a), Ok(&mut "b"));
    /// ```
    pub fn try_get_or_insert_mut<F, E>(&mut self, k: K, f: F) -> Result<&mut V, E>
    where
        F: FnOnce() -> Result<V, E>,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            // Hit: promote to most-recently-used; `f` is never called.
            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { Ok(&mut *(*node_ptr).val.as_mut_ptr()) }
        } else {
            // Miss: `?` propagates the error before anything is modified.
            let v = f()?;
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            // The map key borrows the key stored inside the node itself.
            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            unsafe { Ok(&mut *(*node_ptr).val.as_mut_ptr()) }
        }
    }
763 | |
764 | /// Returns a reference to the value corresponding to the key in the cache or `None` if it is |
765 | /// not present in the cache. Unlike `get`, `peek` does not update the LRU list so the key's |
766 | /// position will be unchanged. |
767 | /// |
768 | /// # Example |
769 | /// |
770 | /// ``` |
771 | /// use lru::LruCache; |
772 | /// use std::num::NonZeroUsize; |
773 | /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
774 | /// |
775 | /// cache.put(1, "a" ); |
776 | /// cache.put(2, "b" ); |
777 | /// |
778 | /// assert_eq!(cache.peek(&1), Some(&"a" )); |
779 | /// assert_eq!(cache.peek(&2), Some(&"b" )); |
780 | /// ``` |
781 | pub fn peek<'a, Q>(&'a self, k: &Q) -> Option<&'a V> |
782 | where |
783 | K: Borrow<Q>, |
784 | Q: Hash + Eq + ?Sized, |
785 | { |
786 | self.map |
787 | .get(KeyWrapper::from_ref(k)) |
788 | .map(|node| unsafe { &*node.as_ref().val.as_ptr() }) |
789 | } |
790 | |
791 | /// Returns a mutable reference to the value corresponding to the key in the cache or `None` |
792 | /// if it is not present in the cache. Unlike `get_mut`, `peek_mut` does not update the LRU |
793 | /// list so the key's position will be unchanged. |
794 | /// |
795 | /// # Example |
796 | /// |
797 | /// ``` |
798 | /// use lru::LruCache; |
799 | /// use std::num::NonZeroUsize; |
800 | /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
801 | /// |
802 | /// cache.put(1, "a" ); |
803 | /// cache.put(2, "b" ); |
804 | /// |
805 | /// assert_eq!(cache.peek_mut(&1), Some(&mut "a" )); |
806 | /// assert_eq!(cache.peek_mut(&2), Some(&mut "b" )); |
807 | /// ``` |
808 | pub fn peek_mut<'a, Q>(&'a mut self, k: &Q) -> Option<&'a mut V> |
809 | where |
810 | K: Borrow<Q>, |
811 | Q: Hash + Eq + ?Sized, |
812 | { |
813 | match self.map.get_mut(KeyWrapper::from_ref(k)) { |
814 | None => None, |
815 | Some(node) => Some(unsafe { &mut *(*node.as_ptr()).val.as_mut_ptr() }), |
816 | } |
817 | } |
818 | |
    /// Returns the value corresponding to the least recently used item or `None` if the
    /// cache is empty. Like `peek`, `peek_lru` does not update the LRU list so the item's
    /// position will be unchanged.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    ///
    /// assert_eq!(cache.peek_lru(), Some((&1, &"a")));
    /// ```
    pub fn peek_lru(&self) -> Option<(&K, &V)> {
        // Guard against an empty list: tail.prev would be the head sigil,
        // whose key/val are uninitialized.
        if self.is_empty() {
            return None;
        }

        let (key, val);
        // SAFETY: the cache is non-empty, so tail.prev is a real entry with
        // initialized key and value.
        unsafe {
            let node = (*self.tail).prev;
            key = &(*(*node).key.as_ptr()) as &K;
            val = &(*(*node).val.as_ptr()) as &V;
        }

        Some((key, val))
    }
849 | |
850 | /// Returns a bool indicating whether the given key is in the cache. Does not update the |
851 | /// LRU list. |
852 | /// |
853 | /// # Example |
854 | /// |
855 | /// ``` |
856 | /// use lru::LruCache; |
857 | /// use std::num::NonZeroUsize; |
858 | /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
859 | /// |
860 | /// cache.put(1, "a" ); |
861 | /// cache.put(2, "b" ); |
862 | /// cache.put(3, "c" ); |
863 | /// |
864 | /// assert!(!cache.contains(&1)); |
865 | /// assert!(cache.contains(&2)); |
866 | /// assert!(cache.contains(&3)); |
867 | /// ``` |
868 | pub fn contains<Q>(&self, k: &Q) -> bool |
869 | where |
870 | K: Borrow<Q>, |
871 | Q: Hash + Eq + ?Sized, |
872 | { |
873 | self.map.contains_key(KeyWrapper::from_ref(k)) |
874 | } |
875 | |
    /// Removes and returns the value corresponding to the key from the cache or
    /// `None` if it does not exist.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    ///
    /// cache.put(2, "a");
    ///
    /// assert_eq!(cache.pop(&1), None);
    /// assert_eq!(cache.pop(&2), Some("a"));
    /// assert_eq!(cache.pop(&2), None);
    /// assert_eq!(cache.len(), 0);
    /// ```
    pub fn pop<Q>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        match self.map.remove(KeyWrapper::from_ref(k)) {
            None => None,
            Some(old_node) => {
                let mut old_node = unsafe {
                    // Move the entry out of the heap and free its allocation.
                    let mut old_node = *Box::from_raw(old_node.as_ptr());
                    // The map no longer references the key, so drop it now;
                    // the value is returned to the caller below.
                    ptr::drop_in_place(old_node.key.as_mut_ptr());

                    old_node
                };

                // Unlink the (now local) node's neighbors; detach only follows
                // the node's own prev/next pointers, which are still valid.
                self.detach(&mut old_node);

                let LruEntry { key: _, val, .. } = old_node;
                // SAFETY: `val` was initialized when the entry was inserted.
                unsafe { Some(val.assume_init()) }
            }
        }
    }
915 | |
916 | /// Removes and returns the key and the value corresponding to the key from the cache or |
917 | /// `None` if it does not exist. |
918 | /// |
919 | /// # Example |
920 | /// |
921 | /// ``` |
922 | /// use lru::LruCache; |
923 | /// use std::num::NonZeroUsize; |
924 | /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
925 | /// |
926 | /// cache.put(1, "a" ); |
927 | /// cache.put(2, "a" ); |
928 | /// |
929 | /// assert_eq!(cache.pop(&1), Some("a" )); |
930 | /// assert_eq!(cache.pop_entry(&2), Some((2, "a" ))); |
931 | /// assert_eq!(cache.pop(&1), None); |
932 | /// assert_eq!(cache.pop_entry(&2), None); |
933 | /// assert_eq!(cache.len(), 0); |
934 | /// ``` |
935 | pub fn pop_entry<Q>(&mut self, k: &Q) -> Option<(K, V)> |
936 | where |
937 | K: Borrow<Q>, |
938 | Q: Hash + Eq + ?Sized, |
939 | { |
940 | match self.map.remove(KeyWrapper::from_ref(k)) { |
941 | None => None, |
942 | Some(old_node) => { |
943 | let mut old_node = unsafe { *Box::from_raw(old_node.as_ptr()) }; |
944 | |
945 | self.detach(&mut old_node); |
946 | |
947 | let LruEntry { key, val, .. } = old_node; |
948 | unsafe { Some((key.assume_init(), val.assume_init())) } |
949 | } |
950 | } |
951 | } |
952 | |
953 | /// Removes and returns the key and value corresponding to the least recently |
954 | /// used item or `None` if the cache is empty. |
955 | /// |
956 | /// # Example |
957 | /// |
958 | /// ``` |
959 | /// use lru::LruCache; |
960 | /// use std::num::NonZeroUsize; |
961 | /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
962 | /// |
963 | /// cache.put(2, "a" ); |
964 | /// cache.put(3, "b" ); |
965 | /// cache.put(4, "c" ); |
966 | /// cache.get(&3); |
967 | /// |
968 | /// assert_eq!(cache.pop_lru(), Some((4, "c" ))); |
969 | /// assert_eq!(cache.pop_lru(), Some((3, "b" ))); |
970 | /// assert_eq!(cache.pop_lru(), None); |
971 | /// assert_eq!(cache.len(), 0); |
972 | /// ``` |
973 | pub fn pop_lru(&mut self) -> Option<(K, V)> { |
974 | let node = self.remove_last()?; |
975 | // N.B.: Can't destructure directly because of https://github.com/rust-lang/rust/issues/28536 |
976 | let node = *node; |
977 | let LruEntry { key, val, .. } = node; |
978 | unsafe { Some((key.assume_init(), val.assume_init())) } |
979 | } |
980 | |
    /// Marks the key as the most recently used one.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    ///
    /// cache.put(1, "a" );
    /// cache.put(2, "b" );
    /// cache.put(3, "c" );
    /// cache.get(&1);
    /// cache.get(&2);
    ///
    /// // If we do `pop_lru` now, we would pop 3.
    /// // assert_eq!(cache.pop_lru(), Some((3, "c")));
    ///
    /// // By promoting 3, we make sure it isn't popped.
    /// cache.promote(&3);
    /// assert_eq!(cache.pop_lru(), Some((1, "a" )));
    /// ```
    pub fn promote<Q>(&mut self, k: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            // Unlink the node and re-insert it at the head (MRU) end of the list.
            // If the key is absent this is a silent no-op.
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
            self.detach(node_ptr);
            self.attach(node_ptr);
        }
    }
1014 | |
    /// Marks the key as the least recently used one.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    ///
    /// cache.put(1, "a" );
    /// cache.put(2, "b" );
    /// cache.put(3, "c" );
    /// cache.get(&1);
    /// cache.get(&2);
    ///
    /// // If we do `pop_lru` now, we would pop 3.
    /// // assert_eq!(cache.pop_lru(), Some((3, "c")));
    ///
    /// // By demoting 1 and 2, we make sure those are popped first.
    /// cache.demote(&2);
    /// cache.demote(&1);
    /// assert_eq!(cache.pop_lru(), Some((1, "a" )));
    /// assert_eq!(cache.pop_lru(), Some((2, "b" )));
    /// ```
    pub fn demote<Q>(&mut self, k: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            // Unlink the node and re-insert it at the tail (LRU) end of the list,
            // making it the next eviction candidate. Absent key: silent no-op.
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
            self.detach(node_ptr);
            self.attach_last(node_ptr);
        }
    }
1050 | |
    /// Returns the number of key-value pairs that are currently in the cache.
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// assert_eq!(cache.len(), 0);
    ///
    /// cache.put(1, "a" );
    /// assert_eq!(cache.len(), 1);
    ///
    /// cache.put(2, "b" );
    /// assert_eq!(cache.len(), 2);
    ///
    /// cache.put(3, "c" );
    /// assert_eq!(cache.len(), 2);
    /// ```
    pub fn len(&self) -> usize {
        // The map and the linked list always hold the same entries, so the
        // map's size is the cache's size.
        self.map.len()
    }
1073 | |
1074 | /// Returns a bool indicating whether the cache is empty or not. |
1075 | /// |
1076 | /// # Example |
1077 | /// |
1078 | /// ``` |
1079 | /// use lru::LruCache; |
1080 | /// use std::num::NonZeroUsize; |
1081 | /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
1082 | /// assert!(cache.is_empty()); |
1083 | /// |
1084 | /// cache.put(1, "a" ); |
1085 | /// assert!(!cache.is_empty()); |
1086 | /// ``` |
1087 | pub fn is_empty(&self) -> bool { |
1088 | self.map.len() == 0 |
1089 | } |
1090 | |
    /// Returns the maximum number of key-value pairs the cache can hold.
    ///
    /// This is the configured capacity; the current number of entries may be
    /// smaller (see [`len`](#method.len)).
    ///
    /// # Example
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    /// let mut cache: LruCache<isize, &str> = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// assert_eq!(cache.cap().get(), 2);
    /// ```
    pub fn cap(&self) -> NonZeroUsize {
        self.cap
    }
1104 | |
1105 | /// Resizes the cache. If the new capacity is smaller than the size of the current |
1106 | /// cache any entries past the new capacity are discarded. |
1107 | /// |
1108 | /// # Example |
1109 | /// |
1110 | /// ``` |
1111 | /// use lru::LruCache; |
1112 | /// use std::num::NonZeroUsize; |
1113 | /// let mut cache: LruCache<isize, &str> = LruCache::new(NonZeroUsize::new(2).unwrap()); |
1114 | /// |
1115 | /// cache.put(1, "a" ); |
1116 | /// cache.put(2, "b" ); |
1117 | /// cache.resize(NonZeroUsize::new(4).unwrap()); |
1118 | /// cache.put(3, "c" ); |
1119 | /// cache.put(4, "d" ); |
1120 | /// |
1121 | /// assert_eq!(cache.len(), 4); |
1122 | /// assert_eq!(cache.get(&1), Some(&"a" )); |
1123 | /// assert_eq!(cache.get(&2), Some(&"b" )); |
1124 | /// assert_eq!(cache.get(&3), Some(&"c" )); |
1125 | /// assert_eq!(cache.get(&4), Some(&"d" )); |
1126 | /// ``` |
1127 | pub fn resize(&mut self, cap: NonZeroUsize) { |
1128 | // return early if capacity doesn't change |
1129 | if cap == self.cap { |
1130 | return; |
1131 | } |
1132 | |
1133 | while self.map.len() > cap.get() { |
1134 | self.pop_lru(); |
1135 | } |
1136 | self.map.shrink_to_fit(); |
1137 | |
1138 | self.cap = cap; |
1139 | } |
1140 | |
1141 | /// Clears the contents of the cache. |
1142 | /// |
1143 | /// # Example |
1144 | /// |
1145 | /// ``` |
1146 | /// use lru::LruCache; |
1147 | /// use std::num::NonZeroUsize; |
1148 | /// let mut cache: LruCache<isize, &str> = LruCache::new(NonZeroUsize::new(2).unwrap()); |
1149 | /// assert_eq!(cache.len(), 0); |
1150 | /// |
1151 | /// cache.put(1, "a" ); |
1152 | /// assert_eq!(cache.len(), 1); |
1153 | /// |
1154 | /// cache.put(2, "b" ); |
1155 | /// assert_eq!(cache.len(), 2); |
1156 | /// |
1157 | /// cache.clear(); |
1158 | /// assert_eq!(cache.len(), 0); |
1159 | /// ``` |
1160 | pub fn clear(&mut self) { |
1161 | while self.pop_lru().is_some() {} |
1162 | } |
1163 | |
    /// An iterator visiting all entries in most-recently used order. The iterator element type is
    /// `(&K, &V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    /// cache.put("a" , 1);
    /// cache.put("b" , 2);
    /// cache.put("c" , 3);
    ///
    /// for (key, val) in cache.iter() {
    ///     println!("key: {} val: {}" , key, val);
    /// }
    /// ```
    pub fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            len: self.len(),
            // First real node after the head sigil (most-recently used).
            ptr: unsafe { (*self.head).next },
            // Last real node before the tail sigil (least-recently used).
            end: unsafe { (*self.tail).prev },
            phantom: PhantomData,
        }
    }
1190 | |
    /// An iterator visiting all entries in most-recently-used order, giving a mutable reference on
    /// V. The iterator element type is `(&K, &mut V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use lru::LruCache;
    /// use std::num::NonZeroUsize;
    ///
    /// struct HddBlock {
    ///     dirty: bool,
    ///     data: [u8; 512]
    /// }
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    /// cache.put(0, HddBlock { dirty: false, data: [0x00; 512]});
    /// cache.put(1, HddBlock { dirty: true,  data: [0x55; 512]});
    /// cache.put(2, HddBlock { dirty: true,  data: [0x77; 512]});
    ///
    /// // write dirty blocks to disk.
    /// for (block_id, block) in cache.iter_mut() {
    ///     if block.dirty {
    ///         // write block to disk
    ///         block.dirty = false
    ///     }
    /// }
    /// ```
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        IterMut {
            len: self.len(),
            // First real node after the head sigil (most-recently used).
            ptr: unsafe { (*self.head).next },
            // Last real node before the tail sigil (least-recently used).
            end: unsafe { (*self.tail).prev },
            phantom: PhantomData,
        }
    }
1226 | |
1227 | fn remove_last(&mut self) -> Option<Box<LruEntry<K, V>>> { |
1228 | let prev; |
1229 | unsafe { prev = (*self.tail).prev } |
1230 | if prev != self.head { |
1231 | let old_key = KeyRef { |
1232 | k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) }, |
1233 | }; |
1234 | let old_node = self.map.remove(&old_key).unwrap(); |
1235 | let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr(); |
1236 | self.detach(node_ptr); |
1237 | unsafe { Some(Box::from_raw(node_ptr)) } |
1238 | } else { |
1239 | None |
1240 | } |
1241 | } |
1242 | |
    // Unlinks `node` from the doubly linked list by pointing its neighbours at
    // each other. `node`'s own prev/next still reference the old neighbours;
    // they are overwritten when the node is re-attached or the node is freed.
    fn detach(&mut self, node: *mut LruEntry<K, V>) {
        unsafe {
            (*(*node).prev).next = (*node).next;
            (*(*node).next).prev = (*node).prev;
        }
    }
1249 | |
    // Attaches `node` after the sigil `self.head` node, making it the
    // most-recently-used entry. Statement order matters: `(*node).next` is set
    // first so the final line can find the previous first node through it.
    fn attach(&mut self, node: *mut LruEntry<K, V>) {
        unsafe {
            (*node).next = (*self.head).next;
            (*node).prev = self.head;
            (*self.head).next = node;
            (*(*node).next).prev = node;
        }
    }
1259 | |
    // Attaches `node` before the sigil `self.tail` node, making it the
    // least-recently-used entry (the next eviction candidate). Statement order
    // matters: `(*node).prev` is set before the final line reads it.
    fn attach_last(&mut self, node: *mut LruEntry<K, V>) {
        unsafe {
            (*node).next = self.tail;
            (*node).prev = (*self.tail).prev;
            (*self.tail).prev = node;
            (*(*node).prev).next = node;
        }
    }
1269 | } |
1270 | |
1271 | impl<K, V, S> Drop for LruCache<K, V, S> { |
1272 | fn drop(&mut self) { |
1273 | self.map.drain().for_each(|(_, node: NonNull>)| unsafe { |
1274 | let mut node: LruEntry = *Box::from_raw(node.as_ptr()); |
1275 | ptr::drop_in_place((node).key.as_mut_ptr()); |
1276 | ptr::drop_in_place((node).val.as_mut_ptr()); |
1277 | }); |
1278 | // We rebox the head/tail, and because these are maybe-uninit |
1279 | // they do not have the absent k/v dropped. |
1280 | |
1281 | let _head: LruEntry = unsafe { *Box::from_raw(self.head) }; |
1282 | let _tail: LruEntry = unsafe { *Box::from_raw(self.tail) }; |
1283 | } |
1284 | } |
1285 | |
// Allows `for (k, v) in &cache` — borrowing iteration in most-recently-used order.
impl<'a, K: Hash + Eq, V, S: BuildHasher> IntoIterator for &'a LruCache<K, V, S> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;

    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}
1294 | |
// Allows `for (k, v) in &mut cache` — mutable borrowing iteration in
// most-recently-used order.
impl<'a, K: Hash + Eq, V, S: BuildHasher> IntoIterator for &'a mut LruCache<K, V, S> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;

    fn into_iter(self) -> IterMut<'a, K, V> {
        self.iter_mut()
    }
}
1303 | |
// The compiler does not automatically derive Send and Sync for LruCache because it contains
// raw pointers. The raw pointers are safely encapsulated by LruCache though so we can
// implement Send and Sync for it below.
// SAFETY: the list nodes are owned exclusively by the cache and never shared
// outside it, so the usual auto-trait reasoning over K, V and S applies.
unsafe impl<K: Send, V: Send, S: Send> Send for LruCache<K, V, S> {}
unsafe impl<K: Sync, V: Sync, S: Sync> Sync for LruCache<K, V, S> {}
1309 | |
1310 | impl<K: Hash + Eq, V, S: BuildHasher> fmt::Debug for LruCache<K, V, S> { |
1311 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
1312 | f&mut DebugStruct<'_, '_>.debug_struct("LruCache" ) |
1313 | .field("len" , &self.len()) |
1314 | .field(name:"cap" , &self.cap()) |
1315 | .finish() |
1316 | } |
1317 | } |
1318 | |
/// An iterator over the entries of a `LruCache`.
///
/// This `struct` is created by the [`iter`] method on [`LruCache`][`LruCache`]. See its
/// documentation for more.
///
/// [`iter`]: struct.LruCache.html#method.iter
/// [`LruCache`]: struct.LruCache.html
pub struct Iter<'a, K: 'a, V: 'a> {
    // Number of entries not yet yielded (from either end).
    len: usize,

    // Next node to yield when iterating forwards (MRU end).
    ptr: *const LruEntry<K, V>,
    // Next node to yield when iterating backwards (LRU end).
    end: *const LruEntry<K, V>,

    // Ties the iterator's lifetime to the cache borrow it came from.
    phantom: PhantomData<&'a K>,
}
1334 | |
1335 | impl<'a, K, V> Iterator for Iter<'a, K, V> { |
1336 | type Item = (&'a K, &'a V); |
1337 | |
1338 | fn next(&mut self) -> Option<(&'a K, &'a V)> { |
1339 | if self.len == 0 { |
1340 | return None; |
1341 | } |
1342 | |
1343 | let key = unsafe { &(*(*self.ptr).key.as_ptr()) as &K }; |
1344 | let val = unsafe { &(*(*self.ptr).val.as_ptr()) as &V }; |
1345 | |
1346 | self.len -= 1; |
1347 | self.ptr = unsafe { (*self.ptr).next }; |
1348 | |
1349 | Some((key, val)) |
1350 | } |
1351 | |
1352 | fn size_hint(&self) -> (usize, Option<usize>) { |
1353 | (self.len, Some(self.len)) |
1354 | } |
1355 | |
1356 | fn count(self) -> usize { |
1357 | self.len |
1358 | } |
1359 | } |
1360 | |
1361 | impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> { |
1362 | fn next_back(&mut self) -> Option<(&'a K, &'a V)> { |
1363 | if self.len == 0 { |
1364 | return None; |
1365 | } |
1366 | |
1367 | let key: &K = unsafe { &(*(*self.end).key.as_ptr()) as &K }; |
1368 | let val: &V = unsafe { &(*(*self.end).val.as_ptr()) as &V }; |
1369 | |
1370 | self.len -= 1; |
1371 | self.end = unsafe { (*self.end).prev }; |
1372 | |
1373 | Some((key, val)) |
1374 | } |
1375 | } |
1376 | |
// `len` is tracked exactly, so the iterator can promise an exact size and that
// it keeps returning `None` once exhausted.
impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {}
impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}

impl<'a, K, V> Clone for Iter<'a, K, V> {
    // Manual impl: a derived `Clone` would add unwanted `K: Clone`/`V: Clone` bounds.
    fn clone(&self) -> Iter<'a, K, V> {
        Iter {
            len: self.len,
            ptr: self.ptr,
            end: self.end,
            phantom: PhantomData,
        }
    }
}

// The compiler does not automatically derive Send and Sync for Iter because it contains
// raw pointers.
unsafe impl<'a, K: Send, V: Send> Send for Iter<'a, K, V> {}
unsafe impl<'a, K: Sync, V: Sync> Sync for Iter<'a, K, V> {}
1395 | |
/// An iterator over mutable entries of a `LruCache`.
///
/// This `struct` is created by the [`iter_mut`] method on [`LruCache`][`LruCache`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.LruCache.html#method.iter_mut
/// [`LruCache`]: struct.LruCache.html
pub struct IterMut<'a, K: 'a, V: 'a> {
    // Number of entries not yet yielded (from either end).
    len: usize,

    // Next node to yield when iterating forwards (MRU end).
    ptr: *mut LruEntry<K, V>,
    // Next node to yield when iterating backwards (LRU end).
    end: *mut LruEntry<K, V>,

    // Ties the iterator's lifetime to the cache borrow it came from.
    phantom: PhantomData<&'a K>,
}
1411 | |
1412 | impl<'a, K, V> Iterator for IterMut<'a, K, V> { |
1413 | type Item = (&'a K, &'a mut V); |
1414 | |
1415 | fn next(&mut self) -> Option<(&'a K, &'a mut V)> { |
1416 | if self.len == 0 { |
1417 | return None; |
1418 | } |
1419 | |
1420 | let key = unsafe { &mut (*(*self.ptr).key.as_mut_ptr()) as &mut K }; |
1421 | let val = unsafe { &mut (*(*self.ptr).val.as_mut_ptr()) as &mut V }; |
1422 | |
1423 | self.len -= 1; |
1424 | self.ptr = unsafe { (*self.ptr).next }; |
1425 | |
1426 | Some((key, val)) |
1427 | } |
1428 | |
1429 | fn size_hint(&self) -> (usize, Option<usize>) { |
1430 | (self.len, Some(self.len)) |
1431 | } |
1432 | |
1433 | fn count(self) -> usize { |
1434 | self.len |
1435 | } |
1436 | } |
1437 | |
1438 | impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> { |
1439 | fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { |
1440 | if self.len == 0 { |
1441 | return None; |
1442 | } |
1443 | |
1444 | let key: &mut K = unsafe { &mut (*(*self.end).key.as_mut_ptr()) as &mut K }; |
1445 | let val: &mut V = unsafe { &mut (*(*self.end).val.as_mut_ptr()) as &mut V }; |
1446 | |
1447 | self.len -= 1; |
1448 | self.end = unsafe { (*self.end).prev }; |
1449 | |
1450 | Some((key, val)) |
1451 | } |
1452 | } |
1453 | |
// `len` is tracked exactly, so the iterator can promise an exact size and that
// it keeps returning `None` once exhausted.
impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {}
impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {}

// The compiler does not automatically derive Send and Sync for IterMut because it contains
// raw pointers.
unsafe impl<'a, K: Send, V: Send> Send for IterMut<'a, K, V> {}
unsafe impl<'a, K: Sync, V: Sync> Sync for IterMut<'a, K, V> {}
1461 | |
/// An iterator that moves out of a `LruCache`.
///
/// This `struct` is created by the [`into_iter`] method on [`LruCache`][`LruCache`]. See its
/// documentation for more.
///
/// [`into_iter`]: struct.LruCache.html#method.into_iter
/// [`LruCache`]: struct.LruCache.html
pub struct IntoIter<K, V>
where
    K: Hash + Eq,
{
    // The iterator owns the cache and drains it via `pop_lru`.
    cache: LruCache<K, V>,
}
1475 | |
1476 | impl<K, V> Iterator for IntoIter<K, V> |
1477 | where |
1478 | K: Hash + Eq, |
1479 | { |
1480 | type Item = (K, V); |
1481 | |
1482 | fn next(&mut self) -> Option<(K, V)> { |
1483 | self.cache.pop_lru() |
1484 | } |
1485 | |
1486 | fn size_hint(&self) -> (usize, Option<usize>) { |
1487 | let len: usize = self.cache.len(); |
1488 | (len, Some(len)) |
1489 | } |
1490 | |
1491 | fn count(self) -> usize { |
1492 | self.cache.len() |
1493 | } |
1494 | } |
1495 | |
// The cache's length is known exactly, and `pop_lru` keeps returning `None`
// once the cache is empty.
impl<K, V> ExactSizeIterator for IntoIter<K, V> where K: Hash + Eq {}
impl<K, V> FusedIterator for IntoIter<K, V> where K: Hash + Eq {}

// Consuming iteration: `for (k, v) in cache` yields owned pairs in
// least-recently-used order (via `pop_lru`).
impl<K: Hash + Eq, V> IntoIterator for LruCache<K, V> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    fn into_iter(self) -> IntoIter<K, V> {
        IntoIter { cache: self }
    }
}
1507 | |
1508 | #[cfg (test)] |
1509 | mod tests { |
1510 | use super::LruCache; |
1511 | use core::{fmt::Debug, num::NonZeroUsize}; |
1512 | use scoped_threadpool::Pool; |
1513 | use std::sync::atomic::{AtomicUsize, Ordering}; |
1514 | |
    // Asserts that `opt` is `Some` and its borrowed contents equal `v`.
    fn assert_opt_eq<V: PartialEq + Debug>(opt: Option<&V>, v: V) {
        assert!(opt.is_some());
        assert_eq!(opt.unwrap(), &v);
    }

    // Same as `assert_opt_eq`, but for mutable references.
    fn assert_opt_eq_mut<V: PartialEq + Debug>(opt: Option<&mut V>, v: V) {
        assert!(opt.is_some());
        assert_eq!(opt.unwrap(), &v);
    }

    // Asserts that `opt` is `Some((k, v))` matching the expected pair `kv`.
    fn assert_opt_eq_tuple<K: PartialEq + Debug, V: PartialEq + Debug>(
        opt: Option<(&K, &V)>,
        kv: (K, V),
    ) {
        assert!(opt.is_some());
        let res = opt.unwrap();
        assert_eq!(res.0, &kv.0);
        assert_eq!(res.1, &kv.1);
    }

    // Same as `assert_opt_eq_tuple`, but the value side is a mutable reference.
    fn assert_opt_eq_mut_tuple<K: PartialEq + Debug, V: PartialEq + Debug>(
        opt: Option<(&K, &mut V)>,
        kv: (K, V),
    ) {
        assert!(opt.is_some());
        let res = opt.unwrap();
        assert_eq!(res.0, &kv.0);
        assert_eq!(res.1, &kv.1);
    }
1544 | |
    // An unbounded cache must keep every insertion (no eviction).
    #[test ]
    fn test_unbounded() {
        let mut cache = LruCache::unbounded();
        for i in 0..13370 {
            cache.put(i, ());
        }
        assert_eq!(cache.len(), 13370);
    }

    // A cache built with a custom hasher still enforces its capacity.
    #[test ]
    #[cfg (feature = "hashbrown" )]
    fn test_with_hasher() {
        use core::num::NonZeroUsize;

        use hashbrown::hash_map::DefaultHashBuilder;

        let s = DefaultHashBuilder::default();
        let mut cache = LruCache::with_hasher(NonZeroUsize::new(16).unwrap(), s);

        for i in 0..13370 {
            cache.put(i, ());
        }
        assert_eq!(cache.len(), 16);
    }
1569 | |
    // Basic put/get round-trip within capacity.
    #[test ]
    fn test_put_and_get() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
        assert!(cache.is_empty());

        assert_eq!(cache.put("apple" , "red" ), None);
        assert_eq!(cache.put("banana" , "yellow" ), None);

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert!(!cache.is_empty());
        assert_opt_eq(cache.get(&"apple" ), "red" );
        assert_opt_eq(cache.get(&"banana" ), "yellow" );
    }

    // `get_or_insert` returns the existing value on a hit and only calls the
    // closure (and inserts) on a miss.
    #[test ]
    fn test_put_and_get_or_insert() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
        assert!(cache.is_empty());

        assert_eq!(cache.put("apple" , "red" ), None);
        assert_eq!(cache.put("banana" , "yellow" ), None);

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert!(!cache.is_empty());
        assert_eq!(cache.get_or_insert("apple" , || "orange" ), &"red" );
        assert_eq!(cache.get_or_insert("banana" , || "orange" ), &"yellow" );
        assert_eq!(cache.get_or_insert("lemon" , || "orange" ), &"orange" );
        assert_eq!(cache.get_or_insert("lemon" , || "red" ), &"orange" );
    }

    // `try_get_or_insert` propagates the closure's error only on a miss; hits
    // never invoke the closure.
    #[test ]
    fn test_try_get_or_insert() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        assert_eq!(
            cache.try_get_or_insert::<_, &str>("apple" , || Ok("red" )),
            Ok(&"red" )
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("apple" , || Err("failed" )),
            Ok(&"red" )
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("banana" , || Ok("orange" )),
            Ok(&"orange" )
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("lemon" , || Err("failed" )),
            Err("failed" )
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("banana" , || Err("failed" )),
            Ok(&"orange" )
        );
    }
1627 | |
    // `get_or_insert_mut` gives a mutable reference; writes through it persist.
    #[test ]
    fn test_put_and_get_or_insert_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
        assert!(cache.is_empty());

        assert_eq!(cache.put("apple" , "red" ), None);
        assert_eq!(cache.put("banana" , "yellow" ), None);

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);

        let v = cache.get_or_insert_mut("apple" , || "orange" );
        assert_eq!(v, &"red" );
        *v = "blue" ;

        assert_eq!(cache.get_or_insert_mut("apple" , || "orange" ), &"blue" );
        assert_eq!(cache.get_or_insert_mut("banana" , || "orange" ), &"yellow" );
        assert_eq!(cache.get_or_insert_mut("lemon" , || "orange" ), &"orange" );
        assert_eq!(cache.get_or_insert_mut("lemon" , || "red" ), &"orange" );
    }

    // `try_get_or_insert_mut`: mutation through the Ok reference persists, and
    // errors surface only on a miss.
    #[test ]
    fn test_try_get_or_insert_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put(1, "a" );
        cache.put(2, "b" );
        cache.put(2, "c" );

        let f = || -> Result<&str, &str> { Err("failed" ) };
        let a = || -> Result<&str, &str> { Ok("a" ) };
        let b = || -> Result<&str, &str> { Ok("b" ) };
        if let Ok(v) = cache.try_get_or_insert_mut(2, a) {
            *v = "d" ;
        }
        assert_eq!(cache.try_get_or_insert_mut(2, a), Ok(&mut "d" ));
        assert_eq!(cache.try_get_or_insert_mut(3, f), Err("failed" ));
        assert_eq!(cache.try_get_or_insert_mut(4, b), Ok(&mut "b" ));
        assert_eq!(cache.try_get_or_insert_mut(4, a), Ok(&mut "b" ));
    }

    // Basic put/get_mut round-trip within capacity.
    #[test ]
    fn test_put_and_get_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple" , "red" );
        cache.put("banana" , "yellow" );

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert_opt_eq_mut(cache.get_mut(&"apple" ), "red" );
        assert_opt_eq_mut(cache.get_mut(&"banana" ), "yellow" );
    }

    // Writing through a `get_mut` reference updates the stored value.
    #[test ]
    fn test_get_mut_and_update() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple" , 1);
        cache.put("banana" , 3);

        {
            let v = cache.get_mut(&"apple" ).unwrap();
            *v = 4;
        }

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert_opt_eq_mut(cache.get_mut(&"apple" ), 4);
        assert_opt_eq_mut(cache.get_mut(&"banana" ), 3);
    }
1699 | |
    // Re-putting an existing key replaces the value and returns the old one.
    #[test ]
    fn test_put_update() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        assert_eq!(cache.put("apple" , "red" ), None);
        assert_eq!(cache.put("apple" , "green" ), Some("red" ));

        assert_eq!(cache.len(), 1);
        assert_opt_eq(cache.get(&"apple" ), "green" );
    }

    // Exceeding capacity evicts the least-recently-used entry.
    #[test ]
    fn test_put_removes_oldest() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        assert_eq!(cache.put("apple" , "red" ), None);
        assert_eq!(cache.put("banana" , "yellow" ), None);
        assert_eq!(cache.put("pear" , "green" ), None);

        assert!(cache.get(&"apple" ).is_none());
        assert_opt_eq(cache.get(&"banana" ), "yellow" );
        assert_opt_eq(cache.get(&"pear" ), "green" );

        // Even though we inserted "apple" into the cache earlier it has since been removed from
        // the cache so there is no current value for `put` to return.
        assert_eq!(cache.put("apple" , "green" ), None);
        assert_eq!(cache.put("tomato" , "red" ), None);

        assert!(cache.get(&"pear" ).is_none());
        assert_opt_eq(cache.get(&"apple" ), "green" );
        assert_opt_eq(cache.get(&"tomato" ), "red" );
    }

    // `peek` reads values without promoting them, so eviction order is unchanged.
    #[test ]
    fn test_peek() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple" , "red" );
        cache.put("banana" , "yellow" );

        assert_opt_eq(cache.peek(&"banana" ), "yellow" );
        assert_opt_eq(cache.peek(&"apple" ), "red" );

        cache.put("pear" , "green" );

        assert!(cache.peek(&"apple" ).is_none());
        assert_opt_eq(cache.peek(&"banana" ), "yellow" );
        assert_opt_eq(cache.peek(&"pear" ), "green" );
    }

    // `peek_mut` behaves like `peek` (no promotion) but allows mutation.
    #[test ]
    fn test_peek_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple" , "red" );
        cache.put("banana" , "yellow" );

        assert_opt_eq_mut(cache.peek_mut(&"banana" ), "yellow" );
        assert_opt_eq_mut(cache.peek_mut(&"apple" ), "red" );
        assert!(cache.peek_mut(&"pear" ).is_none());

        cache.put("pear" , "green" );

        assert!(cache.peek_mut(&"apple" ).is_none());
        assert_opt_eq_mut(cache.peek_mut(&"banana" ), "yellow" );
        assert_opt_eq_mut(cache.peek_mut(&"pear" ), "green" );

        {
            let v = cache.peek_mut(&"banana" ).unwrap();
            *v = "green" ;
        }

        assert_opt_eq_mut(cache.peek_mut(&"banana" ), "green" );
    }
1774 | |
1775 | #[test ] |
1776 | fn test_peek_lru() { |
1777 | let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
1778 | |
1779 | assert!(cache.peek_lru().is_none()); |
1780 | |
1781 | cache.put("apple" , "red" ); |
1782 | cache.put("banana" , "yellow" ); |
1783 | assert_opt_eq_tuple(cache.peek_lru(), ("apple" , "red" )); |
1784 | |
1785 | cache.get(&"apple" ); |
1786 | assert_opt_eq_tuple(cache.peek_lru(), ("banana" , "yellow" )); |
1787 | |
1788 | cache.clear(); |
1789 | assert!(cache.peek_lru().is_none()); |
1790 | } |
1791 | |
1792 | #[test ] |
1793 | fn test_contains() { |
1794 | let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
1795 | |
1796 | cache.put("apple" , "red" ); |
1797 | cache.put("banana" , "yellow" ); |
1798 | cache.put("pear" , "green" ); |
1799 | |
1800 | assert!(!cache.contains(&"apple" )); |
1801 | assert!(cache.contains(&"banana" )); |
1802 | assert!(cache.contains(&"pear" )); |
1803 | } |
1804 | |
1805 | #[test ] |
1806 | fn test_pop() { |
1807 | let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap()); |
1808 | |
1809 | cache.put("apple" , "red" ); |
1810 | cache.put("banana" , "yellow" ); |
1811 | |
1812 | assert_eq!(cache.len(), 2); |
1813 | assert_opt_eq(cache.get(&"apple" ), "red" ); |
1814 | assert_opt_eq(cache.get(&"banana" ), "yellow" ); |
1815 | |
1816 | let popped = cache.pop(&"apple" ); |
1817 | assert!(popped.is_some()); |
1818 | assert_eq!(popped.unwrap(), "red" ); |
1819 | assert_eq!(cache.len(), 1); |
1820 | assert!(cache.get(&"apple" ).is_none()); |
1821 | assert_opt_eq(cache.get(&"banana" ), "yellow" ); |
1822 | } |
1823 | |
#[test]
fn test_pop_entry() {
    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    cache.put("apple", "red");
    cache.put("banana", "yellow");

    // Both entries are present before popping.
    assert_eq!(cache.len(), 2);
    assert_opt_eq(cache.get(&"apple"), "red");
    assert_opt_eq(cache.get(&"banana"), "yellow");

    // pop_entry returns the owned (key, value) pair and removes the entry.
    assert_eq!(cache.pop_entry(&"apple"), Some(("apple", "red")));
    assert_eq!(cache.len(), 1);
    assert!(cache.get(&"apple").is_none());
    assert_opt_eq(cache.get(&"banana"), "yellow");
}
1841 | |
#[test ]
fn test_pop_lru() {
    // Exercises pop_lru: entries must come out in exact least-recently-used
    // order, including entries that were re-promoted via get.
    let mut cache = LruCache::new(NonZeroUsize::new(200).unwrap());

    // 225 inserts into a 200-capacity cache evict the 25 oldest entries:
    // "A" keys 0..25.
    for i in 0..75 {
        cache.put(i, "A" );
    }
    for i in 0..75 {
        cache.put(i + 100, "B" );
    }
    for i in 0..75 {
        cache.put(i + 200, "C" );
    }
    assert_eq!(cache.len(), 200);

    // Touch the "B" entries in reverse key order (174 down to 100), then the
    // surviving "A" key 25, moving each to the most-recently-used end.
    for i in 0..75 {
        assert_opt_eq(cache.get(&(74 - i + 100)), "B" );
    }
    assert_opt_eq(cache.get(&25), "A" );

    // LRU-to-MRU order is now:
    // "A" 26..75, "C" 200..275, "B" 174 down to 100, then "A" 25.
    for i in 26..75 {
        assert_eq!(cache.pop_lru(), Some((i, "A" )));
    }
    for i in 0..75 {
        assert_eq!(cache.pop_lru(), Some((i + 200, "C" )));
    }
    for i in 0..75 {
        assert_eq!(cache.pop_lru(), Some((74 - i + 100, "B" )));
    }
    assert_eq!(cache.pop_lru(), Some((25, "A" )));
    // The cache is now empty; pop_lru must keep returning None.
    for _ in 0..50 {
        assert_eq!(cache.pop_lru(), None);
    }
}
1876 | |
#[test]
fn test_clear() {
    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    for (fruit, color) in [("apple", "red"), ("banana", "yellow")] {
        cache.put(fruit, color);
    }

    // Sanity-check the contents before clearing.
    assert_eq!(cache.len(), 2);
    assert_opt_eq(cache.get(&"apple"), "red");
    assert_opt_eq(cache.get(&"banana"), "yellow");

    // clear() must leave the cache empty.
    cache.clear();
    assert_eq!(cache.len(), 0);
}
1891 | |
#[test]
fn test_resize_larger() {
    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    cache.put(1, "a");
    cache.put(2, "b");

    // Growing the capacity must keep existing entries and make room for
    // new ones without any evictions.
    cache.resize(NonZeroUsize::new(4).unwrap());
    cache.put(3, "c");
    cache.put(4, "d");

    assert_eq!(cache.len(), 4);
    for (key, value) in [(1, "a"), (2, "b"), (3, "c"), (4, "d")] {
        assert_eq!(cache.get(&key), Some(&value));
    }
}
1908 | |
#[test]
fn test_resize_smaller() {
    let mut cache = LruCache::new(NonZeroUsize::new(4).unwrap());
    for (key, value) in [(1, "a"), (2, "b"), (3, "c"), (4, "d")] {
        cache.put(key, value);
    }

    // Shrinking to 2 slots must evict the two least-recently-used entries
    // (keys 1 and 2) and keep the rest.
    cache.resize(NonZeroUsize::new(2).unwrap());

    assert_eq!(cache.len(), 2);
    assert!(cache.get(&1).is_none());
    assert!(cache.get(&2).is_none());
    assert_eq!(cache.get(&3), Some(&"c"));
    assert_eq!(cache.get(&4), Some(&"d"));
}
1926 | |
#[test]
fn test_send() {
    use std::thread;

    let mut cache = LruCache::new(NonZeroUsize::new(4).unwrap());
    cache.put(1, "a");

    // Moving the cache into another thread compiles only if LruCache is
    // Send; joining confirms the lookup succeeded over there.
    let worker = thread::spawn(move || {
        assert_eq!(cache.get(&1), Some(&"a"));
    });
    assert!(worker.join().is_ok());
}
1940 | |
#[test ]
fn test_multiple_threads() {
    // A shared borrow of the cache must be usable from a scoped worker
    // thread (i.e. &LruCache is Sync for these key/value types).
    let mut pool = Pool::new(1);
    let mut cache = LruCache::new(NonZeroUsize::new(4).unwrap());
    cache.put(1, "a" );

    let cache_ref = &cache;
    pool.scoped(|scoped| {
        scoped.execute(move || {
            // peek does not mutate LRU order, so a shared borrow suffices.
            assert_eq!(cache_ref.peek(&1), Some(&"a" ));
        });
    });

    // The same shared borrow is still usable after the scope ends.
    assert_eq!((cache_ref).peek(&1), Some(&"a" ));
}
1956 | |
#[test ]
fn test_iter_forwards() {
    // Forward iteration visits entries from most- to least-recently used,
    // and the iterator's reported length shrinks with each step.
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a" , 1);
    cache.put("b" , 2);
    cache.put("c" , 3);

    {
        // iter const: shared references, MRU entry ("c") first.
        let mut iter = cache.iter();
        assert_eq!(iter.len(), 3);
        assert_opt_eq_tuple(iter.next(), ("c" , 3));

        assert_eq!(iter.len(), 2);
        assert_opt_eq_tuple(iter.next(), ("b" , 2));

        assert_eq!(iter.len(), 1);
        assert_opt_eq_tuple(iter.next(), ("a" , 1));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next(), None);
    }
    {
        // iter mut: same MRU-to-LRU order, values borrowed mutably.
        let mut iter = cache.iter_mut();
        assert_eq!(iter.len(), 3);
        assert_opt_eq_mut_tuple(iter.next(), ("c" , 3));

        assert_eq!(iter.len(), 2);
        assert_opt_eq_mut_tuple(iter.next(), ("b" , 2));

        assert_eq!(iter.len(), 1);
        assert_opt_eq_mut_tuple(iter.next(), ("a" , 1));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next(), None);
    }
}
1995 | |
#[test ]
fn test_iter_backwards() {
    // Reverse iteration (next_back) visits entries from least- to
    // most-recently used; len() shrinks from either end.
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a" , 1);
    cache.put("b" , 2);
    cache.put("c" , 3);

    {
        // iter const: shared references, LRU entry ("a") first.
        let mut iter = cache.iter();
        assert_eq!(iter.len(), 3);
        assert_opt_eq_tuple(iter.next_back(), ("a" , 1));

        assert_eq!(iter.len(), 2);
        assert_opt_eq_tuple(iter.next_back(), ("b" , 2));

        assert_eq!(iter.len(), 1);
        assert_opt_eq_tuple(iter.next_back(), ("c" , 3));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next_back(), None);
    }

    {
        // iter mut: same LRU-to-MRU order, values borrowed mutably.
        let mut iter = cache.iter_mut();
        assert_eq!(iter.len(), 3);
        assert_opt_eq_mut_tuple(iter.next_back(), ("a" , 1));

        assert_eq!(iter.len(), 2);
        assert_opt_eq_mut_tuple(iter.next_back(), ("b" , 2));

        assert_eq!(iter.len(), 1);
        assert_opt_eq_mut_tuple(iter.next_back(), ("c" , 3));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next_back(), None);
    }
}
2035 | |
#[test ]
fn test_iter_forwards_and_backwards() {
    // Interleaving next() and next_back() must consume the sequence from
    // both ends without overlap, meeting in the middle.
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a" , 1);
    cache.put("b" , 2);
    cache.put("c" , 3);

    {
        // iter const: front yields MRU ("c"), back yields LRU ("a"),
        // then only "b" remains.
        let mut iter = cache.iter();
        assert_eq!(iter.len(), 3);
        assert_opt_eq_tuple(iter.next(), ("c" , 3));

        assert_eq!(iter.len(), 2);
        assert_opt_eq_tuple(iter.next_back(), ("a" , 1));

        assert_eq!(iter.len(), 1);
        assert_opt_eq_tuple(iter.next(), ("b" , 2));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next_back(), None);
    }
    {
        // iter mut: identical traversal with mutable value borrows.
        let mut iter = cache.iter_mut();
        assert_eq!(iter.len(), 3);
        assert_opt_eq_mut_tuple(iter.next(), ("c" , 3));

        assert_eq!(iter.len(), 2);
        assert_opt_eq_mut_tuple(iter.next_back(), ("a" , 1));

        assert_eq!(iter.len(), 1);
        assert_opt_eq_mut_tuple(iter.next(), ("b" , 2));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next_back(), None);
    }
}
2074 | |
#[test ]
fn test_iter_multiple_threads() {
    // An iterator borrowed from the cache can be advanced from a scoped
    // worker thread and then continued on the main thread.
    let mut pool = Pool::new(1);
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a" , 1);
    cache.put("b" , 2);
    cache.put("c" , 3);

    let mut iter = cache.iter();
    assert_eq!(iter.len(), 3);
    assert_opt_eq_tuple(iter.next(), ("c" , 3));

    {
        // Advance one step on the worker thread through a &mut borrow.
        let iter_ref = &mut iter;
        pool.scoped(|scoped| {
            scoped.execute(move || {
                assert_eq!(iter_ref.len(), 2);
                assert_opt_eq_tuple(iter_ref.next(), ("b" , 2));
            });
        });
    }

    // Back on the main thread: the progress made remotely is visible.
    assert_eq!(iter.len(), 1);
    assert_opt_eq_tuple(iter.next(), ("a" , 1));

    assert_eq!(iter.len(), 0);
    assert_eq!(iter.next(), None);
}
2103 | |
#[test ]
fn test_iter_clone() {
    // A cloned iterator has an independent cursor: both copies walk the
    // full MRU-to-LRU sequence in lockstep without affecting each other.
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a" , 1);
    cache.put("b" , 2);

    let mut iter = cache.iter();
    let mut iter_clone = iter.clone();

    assert_eq!(iter.len(), 2);
    assert_opt_eq_tuple(iter.next(), ("b" , 2));
    assert_eq!(iter_clone.len(), 2);
    assert_opt_eq_tuple(iter_clone.next(), ("b" , 2));

    assert_eq!(iter.len(), 1);
    assert_opt_eq_tuple(iter.next(), ("a" , 1));
    assert_eq!(iter_clone.len(), 1);
    assert_opt_eq_tuple(iter_clone.next(), ("a" , 1));

    assert_eq!(iter.len(), 0);
    assert_eq!(iter.next(), None);
    assert_eq!(iter_clone.len(), 0);
    assert_eq!(iter_clone.next(), None);
}
2128 | |
#[test]
fn test_into_iter() {
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // The consuming iterator yields owned pairs from least- to
    // most-recently used, and its reported length shrinks as it goes.
    let mut iter = cache.into_iter();
    for (remaining, expected) in [(3, ("a", 1)), (2, ("b", 2)), (1, ("c", 3))] {
        assert_eq!(iter.len(), remaining);
        assert_eq!(iter.next(), Some(expected));
    }
    assert_eq!(iter.len(), 0);
    assert_eq!(iter.next(), None);
}
2149 | |
#[test ]
fn test_that_pop_actually_detaches_node() {
    // Popping a middle entry must unlink it from the internal list: a
    // subsequent put fills the freed slot without evicting anything, and
    // iteration walks the remaining entries cleanly.
    let mut cache = LruCache::new(NonZeroUsize::new(5).unwrap());

    cache.put("a" , 1);
    cache.put("b" , 2);
    cache.put("c" , 3);
    cache.put("d" , 4);
    cache.put("e" , 5);

    assert_eq!(cache.pop(&"c" ), Some(3));

    // The cache now has a free slot, so this put evicts nothing.
    cache.put("f" , 6);

    // MRU-to-LRU order: "c" is gone, every other entry is intact.
    let mut iter = cache.iter();
    assert_opt_eq_tuple(iter.next(), ("f" , 6));
    assert_opt_eq_tuple(iter.next(), ("e" , 5));
    assert_opt_eq_tuple(iter.next(), ("d" , 4));
    assert_opt_eq_tuple(iter.next(), ("b" , 2));
    assert_opt_eq_tuple(iter.next(), ("a" , 1));
    assert!(iter.next().is_none());
}
2172 | |
#[test]
fn test_get_with_borrow() {
    use alloc::string::String;

    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    cache.put(String::from("apple"), "red");

    // A String key can be looked up with a plain &str thanks to the
    // Borrow<Q> bound on get.
    assert_opt_eq(cache.get("apple"), "red");
}
2184 | |
#[test]
fn test_get_mut_with_borrow() {
    use alloc::string::String;

    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    cache.put(String::from("apple"), "red");

    // A String key can be looked up mutably with a plain &str thanks to
    // the Borrow<Q> bound on get_mut.
    assert_opt_eq_mut(cache.get_mut("apple"), "red");
}
2196 | |
#[test ]
fn test_no_memory_leaks() {
    // Every value ever inserted must be dropped exactly once — whether it
    // was evicted by a later put (capacity is 1) or dropped with the cache.
    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    // Value type that counts its drops.
    struct DropCounter;

    impl Drop for DropCounter {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    let n = 100;
    for _ in 0..n {
        let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
        for i in 0..n {
            cache.put(i, DropCounter {});
        }
    }
    // n caches x n inserts each = n * n drops in total.
    assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n);
}
2218 | |
#[test ]
fn test_no_memory_leaks_with_clear() {
    // Like test_no_memory_leaks, but the surviving entry is released via an
    // explicit clear() instead of the cache's own Drop. The total must be
    // identical: every value dropped exactly once.
    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    // Value type that counts its drops.
    struct DropCounter;

    impl Drop for DropCounter {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    let n = 100;
    for _ in 0..n {
        let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
        for i in 0..n {
            cache.put(i, DropCounter {});
        }
        // Explicitly drop the remaining entry before the cache itself.
        cache.clear();
    }
    // n caches x n inserts each = n * n drops in total.
    assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n);
}
2241 | |
#[test]
fn test_no_memory_leaks_with_resize() {
    // Values evicted when the cache is shrunk via resize() must be dropped
    // exactly once, and the survivor dropped when the cache goes out of
    // scope. The previous version of this test was a copy of
    // test_no_memory_leaks_with_clear and never called resize at all.
    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    // Value type that counts its drops.
    struct DropCounter;

    impl Drop for DropCounter {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    let n = 100;
    for _ in 0..n {
        // Fill an n-capacity cache, then shrink it to a single slot:
        // resize must drop the n - 1 evicted values immediately, and the
        // one survivor is dropped when `cache` leaves scope.
        let mut cache = LruCache::new(NonZeroUsize::new(n).unwrap());
        for i in 0..n {
            cache.put(i, DropCounter {});
        }
        cache.resize(NonZeroUsize::new(1).unwrap());
    }
    // n caches x n values each = n * n drops in total.
    assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n);
}
2264 | |
#[test]
fn test_no_memory_leaks_with_pop() {
    // Both the stored key and the probe key used for each pop must be
    // dropped exactly once: n * n pops, two key drops each.
    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    // Key type that counts its drops; equality and hashing delegate to the
    // inner usize so lookups behave like plain integer keys.
    #[derive(Hash, Eq)]
    struct KeyDropCounter(usize);

    impl PartialEq for KeyDropCounter {
        fn eq(&self, other: &Self) -> bool {
            self.0.eq(&other.0)
        }
    }

    impl Drop for KeyDropCounter {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    let n = 100;
    for _ in 0..n {
        let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());

        // Use n (not a hard-coded 100) so the loop bound and the final
        // n * n * 2 assertion cannot drift apart.
        for i in 0..n {
            // The pop drops the stored key; the temporary probe key is
            // dropped at the end of the statement — two drops per round.
            cache.put(KeyDropCounter(i), i);
            cache.pop(&KeyDropCounter(i));
        }
    }

    assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n * 2);
}
2296 | |
#[test ]
fn test_promote_and_demote() {
    // promote moves an entry to the most-recently-used end; demote moves
    // it to the least-recently-used end.
    let mut cache = LruCache::new(NonZeroUsize::new(5).unwrap());
    // Initial LRU-to-MRU order: 0, 1, 2, 3, 4.
    for i in 0..5 {
        cache.push(i, i);
    }
    cache.promote(&1); // LRU-to-MRU: 0, 2, 3, 4, 1
    cache.promote(&0); // LRU-to-MRU: 2, 3, 4, 1, 0
    cache.demote(&3); // LRU-to-MRU: 3, 2, 4, 1, 0
    cache.demote(&4); // LRU-to-MRU: 4, 3, 2, 1, 0
    // Draining with pop_lru confirms the final order.
    assert_eq!(cache.pop_lru(), Some((4, 4)));
    assert_eq!(cache.pop_lru(), Some((3, 3)));
    assert_eq!(cache.pop_lru(), Some((2, 2)));
    assert_eq!(cache.pop_lru(), Some((1, 1)));
    assert_eq!(cache.pop_lru(), Some((0, 0)));
    assert_eq!(cache.pop_lru(), None);
}
2314 | |
#[test]
fn test_get_key_value() {
    use alloc::string::String;

    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    cache.put(String::from("apple"), "red");

    // A hit returns borrowed references to both the stored key and the
    // value (looked up via a plain &str); a miss returns None.
    let hit = cache.get_key_value("apple");
    assert_eq!(hit, Some((&String::from("apple"), &"red")));
    assert_eq!(cache.get_key_value("banana"), None);
}
2330 | |
#[test]
fn test_get_key_value_mut() {
    use alloc::string::String;

    let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    cache.put(String::from("apple"), "red");

    // The mutable variant hands back the key immutably and the value
    // mutably, so the value can be rewritten in place.
    let (key, value) = cache.get_key_value_mut("apple").unwrap();
    assert_eq!(key, &String::from("apple"));
    assert_eq!(value, &mut "red");
    *value = "green";

    // The in-place update is visible through a fresh lookup.
    assert_eq!(
        cache.get_key_value("apple"),
        Some((&String::from("apple"), &"green"))
    );
    assert_eq!(cache.get_key_value("banana"), None);
}
2351 | |
#[test]
fn test_clone() {
    let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // The clone must be an independent cache holding the same entries in
    // the same LRU order; draining both in lockstep proves it.
    let mut cloned = cache.clone();
    for expected in [("a", 1), ("b", 2), ("c", 3)] {
        assert_eq!(cache.pop_lru(), Some(expected));
        assert_eq!(cloned.pop_lru(), Some(expected));
    }
    assert_eq!(cache.pop_lru(), None);
    assert_eq!(cloned.pop_lru(), None);
}
2373 | } |
2374 | |
/// Doctests for what should *not* compile
///
/// Borrows handed out by the cache must not outlive the cache itself; each
/// snippet below must fail to compile because it tries to extend such a
/// borrow to `'static`. If a snippet ever starts compiling, a lifetime
/// bound on the API has been accidentally loosened.
///
/// ```compile_fail
/// let mut cache = lru::LruCache::<u32, u32>::unbounded();
/// let _: &'static u32 = cache.get_or_insert(0, || 92);
/// ```
///
/// ```compile_fail
/// let mut cache = lru::LruCache::<u32, u32>::unbounded();
/// let _: Option<(&'static u32, _)> = cache.peek_lru();
/// let _: Option<(_, &'static u32)> = cache.peek_lru();
/// ```
fn _test_lifetimes() {}
2388 | |