#![no_std]

#[cfg(feature = "hashbrown")]
extern crate hashbrown;

#[cfg(test)]
extern crate scoped_threadpool;

use alloc::borrow::Borrow;
use alloc::boxed::Box;
use core::fmt;
use core::hash::{BuildHasher, Hash, Hasher};
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem;
use core::num::NonZeroUsize;
use core::ptr::{self, NonNull};
use core::usize;

#[cfg(any(test, not(feature = "hashbrown")))]
extern crate std;

#[cfg(feature = "hashbrown")]
use hashbrown::HashMap;
#[cfg(not(feature = "hashbrown"))]
use std::collections::HashMap;

extern crate alloc;

struct KeyRef<K> {
    k: *const K,
}

impl<K: Hash> Hash for KeyRef<K> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        unsafe { (*self.k).hash(state) }
    }
}

impl<K: PartialEq> PartialEq for KeyRef<K> {
    #![allow(unknown_lints)]
    #[allow(clippy::unconditional_recursion)]
    fn eq(&self, other: &KeyRef<K>) -> bool {
        unsafe { (*self.k).eq(&*other.k) }
    }
}

impl<K: Eq> Eq for KeyRef<K> {}

#[repr(transparent)]
struct KeyWrapper<K: ?Sized>(K);

impl<K: ?Sized> KeyWrapper<K> {
    fn from_ref(key: &K) -> &Self {
        unsafe { &*(key as *const K as *const KeyWrapper<K>) }
    }
}

impl<K: ?Sized + Hash> Hash for KeyWrapper<K> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.hash(state)
    }
}

impl<K: ?Sized + PartialEq> PartialEq for KeyWrapper<K> {
    #![allow(unknown_lints)]
    #[allow(clippy::unconditional_recursion)]
    fn eq(&self, other: &Self) -> bool {
        self.0.eq(&other.0)
    }
}

impl<K: ?Sized + Eq> Eq for KeyWrapper<K> {}

impl<K, Q> Borrow<KeyWrapper<Q>> for KeyRef<K>
where
    K: Borrow<Q>,
    Q: ?Sized,
{
    fn borrow(&self) -> &KeyWrapper<Q> {
        let key = unsafe { &*self.k }.borrow();
        KeyWrapper::from_ref(key)
    }
}

struct LruEntry<K, V> {
    key: mem::MaybeUninit<K>,
    val: mem::MaybeUninit<V>,
    prev: *mut LruEntry<K, V>,
    next: *mut LruEntry<K, V>,
}

impl<K, V> LruEntry<K, V> {
    fn new(key: K, val: V) -> Self {
        LruEntry {
            key: mem::MaybeUninit::new(key),
            val: mem::MaybeUninit::new(val),
            prev: ptr::null_mut(),
            next: ptr::null_mut(),
        }
    }

    fn new_sigil() -> Self {
        LruEntry {
            key: mem::MaybeUninit::uninit(),
            val: mem::MaybeUninit::uninit(),
            prev: ptr::null_mut(),
            next: ptr::null_mut(),
        }
    }
}

#[cfg(feature = "hashbrown")]
pub type DefaultHasher = hashbrown::hash_map::DefaultHashBuilder;
#[cfg(not(feature = "hashbrown"))]
pub type DefaultHasher = std::collections::hash_map::RandomState;

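/// An LRU cache backed by a `HashMap` plus an intrusive doubly linked list that tracks
/// recency of use. A minimal usage sketch, assuming this library is built as the `lru` crate:
///
/// ```
/// use core::num::NonZeroUsize;
/// use lru::LruCache;
///
/// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
/// cache.put("apple", 3);
/// cache.put("banana", 2);
/// cache.put("pear", 4); // capacity is 2, so "apple" (least recently used) is evicted
///
/// assert_eq!(cache.get(&"apple"), None);
/// assert_eq!(cache.get(&"banana"), Some(&2));
/// assert_eq!(cache.get(&"pear"), Some(&4));
/// ```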
pub struct LruCache<K, V, S = DefaultHasher> {
    map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
    cap: NonZeroUsize,

    // head and tail are sigil (sentinel) nodes that bracket the recency list
    head: *mut LruEntry<K, V>,
    tail: *mut LruEntry<K, V>,
}

impl<K, V> Clone for LruCache<K, V>
where
    K: Hash + PartialEq + Eq + Clone,
    V: Clone,
{
    fn clone(&self) -> Self {
        let mut new_lru = LruCache::new(self.cap());

        for (key, value) in self.iter().rev() {
            new_lru.push(key.clone(), value.clone());
        }

        new_lru
    }
}

impl<K: Hash + Eq, V> LruCache<K, V> {
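    /// Creates a new LRU cache that holds at most `cap` items. A brief sketch, assuming the
    /// crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let cache: LruCache<u64, &str> = LruCache::new(NonZeroUsize::new(10).unwrap());
    /// assert_eq!(cache.cap().get(), 10);
    /// ```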
    pub fn new(cap: NonZeroUsize) -> LruCache<K, V> {
        LruCache::construct(cap, HashMap::with_capacity(cap.get()))
    }

    /// Creates a new LRU cache that never automatically evicts items.
    pub fn unbounded() -> LruCache<K, V> {
        LruCache::construct(NonZeroUsize::new(usize::MAX).unwrap(), HashMap::default())
    }
}

impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
    pub fn with_hasher(cap: NonZeroUsize, hash_builder: S) -> LruCache<K, V, S> {
        LruCache::construct(
            cap,
            HashMap::with_capacity_and_hasher(cap.get(), hash_builder),
        )
    }

    pub fn unbounded_with_hasher(hash_builder: S) -> LruCache<K, V, S> {
        LruCache::construct(
            NonZeroUsize::new(usize::MAX).unwrap(),
            HashMap::with_hasher(hash_builder),
        )
    }

    // Creates a new LRU cache with the given capacity, using the supplied map as backing storage.
    fn construct(
        cap: NonZeroUsize,
        map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
    ) -> LruCache<K, V, S> {
        let cache = LruCache {
            map,
            cap,
            head: Box::into_raw(Box::new(LruEntry::new_sigil())),
            tail: Box::into_raw(Box::new(LruEntry::new_sigil())),
        };

        unsafe {
            (*cache.head).next = cache.tail;
            (*cache.tail).prev = cache.head;
        }

        cache
    }

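    /// Inserts a key-value pair, returning the old value if `k` was already present. The
    /// entry becomes the most recently used. A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// assert_eq!(cache.put(1, "a"), None);
    /// assert_eq!(cache.put(1, "b"), Some("a"));
    /// assert_eq!(cache.get(&1), Some(&"b"));
    /// ```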
    pub fn put(&mut self, k: K, v: V) -> Option<V> {
        self.capturing_put(k, v, false).map(|(_, v)| v)
    }

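    /// Like `put`, but also returns the entry that was displaced: the previous `(key, value)`
    /// pair if `k` was already present, or the evicted least-recently-used pair if the cache
    /// was full. A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// assert_eq!(cache.push(1, "a"), None);
    /// assert_eq!(cache.push(2, "b"), None);
    /// // same key: the old pair comes back
    /// assert_eq!(cache.push(2, "B"), Some((2, "b")));
    /// // new key at capacity: the least recently used pair (1, "a") is evicted
    /// assert_eq!(cache.push(3, "c"), Some((1, "a")));
    /// ```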
    pub fn push(&mut self, k: K, v: V) -> Option<(K, V)> {
        self.capturing_put(k, v, true)
    }

    // Used internally by `put` and `push`; when `capture` is true, entries displaced by the
    // insertion are returned to the caller.
    fn capturing_put(&mut self, k: K, mut v: V, capture: bool) -> Option<(K, V)> {
        let node_ref = self.map.get_mut(&KeyRef { k: &k });

        match node_ref {
            Some(node_ref) => {
                // The key is already in the cache: update its value and move it to the front
                // of the list.
                let node_ptr: *mut LruEntry<K, V> = node_ref.as_ptr();

                // Swap the new value in and drop the reference right after.
                let node_ref = unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) };
                mem::swap(&mut v, node_ref);
                let _ = node_ref;

                self.detach(node_ptr);
                self.attach(node_ptr);
                Some((k, v))
            }
            None => {
                let (replaced, node) = self.replace_or_create_node(k, v);
                let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

                self.attach(node_ptr);

                let keyref = unsafe { (*node_ptr).key.as_ptr() };
                self.map.insert(KeyRef { k: keyref }, node);

                replaced.filter(|_| capture)
            }
        }
    }

    // Used internally to reuse the least recently used node when the cache is full, or to
    // allocate a new node when there is still room.
    #[allow(clippy::type_complexity)]
    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, NonNull<LruEntry<K, V>>) {
        if self.len() == self.cap().get() {
            // The cache is full: remove the last entry so its node can hold the new key.
            let old_key = KeyRef {
                k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
            };
            let old_node = self.map.remove(&old_key).unwrap();
            let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();

            // Read out the node's old key and value while writing the new ones in.
            let replaced = unsafe {
                (
                    mem::replace(&mut (*node_ptr).key, mem::MaybeUninit::new(k)).assume_init(),
                    mem::replace(&mut (*node_ptr).val, mem::MaybeUninit::new(v)).assume_init(),
                )
            };

            self.detach(node_ptr);

            (Some(replaced), old_node)
        } else {
            // The cache is not full: allocate a new LruEntry.
            (None, unsafe {
                NonNull::new_unchecked(Box::into_raw(Box::new(LruEntry::new(k, v))))
            })
        }
    }

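    /// Returns a reference to the value for `k`, marking the entry as most recently used,
    /// or `None` if the key is not present. A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// cache.put("banana", 2);
    /// cache.put("pear", 4);
    ///
    /// assert_eq!(cache.get(&"banana"), Some(&2));
    /// assert_eq!(cache.get(&"apple"), None);
    /// ```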
    pub fn get<'a, Q>(&'a mut self, k: &Q) -> Option<&'a V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            Some(unsafe { &*(*node_ptr).val.as_ptr() })
        } else {
            None
        }
    }

    pub fn get_mut<'a, Q>(&'a mut self, k: &Q) -> Option<&'a mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            Some(unsafe { &mut *(*node_ptr).val.as_mut_ptr() })
        } else {
            None
        }
    }

    pub fn get_key_value<'a, Q>(&'a mut self, k: &Q) -> Option<(&'a K, &'a V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            Some(unsafe { (&*(*node_ptr).key.as_ptr(), &*(*node_ptr).val.as_ptr()) })
        } else {
            None
        }
    }

    pub fn get_key_value_mut<'a, Q>(&'a mut self, k: &Q) -> Option<(&'a K, &'a mut V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            Some(unsafe {
                (
                    &*(*node_ptr).key.as_ptr(),
                    &mut *(*node_ptr).val.as_mut_ptr(),
                )
            })
        } else {
            None
        }
    }

    pub fn get_or_insert<F>(&mut self, k: K, f: F) -> &V
    where
        F: FnOnce() -> V,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { &*(*node_ptr).val.as_ptr() }
        } else {
            let v = f();
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            unsafe { &*(*node_ptr).val.as_ptr() }
        }
    }

    pub fn try_get_or_insert<F, E>(&mut self, k: K, f: F) -> Result<&V, E>
    where
        F: FnOnce() -> Result<V, E>,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { Ok(&*(*node_ptr).val.as_ptr()) }
        } else {
            let v = f()?;
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            Ok(unsafe { &*(*node_ptr).val.as_ptr() })
        }
    }

    pub fn get_or_insert_mut<F>(&mut self, k: K, f: F) -> &mut V
    where
        F: FnOnce() -> V,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
        } else {
            let v = f();
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
        }
    }

    pub fn try_get_or_insert_mut<F, E>(&mut self, k: K, f: F) -> Result<&mut V, E>
    where
        F: FnOnce() -> Result<V, E>,
    {
        if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.detach(node_ptr);
            self.attach(node_ptr);

            unsafe { Ok(&mut *(*node_ptr).val.as_mut_ptr()) }
        } else {
            let v = f()?;
            let (_, node) = self.replace_or_create_node(k, v);
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

            self.attach(node_ptr);

            let keyref = unsafe { (*node_ptr).key.as_ptr() };
            self.map.insert(KeyRef { k: keyref }, node);
            unsafe { Ok(&mut *(*node_ptr).val.as_mut_ptr()) }
        }
    }

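    /// Returns a reference to the value for `k` without updating the recency order,
    /// or `None` if the key is not present. A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    ///
    /// // peeking does not promote key 1, so it is still the LRU entry
    /// assert_eq!(cache.peek(&1), Some(&"a"));
    /// assert_eq!(cache.peek_lru(), Some((&1, &"a")));
    /// ```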
    pub fn peek<'a, Q>(&'a self, k: &Q) -> Option<&'a V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.map
            .get(KeyWrapper::from_ref(k))
            .map(|node| unsafe { &*node.as_ref().val.as_ptr() })
    }

    pub fn peek_mut<'a, Q>(&'a mut self, k: &Q) -> Option<&'a mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        match self.map.get_mut(KeyWrapper::from_ref(k)) {
            None => None,
            Some(node) => Some(unsafe { &mut *(*node.as_ptr()).val.as_mut_ptr() }),
        }
    }

    pub fn peek_lru(&self) -> Option<(&K, &V)> {
        if self.is_empty() {
            return None;
        }

        let (key, val);
        unsafe {
            let node = (*self.tail).prev;
            key = &(*(*node).key.as_ptr()) as &K;
            val = &(*(*node).val.as_ptr()) as &V;
        }

        Some((key, val))
    }

    pub fn contains<Q>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        self.map.contains_key(KeyWrapper::from_ref(k))
    }

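    /// Removes and returns the value for `k`, or `None` if it is not present.
    /// A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// cache.put(2, "a");
    ///
    /// assert_eq!(cache.pop(&1), None);
    /// assert_eq!(cache.pop(&2), Some("a"));
    /// assert_eq!(cache.pop(&2), None);
    /// assert_eq!(cache.len(), 0);
    /// ```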
    pub fn pop<Q>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        match self.map.remove(KeyWrapper::from_ref(k)) {
            None => None,
            Some(old_node) => {
                let mut old_node = unsafe {
                    let mut old_node = *Box::from_raw(old_node.as_ptr());
                    ptr::drop_in_place(old_node.key.as_mut_ptr());

                    old_node
                };

                self.detach(&mut old_node);

                let LruEntry { key: _, val, .. } = old_node;
                unsafe { Some(val.assume_init()) }
            }
        }
    }

    pub fn pop_entry<Q>(&mut self, k: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        match self.map.remove(KeyWrapper::from_ref(k)) {
            None => None,
            Some(old_node) => {
                let mut old_node = unsafe { *Box::from_raw(old_node.as_ptr()) };

                self.detach(&mut old_node);

                let LruEntry { key, val, .. } = old_node;
                unsafe { Some((key.assume_init(), val.assume_init())) }
            }
        }
    }

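    /// Removes and returns the least recently used key-value pair, or `None` if the cache
    /// is empty. A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
    /// cache.put(3, "a");
    /// cache.put(4, "b");
    ///
    /// assert_eq!(cache.pop_lru(), Some((3, "a")));
    /// assert_eq!(cache.pop_lru(), Some((4, "b")));
    /// assert_eq!(cache.pop_lru(), None);
    /// ```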
    pub fn pop_lru(&mut self) -> Option<(K, V)> {
        let node = self.remove_last()?;
        let node = *node;
        let LruEntry { key, val, .. } = node;
        unsafe { Some((key.assume_init(), val.assume_init())) }
    }

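    /// Moves the entry for `k` to the most recently used position without reading its value;
    /// `demote` (below) is the mirror image and moves an entry to the least recently used
    /// position. A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    /// cache.put(3, "c");
    ///
    /// cache.promote(&1); // 1 is now the most recently used entry
    /// assert_eq!(cache.pop_lru(), Some((2, "b")));
    ///
    /// cache.demote(&3); // 3 is now the least recently used entry
    /// assert_eq!(cache.pop_lru(), Some((3, "c")));
    /// ```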
    pub fn promote<Q>(&mut self, k: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
            self.detach(node_ptr);
            self.attach(node_ptr);
        }
    }

    pub fn demote<Q>(&mut self, k: &Q)
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        if let Some(node) = self.map.get_mut(KeyWrapper::from_ref(k)) {
            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
            self.detach(node_ptr);
            self.attach_last(node_ptr);
        }
    }

    pub fn len(&self) -> usize {
        self.map.len()
    }

    pub fn is_empty(&self) -> bool {
        self.map.len() == 0
    }

    pub fn cap(&self) -> NonZeroUsize {
        self.cap
    }

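    /// Changes the capacity of the cache, evicting least recently used entries as needed to
    /// fit the new bound. A short sketch, assuming the crate name `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    /// cache.put(1, "a");
    /// cache.put(2, "b");
    /// cache.put(3, "c");
    ///
    /// cache.resize(NonZeroUsize::new(2).unwrap());
    /// assert_eq!(cache.len(), 2);
    /// assert_eq!(cache.get(&1), None); // the LRU entry was evicted to fit the new capacity
    /// assert_eq!(cache.get(&2), Some(&"b"));
    /// ```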
    pub fn resize(&mut self, cap: NonZeroUsize) {
        // return early if the capacity doesn't change
        if cap == self.cap {
            return;
        }

        while self.map.len() > cap.get() {
            self.pop_lru();
        }
        self.map.shrink_to_fit();

        self.cap = cap;
    }

    pub fn clear(&mut self) {
        while self.pop_lru().is_some() {}
    }

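    /// Returns an iterator over the cache's key-value pairs in most- to least-recently-used
    /// order; iterating does not change that order. A short sketch, assuming the crate name
    /// `lru`:
    ///
    /// ```
    /// use core::num::NonZeroUsize;
    /// use lru::LruCache;
    ///
    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
    /// cache.put("a", 1);
    /// cache.put("b", 2);
    /// cache.put("c", 3);
    ///
    /// let pairs: Vec<_> = cache.iter().collect();
    /// assert_eq!(pairs, [(&"c", &3), (&"b", &2), (&"a", &1)]);
    /// ```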
    pub fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            len: self.len(),
            ptr: unsafe { (*self.head).next },
            end: unsafe { (*self.tail).prev },
            phantom: PhantomData,
        }
    }

    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        IterMut {
            len: self.len(),
            ptr: unsafe { (*self.head).next },
            end: unsafe { (*self.tail).prev },
            phantom: PhantomData,
        }
    }

    fn remove_last(&mut self) -> Option<Box<LruEntry<K, V>>> {
        let prev;
        unsafe { prev = (*self.tail).prev }
        if prev != self.head {
            let old_key = KeyRef {
                k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
            };
            let old_node = self.map.remove(&old_key).unwrap();
            let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();
            self.detach(node_ptr);
            unsafe { Some(Box::from_raw(node_ptr)) }
        } else {
            None
        }
    }

    fn detach(&mut self, node: *mut LruEntry<K, V>) {
        unsafe {
            (*(*node).prev).next = (*node).next;
            (*(*node).next).prev = (*node).prev;
        }
    }

    // Attaches `node` right after the `head` sigil, i.e. as the most recently used entry.
    fn attach(&mut self, node: *mut LruEntry<K, V>) {
        unsafe {
            (*node).next = (*self.head).next;
            (*node).prev = self.head;
            (*self.head).next = node;
            (*(*node).next).prev = node;
        }
    }

    // Attaches `node` right before the `tail` sigil, i.e. as the least recently used entry.
    fn attach_last(&mut self, node: *mut LruEntry<K, V>) {
        unsafe {
            (*node).next = self.tail;
            (*node).prev = (*self.tail).prev;
            (*self.tail).prev = node;
            (*(*node).prev).next = node;
        }
    }
}

impl<K, V, S> Drop for LruCache<K, V, S> {
    fn drop(&mut self) {
        self.map.drain().for_each(|(_, node)| unsafe {
            let mut node = *Box::from_raw(node.as_ptr());
            ptr::drop_in_place(node.key.as_mut_ptr());
            ptr::drop_in_place(node.val.as_mut_ptr());
        });
        // The head and tail sigil nodes hold uninitialized key/value slots, so they are
        // reboxed and freed without dropping those fields.
        let _head = unsafe { *Box::from_raw(self.head) };
        let _tail = unsafe { *Box::from_raw(self.tail) };
    }
}

impl<'a, K: Hash + Eq, V, S: BuildHasher> IntoIterator for &'a LruCache<K, V, S> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;

    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}

impl<'a, K: Hash + Eq, V, S: BuildHasher> IntoIterator for &'a mut LruCache<K, V, S> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;

    fn into_iter(self) -> IterMut<'a, K, V> {
        self.iter_mut()
    }
}

// `LruCache` stores raw pointers, so `Send` and `Sync` are not derived automatically;
// they are implemented manually with the usual bounds on K, V, and S.
unsafe impl<K: Send, V: Send, S: Send> Send for LruCache<K, V, S> {}
unsafe impl<K: Sync, V: Sync, S: Sync> Sync for LruCache<K, V, S> {}

impl<K: Hash + Eq, V, S: BuildHasher> fmt::Debug for LruCache<K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("LruCache")
            .field("len", &self.len())
            .field("cap", &self.cap())
            .finish()
    }
}

/// An iterator over the entries of an `LruCache`, from most to least recently used.
pub struct Iter<'a, K: 'a, V: 'a> {
    len: usize,

    ptr: *const LruEntry<K, V>,
    end: *const LruEntry<K, V>,

    phantom: PhantomData<&'a K>,
}

impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);

    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        if self.len == 0 {
            return None;
        }

        let key = unsafe { &(*(*self.ptr).key.as_ptr()) as &K };
        let val = unsafe { &(*(*self.ptr).val.as_ptr()) as &V };

        self.len -= 1;
        self.ptr = unsafe { (*self.ptr).next };

        Some((key, val))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }

    fn count(self) -> usize {
        self.len
    }
}

impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
        if self.len == 0 {
            return None;
        }

        let key = unsafe { &(*(*self.end).key.as_ptr()) as &K };
        let val = unsafe { &(*(*self.end).val.as_ptr()) as &V };

        self.len -= 1;
        self.end = unsafe { (*self.end).prev };

        Some((key, val))
    }
}

impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {}
impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}

impl<'a, K, V> Clone for Iter<'a, K, V> {
    fn clone(&self) -> Iter<'a, K, V> {
        Iter {
            len: self.len,
            ptr: self.ptr,
            end: self.end,
            phantom: PhantomData,
        }
    }
}

unsafe impl<'a, K: Send, V: Send> Send for Iter<'a, K, V> {}
unsafe impl<'a, K: Sync, V: Sync> Sync for Iter<'a, K, V> {}

/// An iterator over mutable entries of an `LruCache`, from most to least recently used.
pub struct IterMut<'a, K: 'a, V: 'a> {
    len: usize,

    ptr: *mut LruEntry<K, V>,
    end: *mut LruEntry<K, V>,

    phantom: PhantomData<&'a K>,
}

impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);

    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        if self.len == 0 {
            return None;
        }

        let key = unsafe { &mut (*(*self.ptr).key.as_mut_ptr()) as &mut K };
        let val = unsafe { &mut (*(*self.ptr).val.as_mut_ptr()) as &mut V };

        self.len -= 1;
        self.ptr = unsafe { (*self.ptr).next };

        Some((key, val))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }

    fn count(self) -> usize {
        self.len
    }
}

impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
        if self.len == 0 {
            return None;
        }

        let key = unsafe { &mut (*(*self.end).key.as_mut_ptr()) as &mut K };
        let val = unsafe { &mut (*(*self.end).val.as_mut_ptr()) as &mut V };

        self.len -= 1;
        self.end = unsafe { (*self.end).prev };

        Some((key, val))
    }
}

impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {}
impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {}

unsafe impl<'a, K: Send, V: Send> Send for IterMut<'a, K, V> {}
unsafe impl<'a, K: Sync, V: Sync> Sync for IterMut<'a, K, V> {}

/// An owning iterator over the entries of an `LruCache`, yielded from least to most
/// recently used.
pub struct IntoIter<K, V>
where
    K: Hash + Eq,
{
    cache: LruCache<K, V>,
}

impl<K, V> Iterator for IntoIter<K, V>
where
    K: Hash + Eq,
{
    type Item = (K, V);

    fn next(&mut self) -> Option<(K, V)> {
        self.cache.pop_lru()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.cache.len();
        (len, Some(len))
    }

    fn count(self) -> usize {
        self.cache.len()
    }
}

impl<K, V> ExactSizeIterator for IntoIter<K, V> where K: Hash + Eq {}
impl<K, V> FusedIterator for IntoIter<K, V> where K: Hash + Eq {}

impl<K: Hash + Eq, V> IntoIterator for LruCache<K, V> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    fn into_iter(self) -> IntoIter<K, V> {
        IntoIter { cache: self }
    }
}

#[cfg(test)]
mod tests {
    use super::LruCache;
    use core::{fmt::Debug, num::NonZeroUsize};
    use scoped_threadpool::Pool;
    use std::sync::atomic::{AtomicUsize, Ordering};

    fn assert_opt_eq<V: PartialEq + Debug>(opt: Option<&V>, v: V) {
        assert!(opt.is_some());
        assert_eq!(opt.unwrap(), &v);
    }

    fn assert_opt_eq_mut<V: PartialEq + Debug>(opt: Option<&mut V>, v: V) {
        assert!(opt.is_some());
        assert_eq!(opt.unwrap(), &v);
    }

    fn assert_opt_eq_tuple<K: PartialEq + Debug, V: PartialEq + Debug>(
        opt: Option<(&K, &V)>,
        kv: (K, V),
    ) {
        assert!(opt.is_some());
        let res = opt.unwrap();
        assert_eq!(res.0, &kv.0);
        assert_eq!(res.1, &kv.1);
    }

    fn assert_opt_eq_mut_tuple<K: PartialEq + Debug, V: PartialEq + Debug>(
        opt: Option<(&K, &mut V)>,
        kv: (K, V),
    ) {
        assert!(opt.is_some());
        let res = opt.unwrap();
        assert_eq!(res.0, &kv.0);
        assert_eq!(res.1, &kv.1);
    }

    #[test]
    fn test_unbounded() {
        let mut cache = LruCache::unbounded();
        for i in 0..13370 {
            cache.put(i, ());
        }
        assert_eq!(cache.len(), 13370);
    }

    #[test]
    #[cfg(feature = "hashbrown")]
    fn test_with_hasher() {
        use core::num::NonZeroUsize;

        use hashbrown::hash_map::DefaultHashBuilder;

        let s = DefaultHashBuilder::default();
        let mut cache = LruCache::with_hasher(NonZeroUsize::new(16).unwrap(), s);

        for i in 0..13370 {
            cache.put(i, ());
        }
        assert_eq!(cache.len(), 16);
    }

    #[test]
    fn test_put_and_get() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
        assert!(cache.is_empty());

        assert_eq!(cache.put("apple", "red"), None);
        assert_eq!(cache.put("banana", "yellow"), None);

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert!(!cache.is_empty());
        assert_opt_eq(cache.get(&"apple"), "red");
        assert_opt_eq(cache.get(&"banana"), "yellow");
    }

    #[test]
    fn test_put_and_get_or_insert() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
        assert!(cache.is_empty());

        assert_eq!(cache.put("apple", "red"), None);
        assert_eq!(cache.put("banana", "yellow"), None);

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert!(!cache.is_empty());
        assert_eq!(cache.get_or_insert("apple", || "orange"), &"red");
        assert_eq!(cache.get_or_insert("banana", || "orange"), &"yellow");
        assert_eq!(cache.get_or_insert("lemon", || "orange"), &"orange");
        assert_eq!(cache.get_or_insert("lemon", || "red"), &"orange");
    }

    #[test]
    fn test_try_get_or_insert() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        assert_eq!(
            cache.try_get_or_insert::<_, &str>("apple", || Ok("red")),
            Ok(&"red")
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("apple", || Err("failed")),
            Ok(&"red")
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("banana", || Ok("orange")),
            Ok(&"orange")
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("lemon", || Err("failed")),
            Err("failed")
        );
        assert_eq!(
            cache.try_get_or_insert::<_, &str>("banana", || Err("failed")),
            Ok(&"orange")
        );
    }

    #[test]
    fn test_put_and_get_or_insert_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
        assert!(cache.is_empty());

        assert_eq!(cache.put("apple", "red"), None);
        assert_eq!(cache.put("banana", "yellow"), None);

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);

        let v = cache.get_or_insert_mut("apple", || "orange");
        assert_eq!(v, &"red");
        *v = "blue";

        assert_eq!(cache.get_or_insert_mut("apple", || "orange"), &"blue");
        assert_eq!(cache.get_or_insert_mut("banana", || "orange"), &"yellow");
        assert_eq!(cache.get_or_insert_mut("lemon", || "orange"), &"orange");
        assert_eq!(cache.get_or_insert_mut("lemon", || "red"), &"orange");
    }

    #[test]
    fn test_try_get_or_insert_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put(1, "a");
        cache.put(2, "b");
        cache.put(2, "c");

        let f = || -> Result<&str, &str> { Err("failed") };
        let a = || -> Result<&str, &str> { Ok("a") };
        let b = || -> Result<&str, &str> { Ok("b") };
        if let Ok(v) = cache.try_get_or_insert_mut(2, a) {
            *v = "d";
        }
        assert_eq!(cache.try_get_or_insert_mut(2, a), Ok(&mut "d"));
        assert_eq!(cache.try_get_or_insert_mut(3, f), Err("failed"));
        assert_eq!(cache.try_get_or_insert_mut(4, b), Ok(&mut "b"));
        assert_eq!(cache.try_get_or_insert_mut(4, a), Ok(&mut "b"));
    }

    #[test]
    fn test_put_and_get_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple", "red");
        cache.put("banana", "yellow");

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert_opt_eq_mut(cache.get_mut(&"apple"), "red");
        assert_opt_eq_mut(cache.get_mut(&"banana"), "yellow");
    }

    #[test]
    fn test_get_mut_and_update() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple", 1);
        cache.put("banana", 3);

        {
            let v = cache.get_mut(&"apple").unwrap();
            *v = 4;
        }

        assert_eq!(cache.cap().get(), 2);
        assert_eq!(cache.len(), 2);
        assert_opt_eq_mut(cache.get_mut(&"apple"), 4);
        assert_opt_eq_mut(cache.get_mut(&"banana"), 3);
    }

    #[test]
    fn test_put_update() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        assert_eq!(cache.put("apple", "red"), None);
        assert_eq!(cache.put("apple", "green"), Some("red"));

        assert_eq!(cache.len(), 1);
        assert_opt_eq(cache.get(&"apple"), "green");
    }

    #[test]
    fn test_put_removes_oldest() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        assert_eq!(cache.put("apple", "red"), None);
        assert_eq!(cache.put("banana", "yellow"), None);
        assert_eq!(cache.put("pear", "green"), None);

        assert!(cache.get(&"apple").is_none());
        assert_opt_eq(cache.get(&"banana"), "yellow");
        assert_opt_eq(cache.get(&"pear"), "green");

        assert_eq!(cache.put("apple", "green"), None);
        assert_eq!(cache.put("tomato", "red"), None);

        assert!(cache.get(&"pear").is_none());
        assert_opt_eq(cache.get(&"apple"), "green");
        assert_opt_eq(cache.get(&"tomato"), "red");
    }

    #[test]
    fn test_peek() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple", "red");
        cache.put("banana", "yellow");

        assert_opt_eq(cache.peek(&"banana"), "yellow");
        assert_opt_eq(cache.peek(&"apple"), "red");

        cache.put("pear", "green");

        assert!(cache.peek(&"apple").is_none());
        assert_opt_eq(cache.peek(&"banana"), "yellow");
        assert_opt_eq(cache.peek(&"pear"), "green");
    }

    #[test]
    fn test_peek_mut() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple", "red");
        cache.put("banana", "yellow");

        assert_opt_eq_mut(cache.peek_mut(&"banana"), "yellow");
        assert_opt_eq_mut(cache.peek_mut(&"apple"), "red");
        assert!(cache.peek_mut(&"pear").is_none());

        cache.put("pear", "green");

        assert!(cache.peek_mut(&"apple").is_none());
        assert_opt_eq_mut(cache.peek_mut(&"banana"), "yellow");
        assert_opt_eq_mut(cache.peek_mut(&"pear"), "green");

        {
            let v = cache.peek_mut(&"banana").unwrap();
            *v = "green";
        }

        assert_opt_eq_mut(cache.peek_mut(&"banana"), "green");
    }

    #[test]
    fn test_peek_lru() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        assert!(cache.peek_lru().is_none());

        cache.put("apple", "red");
        cache.put("banana", "yellow");
        assert_opt_eq_tuple(cache.peek_lru(), ("apple", "red"));

        cache.get(&"apple");
        assert_opt_eq_tuple(cache.peek_lru(), ("banana", "yellow"));

        cache.clear();
        assert!(cache.peek_lru().is_none());
    }

    #[test]
    fn test_contains() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple", "red");
        cache.put("banana", "yellow");
        cache.put("pear", "green");

        assert!(!cache.contains(&"apple"));
        assert!(cache.contains(&"banana"));
        assert!(cache.contains(&"pear"));
    }

    #[test]
    fn test_pop() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple", "red");
        cache.put("banana", "yellow");

        assert_eq!(cache.len(), 2);
        assert_opt_eq(cache.get(&"apple"), "red");
        assert_opt_eq(cache.get(&"banana"), "yellow");

        let popped = cache.pop(&"apple");
        assert!(popped.is_some());
        assert_eq!(popped.unwrap(), "red");
        assert_eq!(cache.len(), 1);
        assert!(cache.get(&"apple").is_none());
        assert_opt_eq(cache.get(&"banana"), "yellow");
    }

    #[test]
    fn test_pop_entry() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
        cache.put("apple", "red");
        cache.put("banana", "yellow");

        assert_eq!(cache.len(), 2);
        assert_opt_eq(cache.get(&"apple"), "red");
        assert_opt_eq(cache.get(&"banana"), "yellow");

        let popped = cache.pop_entry(&"apple");
        assert!(popped.is_some());
        assert_eq!(popped.unwrap(), ("apple", "red"));
        assert_eq!(cache.len(), 1);
        assert!(cache.get(&"apple").is_none());
        assert_opt_eq(cache.get(&"banana"), "yellow");
    }

    #[test]
    fn test_pop_lru() {
        let mut cache = LruCache::new(NonZeroUsize::new(200).unwrap());

        for i in 0..75 {
            cache.put(i, "A");
        }
        for i in 0..75 {
            cache.put(i + 100, "B");
        }
        for i in 0..75 {
            cache.put(i + 200, "C");
        }
        assert_eq!(cache.len(), 200);

        for i in 0..75 {
            assert_opt_eq(cache.get(&(74 - i + 100)), "B");
        }
        assert_opt_eq(cache.get(&25), "A");

        for i in 26..75 {
            assert_eq!(cache.pop_lru(), Some((i, "A")));
        }
        for i in 0..75 {
            assert_eq!(cache.pop_lru(), Some((i + 200, "C")));
        }
        for i in 0..75 {
            assert_eq!(cache.pop_lru(), Some((74 - i + 100, "B")));
        }
        assert_eq!(cache.pop_lru(), Some((25, "A")));
        for _ in 0..50 {
            assert_eq!(cache.pop_lru(), None);
        }
    }

    #[test]
    fn test_clear() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put("apple", "red");
        cache.put("banana", "yellow");

        assert_eq!(cache.len(), 2);
        assert_opt_eq(cache.get(&"apple"), "red");
        assert_opt_eq(cache.get(&"banana"), "yellow");

        cache.clear();
        assert_eq!(cache.len(), 0);
    }

    #[test]
    fn test_resize_larger() {
        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        cache.put(1, "a");
        cache.put(2, "b");
        cache.resize(NonZeroUsize::new(4).unwrap());
        cache.put(3, "c");
        cache.put(4, "d");

        assert_eq!(cache.len(), 4);
        assert_eq!(cache.get(&1), Some(&"a"));
        assert_eq!(cache.get(&2), Some(&"b"));
        assert_eq!(cache.get(&3), Some(&"c"));
        assert_eq!(cache.get(&4), Some(&"d"));
    }

    #[test]
    fn test_resize_smaller() {
        let mut cache = LruCache::new(NonZeroUsize::new(4).unwrap());

        cache.put(1, "a");
        cache.put(2, "b");
        cache.put(3, "c");
        cache.put(4, "d");

        cache.resize(NonZeroUsize::new(2).unwrap());

        assert_eq!(cache.len(), 2);
        assert!(cache.get(&1).is_none());
        assert!(cache.get(&2).is_none());
        assert_eq!(cache.get(&3), Some(&"c"));
        assert_eq!(cache.get(&4), Some(&"d"));
    }

    #[test]
    fn test_send() {
        use std::thread;

        let mut cache = LruCache::new(NonZeroUsize::new(4).unwrap());
        cache.put(1, "a");

        let handle = thread::spawn(move || {
            assert_eq!(cache.get(&1), Some(&"a"));
        });

        assert!(handle.join().is_ok());
    }

    #[test]
    fn test_multiple_threads() {
        let mut pool = Pool::new(1);
        let mut cache = LruCache::new(NonZeroUsize::new(4).unwrap());
        cache.put(1, "a");

        let cache_ref = &cache;
        pool.scoped(|scoped| {
            scoped.execute(move || {
                assert_eq!(cache_ref.peek(&1), Some(&"a"));
            });
        });

        assert_eq!(cache_ref.peek(&1), Some(&"a"));
    }

    #[test]
    fn test_iter_forwards() {
        let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3);

        {
            let mut iter = cache.iter();
            assert_eq!(iter.len(), 3);
            assert_opt_eq_tuple(iter.next(), ("c", 3));

            assert_eq!(iter.len(), 2);
            assert_opt_eq_tuple(iter.next(), ("b", 2));

            assert_eq!(iter.len(), 1);
            assert_opt_eq_tuple(iter.next(), ("a", 1));

            assert_eq!(iter.len(), 0);
            assert_eq!(iter.next(), None);
        }
        {
            let mut iter = cache.iter_mut();
            assert_eq!(iter.len(), 3);
            assert_opt_eq_mut_tuple(iter.next(), ("c", 3));

            assert_eq!(iter.len(), 2);
            assert_opt_eq_mut_tuple(iter.next(), ("b", 2));

            assert_eq!(iter.len(), 1);
            assert_opt_eq_mut_tuple(iter.next(), ("a", 1));

            assert_eq!(iter.len(), 0);
            assert_eq!(iter.next(), None);
        }
    }

    #[test]
    fn test_iter_backwards() {
        let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3);

        {
            let mut iter = cache.iter();
            assert_eq!(iter.len(), 3);
            assert_opt_eq_tuple(iter.next_back(), ("a", 1));

            assert_eq!(iter.len(), 2);
            assert_opt_eq_tuple(iter.next_back(), ("b", 2));

            assert_eq!(iter.len(), 1);
            assert_opt_eq_tuple(iter.next_back(), ("c", 3));

            assert_eq!(iter.len(), 0);
            assert_eq!(iter.next_back(), None);
        }

        {
            let mut iter = cache.iter_mut();
            assert_eq!(iter.len(), 3);
            assert_opt_eq_mut_tuple(iter.next_back(), ("a", 1));

            assert_eq!(iter.len(), 2);
            assert_opt_eq_mut_tuple(iter.next_back(), ("b", 2));

            assert_eq!(iter.len(), 1);
            assert_opt_eq_mut_tuple(iter.next_back(), ("c", 3));

            assert_eq!(iter.len(), 0);
            assert_eq!(iter.next_back(), None);
        }
    }

    #[test]
    fn test_iter_forwards_and_backwards() {
        let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3);

        {
            let mut iter = cache.iter();
            assert_eq!(iter.len(), 3);
            assert_opt_eq_tuple(iter.next(), ("c", 3));

            assert_eq!(iter.len(), 2);
            assert_opt_eq_tuple(iter.next_back(), ("a", 1));

            assert_eq!(iter.len(), 1);
            assert_opt_eq_tuple(iter.next(), ("b", 2));

            assert_eq!(iter.len(), 0);
            assert_eq!(iter.next_back(), None);
        }
        {
            let mut iter = cache.iter_mut();
            assert_eq!(iter.len(), 3);
            assert_opt_eq_mut_tuple(iter.next(), ("c", 3));

            assert_eq!(iter.len(), 2);
            assert_opt_eq_mut_tuple(iter.next_back(), ("a", 1));

            assert_eq!(iter.len(), 1);
            assert_opt_eq_mut_tuple(iter.next(), ("b", 2));

            assert_eq!(iter.len(), 0);
            assert_eq!(iter.next_back(), None);
        }
    }

    #[test]
    fn test_iter_multiple_threads() {
        let mut pool = Pool::new(1);
        let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3);

        let mut iter = cache.iter();
        assert_eq!(iter.len(), 3);
        assert_opt_eq_tuple(iter.next(), ("c", 3));

        {
            let iter_ref = &mut iter;
            pool.scoped(|scoped| {
                scoped.execute(move || {
                    assert_eq!(iter_ref.len(), 2);
                    assert_opt_eq_tuple(iter_ref.next(), ("b", 2));
                });
            });
        }

        assert_eq!(iter.len(), 1);
        assert_opt_eq_tuple(iter.next(), ("a", 1));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn test_iter_clone() {
        let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
        cache.put("a", 1);
        cache.put("b", 2);

        let mut iter = cache.iter();
        let mut iter_clone = iter.clone();

        assert_eq!(iter.len(), 2);
        assert_opt_eq_tuple(iter.next(), ("b", 2));
        assert_eq!(iter_clone.len(), 2);
        assert_opt_eq_tuple(iter_clone.next(), ("b", 2));

        assert_eq!(iter.len(), 1);
        assert_opt_eq_tuple(iter.next(), ("a", 1));
        assert_eq!(iter_clone.len(), 1);
        assert_opt_eq_tuple(iter_clone.next(), ("a", 1));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next(), None);
        assert_eq!(iter_clone.len(), 0);
        assert_eq!(iter_clone.next(), None);
    }

    #[test]
    fn test_into_iter() {
        let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3);

        let mut iter = cache.into_iter();
        assert_eq!(iter.len(), 3);
        assert_eq!(iter.next(), Some(("a", 1)));

        assert_eq!(iter.len(), 2);
        assert_eq!(iter.next(), Some(("b", 2)));

        assert_eq!(iter.len(), 1);
        assert_eq!(iter.next(), Some(("c", 3)));

        assert_eq!(iter.len(), 0);
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn test_that_pop_actually_detaches_node() {
        let mut cache = LruCache::new(NonZeroUsize::new(5).unwrap());

        cache.put("a", 1);
        cache.put("b", 2);
        cache.put("c", 3);
        cache.put("d", 4);
        cache.put("e", 5);

        assert_eq!(cache.pop(&"c"), Some(3));

        cache.put("f", 6);

        let mut iter = cache.iter();
        assert_opt_eq_tuple(iter.next(), ("f", 6));
        assert_opt_eq_tuple(iter.next(), ("e", 5));
        assert_opt_eq_tuple(iter.next(), ("d", 4));
        assert_opt_eq_tuple(iter.next(), ("b", 2));
        assert_opt_eq_tuple(iter.next(), ("a", 1));
        assert!(iter.next().is_none());
    }

    #[test]
    fn test_get_with_borrow() {
        use alloc::string::String;

        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        let key = String::from("apple");
        cache.put(key, "red");

        assert_opt_eq(cache.get("apple"), "red");
    }

    #[test]
    fn test_get_mut_with_borrow() {
        use alloc::string::String;

        let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());

        let key = String::from("apple");
        cache.put(key, "red");

        assert_opt_eq_mut(cache.get_mut("apple"), "red");
    }

    #[test]
    fn test_no_memory_leaks() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct DropCounter;

        impl Drop for DropCounter {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let n = 100;
        for _ in 0..n {
            let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
            for i in 0..n {
                cache.put(i, DropCounter {});
            }
        }
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n);
    }

    #[test]
    fn test_no_memory_leaks_with_clear() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct DropCounter;

        impl Drop for DropCounter {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let n = 100;
        for _ in 0..n {
            let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
            for i in 0..n {
                cache.put(i, DropCounter {});
            }
            cache.clear();
        }
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n);
    }

    #[test]
    fn test_no_memory_leaks_with_resize() {
        static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

        struct DropCounter;

        impl Drop for DropCounter {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let n = 100;
        for _ in 0..n {
            // fill a cache that holds all n values, then shrink it so that resize has to
            // drop the evicted entries
            let mut cache = LruCache::new(NonZeroUsize::new(n).unwrap());
            for i in 0..n {
                cache.put(i, DropCounter {});
            }
            cache.resize(NonZeroUsize::new(1).unwrap());
        }
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n);
    }

2264
2265 #[test]
2266 fn test_no_memory_leaks_with_pop() {
2267 static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);
2268
2269 #[derive(Hash, Eq)]
2270 struct KeyDropCounter(usize);
2271
2272 impl PartialEq for KeyDropCounter {
2273 fn eq(&self, other: &Self) -> bool {
2274 self.0.eq(&other.0)
2275 }
2276 }
2277
2278 impl Drop for KeyDropCounter {
2279 fn drop(&mut self) {
2280 DROP_COUNT.fetch_add(1, Ordering::SeqCst);
2281 }
2282 }
2283
2284 let n = 100;
2285 for _ in 0..n {
2286 let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
2287
2288 for i in 0..100 {
2289 cache.put(KeyDropCounter(i), i);
2290 cache.pop(&KeyDropCounter(i));
2291 }
2292 }
2293
2294 assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n * 2);
2295 }
2296
2297 #[test]
2298 fn test_promote_and_demote() {
2299 let mut cache = LruCache::new(NonZeroUsize::new(5).unwrap());
2300 for i in 0..5 {
2301 cache.push(i, i);
2302 }
2303 cache.promote(&1);
2304 cache.promote(&0);
2305 cache.demote(&3);
2306 cache.demote(&4);
2307 assert_eq!(cache.pop_lru(), Some((4, 4)));
2308 assert_eq!(cache.pop_lru(), Some((3, 3)));
2309 assert_eq!(cache.pop_lru(), Some((2, 2)));
2310 assert_eq!(cache.pop_lru(), Some((1, 1)));
2311 assert_eq!(cache.pop_lru(), Some((0, 0)));
2312 assert_eq!(cache.pop_lru(), None);
2313 }
2314
2315 #[test]
2316 fn test_get_key_value() {
2317 use alloc::string::String;
2318
2319 let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
2320
2321 let key = String::from("apple");
2322 cache.put(key, "red");
2323
2324 assert_eq!(
2325 cache.get_key_value("apple"),
2326 Some((&String::from("apple"), &"red"))
2327 );
2328 assert_eq!(cache.get_key_value("banana"), None);
2329 }
2330
2331 #[test]
2332 fn test_get_key_value_mut() {
2333 use alloc::string::String;
2334
2335 let mut cache = LruCache::new(NonZeroUsize::new(2).unwrap());
2336
2337 let key = String::from("apple");
2338 cache.put(key, "red");
2339
2340 let (k, v) = cache.get_key_value_mut("apple").unwrap();
2341 assert_eq!(k, &String::from("apple"));
2342 assert_eq!(v, &mut "red");
2343 *v = "green";
2344
2345 assert_eq!(
2346 cache.get_key_value("apple"),
2347 Some((&String::from("apple"), &"green"))
2348 );
2349 assert_eq!(cache.get_key_value("banana"), None);
2350 }
2351
2352 #[test]
2353 fn test_clone() {
2354 let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
2355 cache.put("a", 1);
2356 cache.put("b", 2);
2357 cache.put("c", 3);
2358
2359 let mut cloned = cache.clone();
2360
2361 assert_eq!(cache.pop_lru(), Some(("a", 1)));
2362 assert_eq!(cloned.pop_lru(), Some(("a", 1)));
2363
2364 assert_eq!(cache.pop_lru(), Some(("b", 2)));
2365 assert_eq!(cloned.pop_lru(), Some(("b", 2)));
2366
2367 assert_eq!(cache.pop_lru(), Some(("c", 3)));
2368 assert_eq!(cloned.pop_lru(), Some(("c", 3)));
2369
2370 assert_eq!(cache.pop_lru(), None);
2371 assert_eq!(cloned.pop_lru(), None);
2372 }
2373}
2374
2375fn _test_lifetimes() {}