redb/tree_store/page_store/lru_cache.rs

use std::collections::{HashMap, VecDeque};
use std::sync::atomic::{AtomicBool, Ordering};

#[derive(Default)]
pub struct LRUCache<T> {
    // The AtomicBool is the second-chance flag: set when an entry is accessed,
    // cleared when the eviction sweep passes over the entry.
    cache: HashMap<u64, (T, AtomicBool)>,
    lru_queue: VecDeque<u64>,
}

impl<T> LRUCache<T> {
    pub(crate) fn new() -> Self {
        Self {
            cache: Default::default(),
            lru_queue: Default::default(),
        }
    }

    pub(crate) fn len(&self) -> usize {
        self.cache.len()
    }

    // Inserts `value` under `key`, returning the previous value if the key was
    // already cached. Only a brand-new key is pushed onto the LRU queue; an
    // existing key keeps its current queue position.
    pub(crate) fn insert(&mut self, key: u64, value: T) -> Option<T> {
        let result = self
            .cache
            .insert(key, (value, AtomicBool::new(false)))
            .map(|(x, _)| x);
        if result.is_none() {
            self.lru_queue.push_back(key);
        }
        result
    }

    // Removes `key` from the cache. The key's entry in the LRU queue becomes
    // stale rather than being removed immediately.
    pub(crate) fn remove(&mut self, key: u64) -> Option<T> {
        if let Some((value, _)) = self.cache.remove(&key) {
            if self.lru_queue.len() > 2 * self.cache.len() {
                // Cycle two elements of the LRU queue to ensure it doesn't grow without
                // bound: stale keys that are no longer cached are dropped, and live keys
                // are re-queued with their second-chance flag cleared.
                for _ in 0..2 {
                    if let Some(removed_key) = self.lru_queue.pop_front() {
                        if let Some((_, second_chance)) = self.cache.get(&removed_key) {
                            second_chance.store(false, Ordering::Release);
                            self.lru_queue.push_back(removed_key);
                        }
                    }
                }
            }
            Some(value)
        } else {
            None
        }
    }

    // Looks up `key` and marks it as recently used so the next eviction sweep
    // gives it a second chance.
    pub(crate) fn get(&self, key: u64) -> Option<&T> {
        if let Some((value, second_chance)) = self.cache.get(&key) {
            second_chance.store(true, Ordering::Release);
            Some(value)
        } else {
            None
        }
    }

    // Mutable lookup; like `get`, it sets the second-chance flag.
    pub(crate) fn get_mut(&mut self, key: u64) -> Option<&mut T> {
        if let Some((value, second_chance)) = self.cache.get_mut(&key) {
            second_chance.store(true, Ordering::Release);
            Some(value)
        } else {
            None
        }
    }

    // Iteration visits entries in arbitrary (hash map) order and does not touch
    // the second-chance flags.
    pub(crate) fn iter(&self) -> impl ExactSizeIterator<Item = (&u64, &T)> {
        self.cache.iter().map(|(k, (v, _))| (k, v))
    }

    pub(crate) fn iter_mut(&mut self) -> impl ExactSizeIterator<Item = (&u64, &mut T)> {
        self.cache.iter_mut().map(|(k, (v, _))| (k, v))
    }

    // Evicts and returns the lowest-priority entry using a second-chance (clock)
    // sweep: an entry whose flag is set gets the flag cleared and moves to the
    // back of the queue; the first entry found with a clear flag is evicted.
    // Stale queue entries whose key is no longer cached are skipped.
    pub(crate) fn pop_lowest_priority(&mut self) -> Option<(u64, T)> {
        while let Some(key) = self.lru_queue.pop_front() {
            if let Some((_, second_chance)) = self.cache.get(&key) {
                if second_chance
                    .compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire)
                    .is_ok()
                {
                    self.lru_queue.push_back(key);
                } else {
                    let (value, _) = self.cache.remove(&key).unwrap();
                    return Some((key, value));
                }
            }
        }
        None
    }

    pub(crate) fn clear(&mut self) {
        self.cache.clear();
        self.lru_queue.clear();
    }
}
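
// A minimal usage sketch, written as illustrative unit tests: basic
// insert/get/remove bookkeeping and the second-chance eviction order of
// `pop_lowest_priority`. Test names and values are assumptions, not taken from
// the upstream test suite.
#[cfg(test)]
mod tests {
    use super::LRUCache;

    #[test]
    fn insert_get_remove() {
        let mut cache: LRUCache<u32> = LRUCache::new();
        assert_eq!(cache.insert(1, 10), None);
        assert_eq!(cache.insert(2, 20), None);
        assert_eq!(cache.len(), 2);
        // Re-inserting an existing key returns the old value.
        assert_eq!(cache.insert(1, 11), Some(10));
        assert_eq!(cache.get(1), Some(&11));
        assert_eq!(cache.remove(2), Some(20));
        assert_eq!(cache.remove(2), None);
        assert_eq!(cache.len(), 1);
    }

    #[test]
    fn second_chance_changes_eviction_order() {
        let mut cache: LRUCache<&str> = LRUCache::new();
        cache.insert(1, "a");
        cache.insert(2, "b");
        cache.insert(3, "c");

        // Touching key 1 sets its second-chance flag.
        assert_eq!(cache.get(1), Some(&"a"));

        // The sweep skips key 1 once (clearing its flag and re-queuing it),
        // so key 2 is evicted first, then 3, then finally 1.
        assert_eq!(cache.pop_lowest_priority(), Some((2, "b")));
        assert_eq!(cache.pop_lowest_priority(), Some((3, "c")));
        assert_eq!(cache.pop_lowest_priority(), Some((1, "a")));
        assert_eq!(cache.pop_lowest_priority(), None);
        assert_eq!(cache.len(), 0);
    }
}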