arc_swap/cache.rs
#![deny(unsafe_code)]

//! Caching handle into the [ArcSwapAny].
//!
//! The [Cache] keeps a copy of the internal [Arc] for faster access.
//!
//! [Arc]: std::sync::Arc

use core::ops::Deref;
use core::sync::atomic::Ordering;

use super::ref_cnt::RefCnt;
use super::strategy::Strategy;
use super::ArcSwapAny;

/// Generalization of caches providing access to `T`.
///
/// This abstracts over all kinds of caches that can provide a cheap access to values of type `T`.
/// This is useful in cases where some code doesn't care if the `T` is the whole structure or just
/// a part of it.
///
/// See the example at [`Cache::map`].
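///
/// A minimal sketch of code that stays generic over the concrete kind of cache:
///
/// ```rust
/// use arc_swap::ArcSwap;
/// use arc_swap::cache::{Access, Cache};
///
/// fn print_count<A: Access<usize>>(cache: &mut A) {
///     // Whatever cache we got, it can hand us a fresh &usize.
///     println!("Current count: {}", cache.load());
/// }
///
/// let shared = ArcSwap::from_pointee(42usize);
/// print_count(&mut Cache::new(&shared));
/// ```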
pub trait Access<T> {
    /// Loads the value from cache.
    ///
    /// This revalidates the value in the cache, then provides the access to the cached value.
    fn load(&mut self) -> &T;
}

/// Caching handle for [`ArcSwapAny`][ArcSwapAny].
///
/// Instead of loading the [`Arc`][Arc] on every request from the shared storage, this keeps
/// another copy inside itself. Upon request it only cheaply revalidates that the copy is up to
/// date. If it is, access is significantly faster. If it is stale, a [load_full] is done and the
/// cached value is replaced. Under read-heavy loads, the measured speedup is 10-25 times,
/// depending on the architecture.
///
/// There are, however, downsides:
///
/// * The handle needs to be kept around by the caller (usually, one per thread). This is fine if
///   there's one global `ArcSwapAny`, but starts being tricky with eg. data structures built from
///   them.
/// * As it keeps a copy of the [Arc] inside the cache, the old value may be kept alive for a
///   longer period of time ‒ it is replaced by the new value only on [load][Cache::load]. You may
///   not want to use this if dropping the old value in a timely manner is important (possibly
///   because of releasing a large amount of RAM or because of closing file handles).
48/// # Examples
49///
50/// ```rust
51/// # fn do_something<V>(_v: V) { }
52/// use std::sync::Arc;
53/// use std::sync::atomic::{AtomicBool, Ordering};
54///
55/// use arc_swap::{ArcSwap, Cache};
56///
57/// let shared = Arc::new(ArcSwap::from_pointee(42));
58/// # let mut threads = Vec::new();
59/// let terminate = Arc::new(AtomicBool::new(false));
60/// // Start 10 worker threads...
61/// for _ in 0..10 {
62/// let mut cache = Cache::new(Arc::clone(&shared));
63/// let terminate = Arc::clone(&terminate);
64/// # let thread =
65/// std::thread::spawn(move || {
66/// // Keep loading it like mad..
67/// while !terminate.load(Ordering::Relaxed) {
68/// let value = cache.load();
69/// do_something(value);
70/// }
71/// });
72/// # threads.push(thread);
73/// }
74/// shared.store(Arc::new(12));
75/// # terminate.store(true, Ordering::Relaxed);
76/// # for thread in threads { thread.join().unwrap() }
77/// ```
///
/// Another example, this one using thread local storage and explicit types:
///
/// ```rust
/// # use std::sync::Arc;
/// # use std::ops::Deref;
/// # use std::cell::RefCell;
/// #
/// # use arc_swap::ArcSwap;
/// # use arc_swap::cache::Cache;
/// # use once_cell::sync::Lazy;
/// #
/// # #[derive(Debug, Default)]
/// # struct Config;
/// #
/// static CURRENT_CONFIG: Lazy<ArcSwap<Config>> = Lazy::new(|| ArcSwap::from_pointee(Config::default()));
///
/// thread_local! {
///     static CACHE: RefCell<Cache<&'static ArcSwap<Config>, Arc<Config>>> = RefCell::new(Cache::from(CURRENT_CONFIG.deref()));
/// }
///
/// CACHE.with(|c| {
///     // * RefCell needed, because load on cache is `&mut`.
///     // * You want to operate inside the `with` ‒ cloning the Arc is comparably expensive as
///     //   ArcSwap::load itself and whatever you'd save by the cache would be lost on that.
///     println!("{:?}", c.borrow_mut().load());
/// });
/// ```
///
/// [Arc]: std::sync::Arc
/// [load_full]: ArcSwapAny::load_full
#[derive(Clone, Debug)]
pub struct Cache<A, T> {
    arc_swap: A,
    cached: T,
}

impl<A, T, S> Cache<A, T>
where
    A: Deref<Target = ArcSwapAny<T, S>>,
    T: RefCnt,
    S: Strategy<T>,
{
    /// Creates a new caching handle.
    ///
    /// The parameter is something dereferencing into an [`ArcSwapAny`] (eg. either an [`ArcSwap`]
    /// or an [`ArcSwapOption`]). That can be the [`ArcSwapAny`] itself, but that's not very
    /// useful. It can also be a reference to it or an `Arc`, which makes it possible to share the
    /// [`ArcSwapAny`] between multiple caches or to access it in a non-cached way too.
    ///
    /// [`ArcSwapOption`]: crate::ArcSwapOption
    /// [`ArcSwap`]: crate::ArcSwap
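    ///
    /// # Examples
    ///
    /// A small sketch showing both flavours, a cache over a borrowed reference and one over a
    /// shared `Arc`:
    ///
    /// ```rust
    /// use std::sync::Arc;
    ///
    /// use arc_swap::{ArcSwap, Cache};
    ///
    /// let shared = Arc::new(ArcSwap::from_pointee(42));
    /// // Borrowing the shared storage.
    /// let mut by_ref = Cache::new(&*shared);
    /// // Owning another `Arc` handle to it, so the cache can outlive `shared`.
    /// let mut by_arc = Cache::new(Arc::clone(&shared));
    /// assert_eq!(42, **by_ref.load());
    /// assert_eq!(42, **by_arc.load());
    /// ```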
    pub fn new(arc_swap: A) -> Self {
        let cached = arc_swap.load_full();
        Self { arc_swap, cached }
    }

    /// Gives access to the (possibly shared) cached [`ArcSwapAny`].
    pub fn arc_swap(&self) -> &A::Target {
        &self.arc_swap
    }

    /// Loads the currently held value.
    ///
    /// This first checks if the cached value is up to date. This check is very cheap.
    ///
    /// If it is up to date, the cached value is simply returned without additional costs. If it is
    /// outdated, a load is done on the underlying shared storage. The newly loaded value is then
    /// stored in the cache and returned.
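    ///
    /// # Examples
    ///
    /// A short sketch of the revalidation behaviour; a store into the shared storage shows up on
    /// the next load through the cache:
    ///
    /// ```rust
    /// use std::sync::Arc;
    ///
    /// use arc_swap::{ArcSwap, Cache};
    ///
    /// let shared = ArcSwap::from_pointee(1);
    /// let mut cache = Cache::new(&shared);
    /// assert_eq!(1, **cache.load());
    ///
    /// shared.store(Arc::new(2));
    /// // The pointer changed, so this load refreshes the cached value first.
    /// assert_eq!(2, **cache.load());
    /// ```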
    #[inline]
    pub fn load(&mut self) -> &T {
        self.revalidate();
        self.load_no_revalidate()
    }

    #[inline]
    fn load_no_revalidate(&self) -> &T {
        &self.cached
    }

    #[inline]
    fn revalidate(&mut self) {
        let cached_ptr = RefCnt::as_ptr(&self.cached);
        // Note: Relaxed here is fine. We do not synchronize any data through this, we already have
        // it synchronized in self.cached. We just want to check if it changed; if it did, the
        // load_full will be responsible for any synchronization needed.
        let shared_ptr = self.arc_swap.ptr.load(Ordering::Relaxed);
        if cached_ptr != shared_ptr {
            self.cached = self.arc_swap.load_full();
        }
    }

    /// Turns this cache into a cache with a projection inside the cached value.
    ///
    /// You'd use this in cases where some part of the code needs access to fresh values of `U`,
    /// but what this cache provides is a bigger structure containing `U`. Handing the whole
    /// structure to that part of the code falls short in terms of reusability (the code could be
    /// used within multiple contexts, each with a different bigger structure containing `U`) and
    /// code separation (the code shouldn't need to know about the big structure).
    ///
    /// # Warning
    ///
    /// As the provided `f` is called inside every [`load`][Access::load], it should be cheap.
    /// Most often it is expected to be just a closure taking a reference to some inner field.
    ///
    /// For the same reasons, it should not have side effects and should never panic (these would
    /// not break Rust's safety rules, but might produce behaviour you don't expect).
    ///
    /// # Examples
    ///
    /// ```rust
    /// use arc_swap::ArcSwap;
    /// use arc_swap::cache::{Access, Cache};
    ///
    /// struct InnerCfg {
    ///     answer: usize,
    /// }
    ///
    /// struct FullCfg {
    ///     inner: InnerCfg,
    /// }
    ///
    /// fn use_inner<A: Access<InnerCfg>>(cache: &mut A) {
    ///     let value = cache.load();
    ///     println!("The answer is: {}", value.answer);
    /// }
    ///
    /// let full_cfg = ArcSwap::from_pointee(FullCfg {
    ///     inner: InnerCfg {
    ///         answer: 42,
    ///     }
    /// });
    /// let cache = Cache::new(&full_cfg);
    /// use_inner(&mut cache.map(|full| &full.inner));
    ///
    /// let inner_cfg = ArcSwap::from_pointee(InnerCfg { answer: 24 });
    /// let mut inner_cache = Cache::new(&inner_cfg);
    /// use_inner(&mut inner_cache);
    /// ```
    pub fn map<F, U>(self, f: F) -> MapCache<A, T, F>
    where
        F: FnMut(&T) -> &U,
    {
        MapCache {
            inner: self,
            projection: f,
        }
    }
}

impl<A, T, S> Access<T::Target> for Cache<A, T>
where
    A: Deref<Target = ArcSwapAny<T, S>>,
    T: Deref<Target = <T as RefCnt>::Base> + RefCnt,
    S: Strategy<T>,
{
    fn load(&mut self) -> &T::Target {
        self.load().deref()
    }
}

impl<A, T, S> From<A> for Cache<A, T>
where
    A: Deref<Target = ArcSwapAny<T, S>>,
    T: RefCnt,
    S: Strategy<T>,
{
    fn from(arc_swap: A) -> Self {
        Self::new(arc_swap)
    }
}

/// An implementation of a cache with a projection into the accessed value.
///
/// This is the implementation structure for [`Cache::map`]. It can't be created directly and it
/// should be used through the [`Access`] trait.
#[derive(Clone, Debug)]
pub struct MapCache<A, T, F> {
    inner: Cache<A, T>,
    projection: F,
}

impl<A, T, S, F, U> Access<U> for MapCache<A, T, F>
where
    A: Deref<Target = ArcSwapAny<T, S>>,
    T: RefCnt,
    S: Strategy<T>,
    F: FnMut(&T) -> &U,
{
    fn load(&mut self) -> &U {
        (self.projection)(self.inner.load())
    }
}

#[cfg(test)]
mod tests {
    use alloc::sync::Arc;

    use super::*;
    use crate::{ArcSwap, ArcSwapOption};

    #[test]
    fn cached_value() {
        let a = ArcSwap::from_pointee(42);
        let mut c1 = Cache::new(&a);
        let mut c2 = Cache::new(&a);

        assert_eq!(42, **c1.load());
        assert_eq!(42, **c2.load());

        a.store(Arc::new(43));
        assert_eq!(42, **c1.load_no_revalidate());
        assert_eq!(43, **c1.load());
    }

    #[test]
    fn cached_through_arc() {
        let a = Arc::new(ArcSwap::from_pointee(42));
        let mut c = Cache::new(Arc::clone(&a));
        assert_eq!(42, **c.load());
        a.store(Arc::new(0));
        drop(a); // A is just one handle, the ArcSwap is kept alive by the cache.
    }

    #[test]
    fn cache_option() {
        let a = ArcSwapOption::from_pointee(42);
        let mut c = Cache::new(&a);

        assert_eq!(42, **c.load().as_ref().unwrap());
        a.store(None);
        assert!(c.load().is_none());
    }

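    // A small sketch of reaching the shared storage back through the cache handle (and of the
    // `From` conversion used instead of `new`).
    #[test]
    fn arc_swap_accessor() {
        let a = ArcSwap::from_pointee(42);
        let mut c = Cache::from(&a);
        assert_eq!(42, **c.load());

        // The underlying ArcSwap stays reachable (and writable) through the handle.
        c.arc_swap().store(Arc::new(43));
        assert_eq!(43, **c.load());
    }
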
    struct Inner {
        answer: usize,
    }

    struct Outer {
        inner: Inner,
    }

    #[test]
    fn map_cache() {
        let a = ArcSwap::from_pointee(Outer {
            inner: Inner { answer: 42 },
        });

        let mut cache = Cache::new(&a);
        let mut inner = cache.clone().map(|outer| &outer.inner);
        let mut answer = cache.clone().map(|outer| &outer.inner.answer);

        assert_eq!(42, cache.load().inner.answer);
        assert_eq!(42, inner.load().answer);
        assert_eq!(42, *answer.load());

        a.store(Arc::new(Outer {
            inner: Inner { answer: 24 },
        }));

        assert_eq!(24, cache.load().inner.answer);
        assert_eq!(24, inner.load().answer);
        assert_eq!(24, *answer.load());
    }
}