// dashmap/lock.rs

use core::sync::atomic::{AtomicUsize, Ordering};
use parking_lot_core::{ParkToken, SpinWait, UnparkToken};

pub type RwLock<T> = lock_api::RwLock<RawRwLock, T>;
pub type RwLockReadGuard<'a, T> = lock_api::RwLockReadGuard<'a, RawRwLock, T>;
pub type RwLockWriteGuard<'a, T> = lock_api::RwLockWriteGuard<'a, RawRwLock, T>;

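// Layout of the lock state word:
//
//   bit 0 (READERS_PARKED): at least one reader is parked, waiting.
//   bit 1 (WRITERS_PARKED): at least one writer is parked, waiting.
//   remaining bits:         the reader count, in units of ONE_READER.
//
// ONE_WRITER sets every non-parked bit, so "all upper bits set" encodes an
// exclusively held lock, any smaller non-zero multiple of ONE_READER encodes
// that many shared holders, and zero upper bits means unlocked.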
const READERS_PARKED: usize = 0b0001;
const WRITERS_PARKED: usize = 0b0010;
const ONE_READER: usize = 0b0100;
const ONE_WRITER: usize = !(READERS_PARKED | WRITERS_PARKED);

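// A one-word reader-writer lock backing the `RwLock` alias above. The
// uncontended paths are a single compare-exchange; contended threads are
// parked through parking_lot_core.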
pub struct RawRwLock {
    state: AtomicUsize,
}

unsafe impl lock_api::RawRwLock for RawRwLock {
    #[allow(clippy::declare_interior_mutable_const)]
    const INIT: Self = Self {
        state: AtomicUsize::new(0),
    };

    // Guards are `!Send`: a guard must be dropped on the thread that
    // acquired it.
    type GuardMarker = lock_api::GuardNoSend;

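    // Exclusive fast path: the lock can be claimed directly only when it is
    // unlocked and nothing is parked (state == 0).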
    #[inline]
    fn try_lock_exclusive(&self) -> bool {
        self.state
            .compare_exchange(0, ONE_WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    #[inline]
    fn lock_exclusive(&self) {
        if self
            .state
            .compare_exchange_weak(0, ONE_WRITER, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {
            self.lock_exclusive_slow();
        }
    }

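    // Exclusive unlock fast path: if nothing parked while we held the lock,
    // the state is still exactly ONE_WRITER and drops straight back to zero.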
    #[inline]
    unsafe fn unlock_exclusive(&self) {
        if self
            .state
            .compare_exchange(ONE_WRITER, 0, Ordering::Release, Ordering::Relaxed)
            .is_err()
        {
            self.unlock_exclusive_slow();
        }
    }

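    // Shared fast path: bump the reader count unless a writer holds the
    // lock; contended cases retry (and possibly park) in the slow paths.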
    #[inline]
    fn try_lock_shared(&self) -> bool {
        self.try_lock_shared_fast() || self.try_lock_shared_slow()
    }

    #[inline]
    fn lock_shared(&self) {
        if !self.try_lock_shared_fast() {
            self.lock_shared_slow();
        }
    }

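    // Drop one reader. Seeing exactly ONE_READER | WRITERS_PARKED means we
    // were the last reader out while a writer is waiting to be woken.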
    #[inline]
    unsafe fn unlock_shared(&self) {
        let state = self.state.fetch_sub(ONE_READER, Ordering::Release);

        if state == (ONE_READER | WRITERS_PARKED) {
            self.unlock_shared_slow();
        }
    }
}

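// Downgrading rewrites the writer pattern into a single reader in one atomic
// step (preserving WRITERS_PARKED) and wakes all parked readers, which may
// now share the lock.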
unsafe impl lock_api::RawRwLockDowngrade for RawRwLock {
    #[inline]
    unsafe fn downgrade(&self) {
        let state = self
            .state
            .fetch_and(ONE_READER | WRITERS_PARKED, Ordering::Release);
        if state & READERS_PARKED != 0 {
            parking_lot_core::unpark_all((self as *const _ as usize) + 1, UnparkToken(0));
        }
    }
}

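// Slow paths. Writers park on the lock's own address, readers on the address
// plus one, giving each side its own queue key in the global parking lot.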
impl RawRwLock {
    #[cold]
    fn lock_exclusive_slow(&self) {
        let mut acquire_with = 0;
        loop {
            let mut spin = SpinWait::new();
            let mut state = self.state.load(Ordering::Relaxed);

            loop {
                // The lock is free (no writer, zero readers): try to claim it.
                while state & ONE_WRITER == 0 {
                    match self.state.compare_exchange_weak(
                        state,
                        state | ONE_WRITER | acquire_with,
                        Ordering::Acquire,
                        Ordering::Relaxed,
                    ) {
                        Ok(_) => return,
                        Err(e) => state = e,
                    }
                }

                // Spin briefly before sleeping, then advertise that a writer
                // is about to park.
                if state & WRITERS_PARKED == 0 {
                    if spin.spin() {
                        state = self.state.load(Ordering::Relaxed);
                        continue;
                    }

                    if let Err(e) = self.state.compare_exchange_weak(
                        state,
                        state | WRITERS_PARKED,
                        Ordering::Relaxed,
                        Ordering::Relaxed,
                    ) {
                        state = e;
                        continue;
                    }
                }

                // Park on the writer queue; the validation closure aborts the
                // park if the lock became free or WRITERS_PARKED was cleared
                // in the meantime.
                let _ = unsafe {
                    parking_lot_core::park(
                        self as *const _ as usize,
                        || {
                            let state = self.state.load(Ordering::Relaxed);
                            (state & ONE_WRITER != 0) && (state & WRITERS_PARKED != 0)
                        },
                        || {},
                        |_, _| {},
                        ParkToken(0),
                        None,
                    )
                };

                // unpark_one may leave other writers in the queue, so after
                // waking we re-acquire with WRITERS_PARKED already set.
                acquire_with = WRITERS_PARKED;
                break;
            }
        }
    }

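    // A writer is unlocking and at least one parked bit is set. Readers are
    // woken in preference to writers; when both are waiting, the lock is
    // released to the readers with WRITERS_PARKED left set so the last
    // reader out wakes a writer.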
    #[cold]
    fn unlock_exclusive_slow(&self) {
        let state = self.state.load(Ordering::Relaxed);
        assert_eq!(state & ONE_WRITER, ONE_WRITER);

        let mut parked = state & (READERS_PARKED | WRITERS_PARKED);
        assert_ne!(parked, 0);

        // Only one kind of waiter: release the lock outright. The CAS can
        // fail only because the other parked bit was set concurrently.
        if parked != (READERS_PARKED | WRITERS_PARKED) {
            if let Err(new_state) =
                self.state
                    .compare_exchange(state, 0, Ordering::Release, Ordering::Relaxed)
            {
                assert_eq!(new_state, ONE_WRITER | READERS_PARKED | WRITERS_PARKED);
                parked = READERS_PARKED | WRITERS_PARKED;
            }
        }

        // Both kinds of waiter: release to the readers but leave
        // WRITERS_PARKED set, so the last reader out wakes a writer.
        if parked == (READERS_PARKED | WRITERS_PARKED) {
            self.state.store(WRITERS_PARKED, Ordering::Release);
            parked = READERS_PARKED;
        }

        if parked == READERS_PARKED {
            return unsafe {
                parking_lot_core::unpark_all((self as *const _ as usize) + 1, UnparkToken(0));
            };
        }

        assert_eq!(parked, WRITERS_PARKED);
        unsafe {
            parking_lot_core::unpark_one(self as *const _ as usize, |_| UnparkToken(0));
        }
    }

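    // `checked_add` overflows exactly when the writer pattern occupies the
    // upper bits, so `None` doubles as a "write-locked" test; the mask check
    // additionally keeps a huge reader count from growing into the writer
    // pattern.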
    #[inline(always)]
    fn try_lock_shared_fast(&self) -> bool {
        let state = self.state.load(Ordering::Relaxed);

        if let Some(new_state) = state.checked_add(ONE_READER) {
            if new_state & ONE_WRITER != ONE_WRITER {
                return self
                    .state
                    .compare_exchange_weak(state, new_state, Ordering::Acquire, Ordering::Relaxed)
                    .is_ok();
            }
        }

        false
    }

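    // Retry loop behind `try_lock_shared`: keep re-attempting the increment
    // until it sticks or a writer takes the lock. Never parks.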
    #[cold]
    fn try_lock_shared_slow(&self) -> bool {
        let mut state = self.state.load(Ordering::Relaxed);

        while let Some(new_state) = state.checked_add(ONE_READER) {
            if new_state & ONE_WRITER == ONE_WRITER {
                break;
            }

            match self.state.compare_exchange_weak(
                state,
                new_state,
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(_) => return true,
                Err(e) => state = e,
            }
        }

        false
    }

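    // Blocking shared acquire: spin while there is hope of the writer
    // leaving quickly, then set READERS_PARKED and park on the reader queue.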
    #[cold]
    fn lock_shared_slow(&self) {
        loop {
            let mut spin = SpinWait::new();
            let mut state = self.state.load(Ordering::Relaxed);

            loop {
                // While no writer holds the lock, try to add a reader,
                // backing off without yielding between failed attempts.
                let mut backoff = SpinWait::new();
                while let Some(new_state) = state.checked_add(ONE_READER) {
                    assert_ne!(
                        new_state & ONE_WRITER,
                        ONE_WRITER,
                        "reader count overflowed",
                    );

                    if self
                        .state
                        .compare_exchange_weak(
                            state,
                            new_state,
                            Ordering::Acquire,
                            Ordering::Relaxed,
                        )
                        .is_ok()
                    {
                        return;
                    }

                    backoff.spin_no_yield();
                    state = self.state.load(Ordering::Relaxed);
                }

                if state & READERS_PARKED == 0 {
                    if spin.spin() {
                        state = self.state.load(Ordering::Relaxed);
                        continue;
                    }

                    if let Err(e) = self.state.compare_exchange_weak(
                        state,
                        state | READERS_PARKED,
                        Ordering::Relaxed,
                        Ordering::Relaxed,
                    ) {
                        state = e;
                        continue;
                    }
                }

                // Park on the reader queue; only sleep while a writer still
                // holds the lock and READERS_PARKED is still set.
                let _ = unsafe {
                    parking_lot_core::park(
                        (self as *const _ as usize) + 1,
                        || {
                            let state = self.state.load(Ordering::Relaxed);
                            (state & ONE_WRITER == ONE_WRITER) && (state & READERS_PARKED != 0)
                        },
                        || {},
                        |_, _| {},
                        ParkToken(0),
                        None,
                    )
                };

                break;
            }
        }
    }

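    // The last reader left while a writer was parked. If the CAS fails,
    // another thread re-locked in the meantime and its own unlock will wake
    // the writer instead.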
    #[cold]
    fn unlock_shared_slow(&self) {
        if self
            .state
            .compare_exchange(WRITERS_PARKED, 0, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            unsafe {
                parking_lot_core::unpark_one(self as *const _ as usize, |_| UnparkToken(0));
            }
        }
    }
}
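
// A minimal sanity check, added here as an illustrative sketch rather than
// part of the original source: it drives the shared, exclusive, and
// downgrade paths through the public `RwLock` alias. The test name and
// shape are assumptions for illustration.
#[cfg(test)]
mod tests {
    use super::{RwLock, RwLockWriteGuard};

    #[test]
    fn shared_exclusive_downgrade() {
        let lock = RwLock::new(0usize);

        // Any number of shared guards may coexist.
        {
            let a = lock.read();
            let b = lock.read();
            assert_eq!(*a + *b, 0);
        }

        // An exclusive guard has sole, mutable access.
        let mut w = lock.write();
        *w = 1;

        // Downgrading keeps the lock held for reading without ever
        // releasing it.
        let r = RwLockWriteGuard::downgrade(w);
        assert_eq!(*r, 1);
    }
}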