aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--embassy-sync/src/raw_rwlock.rs86
-rw-r--r--embassy-sync/src/rwlock.rs287
2 files changed, 231 insertions, 142 deletions
diff --git a/embassy-sync/src/raw_rwlock.rs b/embassy-sync/src/raw_rwlock.rs
new file mode 100644
index 000000000..de4bd1dc5
--- /dev/null
+++ b/embassy-sync/src/raw_rwlock.rs
@@ -0,0 +1,86 @@
1use core::sync::atomic::{AtomicUsize, Ordering};
2use core::task::Waker;
3use core::cell::UnsafeCell;
4
/// Reader/writer lock behavior shared by all raw rwlock implementations.
pub trait RawRwLock {
    /// Block (spin) until a shared read lock is acquired.
    fn lock_read(&self);
    /// Try once to acquire a read lock; returns `true` on success.
    fn try_lock_read(&self) -> bool;
    /// Release a previously acquired read lock.
    fn unlock_read(&self);
    /// Block (spin) until the exclusive write lock is acquired.
    fn lock_write(&self);
    /// Try once to acquire the write lock; returns `true` on success.
    fn try_lock_write(&self) -> bool;
    /// Release the write lock.
    fn unlock_write(&self);
}

/// Spin-based raw reader/writer lock.
///
/// State encoding in `state`:
/// * bit 0   — writer flag (`1` = write-locked)
/// * bits 1… — reader count, stored as `readers * 2`
///
/// So `0` = unlocked, `1` = write-locked, any even value >= 2 = read-locked.
pub struct RawRwLockImpl {
    state: AtomicUsize,
    // Waker to notify when the lock becomes free.
    // NOTE(review): nothing in this file ever registers a waker here, and the
    // cell is read without any synchronization of its own — confirm that all
    // registration sites hold the lock state in a way that guarantees
    // exclusive access before relying on this.
    waker: UnsafeCell<Option<Waker>>,
}

impl RawRwLockImpl {
    /// Create a new, unlocked lock.
    pub const fn new() -> Self {
        Self {
            state: AtomicUsize::new(0),
            waker: UnsafeCell::new(None),
        }
    }
}

// SAFETY: `state` is an atomic. `waker` is only read on unlock paths; see the
// NOTE on the field about the (currently absent) registration side.
unsafe impl Send for RawRwLockImpl {}
unsafe impl Sync for RawRwLockImpl {}

impl RawRwLock for RawRwLockImpl {
    fn lock_read(&self) {
        let mut state = self.state.load(Ordering::Relaxed);
        loop {
            if state & 1 != 0 {
                // Write-locked: spin until the writer releases.
                core::hint::spin_loop();
                state = self.state.load(Ordering::Relaxed);
                continue;
            }
            // `compare_exchange_weak` replaces the deprecated
            // `compare_and_swap`; spurious failures simply retry with the
            // freshly observed value.
            match self
                .state
                .compare_exchange_weak(state, state + 2, Ordering::Acquire, Ordering::Relaxed)
            {
                Ok(_) => return,
                Err(actual) => state = actual,
            }
        }
    }

    fn try_lock_read(&self) -> bool {
        let state = self.state.load(Ordering::Relaxed);
        // Single attempt, matching the original semantics: may return `false`
        // under reader contention even though the lock is readable.
        state & 1 == 0
            && self
                .state
                .compare_exchange(state, state + 2, Ordering::Acquire, Ordering::Relaxed)
                .is_ok()
    }

    fn unlock_read(&self) {
        // Decide "was I the last reader?" from the value *returned* by the
        // RMW. The original re-loaded `state` after the `fetch_sub`, which
        // raced with concurrent lockers/unlockers and could wake twice or
        // not at all.
        let prev = self.state.fetch_sub(2, Ordering::AcqRel);
        debug_assert!(prev >= 2 && prev & 1 == 0, "unlock_read without a read lock held");
        if prev == 2 {
            // We released the last read lock; the lock is now free.
            if let Some(waker) = unsafe { &*self.waker.get() } {
                waker.wake_by_ref();
            }
        }
    }

    fn lock_write(&self) {
        // The write lock is only obtainable from the fully-unlocked state.
        while self
            .state
            .compare_exchange_weak(0, 1, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {
            core::hint::spin_loop();
        }
    }

    fn try_lock_write(&self) -> bool {
        self.state
            .compare_exchange(0, 1, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    fn unlock_write(&self) {
        self.state.store(0, Ordering::Release);
        if let Some(waker) = unsafe { &*self.waker.get() } {
            waker.wake_by_ref();
        }
    }
}
diff --git a/embassy-sync/src/rwlock.rs b/embassy-sync/src/rwlock.rs
index 15ea8468e..30e1e74ad 100644
--- a/embassy-sync/src/rwlock.rs
+++ b/embassy-sync/src/rwlock.rs
@@ -1,136 +1,134 @@
1use core::cell::RefCell; 1use core::cell::UnsafeCell;
2use core::future::{poll_fn, Future}; 2use core::future::poll_fn;
3use core::ops::{Deref, DerefMut}; 3use core::ops::{Deref, DerefMut};
4use core::task::Poll; 4use core::task::Poll;
5 5
6use crate::blocking_mutex::raw::RawMutex;
7use crate::blocking_mutex::Mutex as BlockingMutex; 6use crate::blocking_mutex::Mutex as BlockingMutex;
8use crate::waitqueue::MultiWakerRegistration; 7use crate::waitqueue::WakerRegistration;
8use crate::raw_rwlock::RawRwLock;
9 9
10/// Error returned by [`RwLock::try_read`] and [`RwLock::try_write`]
11#[derive(PartialEq, Eq, Clone, Copy, Debug)]
12#[cfg_attr(feature = "defmt", derive(defmt::Format))]
13pub struct TryLockError;
14
15/// Async read-write lock.
16///
17/// The lock is generic over a blocking [`RawMutex`](crate::blocking_mutex::raw::RawMutex).
18/// The raw mutex is used to guard access to the internal state. It
19/// is held for very short periods only, while locking and unlocking. It is *not* held
20/// for the entire time the async RwLock is locked.
21///
22/// Which implementation you select depends on the context in which you're using the lock.
23///
24/// Use [`CriticalSectionRawMutex`](crate::blocking_mutex::raw::CriticalSectionRawMutex) when data can be shared between threads and interrupts.
25///
26/// Use [`NoopRawMutex`](crate::blocking_mutex::raw::NoopRawMutex) when data is only shared between tasks running on the same executor.
27///
28/// Use [`ThreadModeRawMutex`](crate::blocking_mutex::raw::ThreadModeRawMutex) when data is shared between tasks running on the same executor but you want a singleton.
29///
30pub struct RwLock<M, T> 10pub struct RwLock<M, T>
31where 11where
32 M: RawMutex, 12 M: RawRwLock,
33 T: ?Sized, 13 T: ?Sized,
34{ 14{
35 state: BlockingMutex<M, RefCell<State>>, 15 state: BlockingMutex<M, RwLockState>,
36 inner: RefCell<T>, 16 inner: UnsafeCell<T>,
37}
38
39struct State {
40 readers: usize,
41 writer: bool,
42 writer_waker: MultiWakerRegistration<1>,
43 reader_wakers: MultiWakerRegistration<8>,
44} 17}
45 18
46impl State { 19unsafe impl<M: RawRwLock + Send, T: ?Sized + Send> Send for RwLock<M, T> {}
47 fn new() -> Self { 20unsafe impl<M: RawRwLock + Sync, T: ?Sized + Send> Sync for RwLock<M, T> {}
48 Self {
49 readers: 0,
50 writer: false,
51 writer_waker: MultiWakerRegistration::new(),
52 reader_wakers: MultiWakerRegistration::new(),
53 }
54 }
55}
56 21
57impl<M, T> RwLock<M, T> 22impl<M, T> RwLock<M, T>
58where 23where
59 M: RawMutex, 24 M: RawRwLock,
60{ 25{
61 /// Create a new read-write lock with the given value.
62 pub const fn new(value: T) -> Self { 26 pub const fn new(value: T) -> Self {
63 Self { 27 Self {
64 inner: RefCell::new(value), 28 inner: UnsafeCell::new(value),
65 state: BlockingMutex::new(RefCell::new(State::new())), 29 state: BlockingMutex::new(RwLockState {
30 locked: LockedState::Unlocked,
31 writer_pending: 0,
32 readers_pending: 0,
33 waker: WakerRegistration::new(),
34 }),
66 } 35 }
67 } 36 }
68} 37}
69 38
70impl<M, T> RwLock<M, T> 39impl<M, T> RwLock<M, T>
71where 40where
72 M: RawMutex, 41 M: RawRwLock,
73 T: ?Sized, 42 T: ?Sized,
74{ 43{
75 /// Acquire a read lock.
76 ///
77 /// This will wait for the lock to be available if it's already locked for writing.
78 pub fn read(&self) -> impl Future<Output = RwLockReadGuard<'_, M, T>> { 44 pub fn read(&self) -> impl Future<Output = RwLockReadGuard<'_, M, T>> {
79 poll_fn(|cx| { 45 poll_fn(|cx| {
80 let mut state = self.state.lock(|s| s.borrow_mut()); 46 let ready = self.state.lock(|s| {
81 if state.writer { 47 let mut s = s.borrow_mut();
82 state.reader_wakers.register(cx.waker()); 48 match s.locked {
83 Poll::Pending 49 LockedState::Unlocked => {
84 } else { 50 s.locked = LockedState::ReadLocked(1);
85 state.readers += 1; 51 true
52 }
53 LockedState::ReadLocked(ref mut count) => {
54 *count += 1;
55 true
56 }
57 LockedState::WriteLocked => {
58 s.readers_pending += 1;
59 s.waker.register(cx.waker());
60 false
61 }
62 }
63 });
64
65 if ready {
86 Poll::Ready(RwLockReadGuard { lock: self }) 66 Poll::Ready(RwLockReadGuard { lock: self })
67 } else {
68 Poll::Pending
87 } 69 }
88 }) 70 })
89 } 71 }
90 72
91 /// Acquire a write lock.
92 ///
93 /// This will wait for the lock to be available if it's already locked for reading or writing.
94 pub fn write(&self) -> impl Future<Output = RwLockWriteGuard<'_, M, T>> { 73 pub fn write(&self) -> impl Future<Output = RwLockWriteGuard<'_, M, T>> {
95 poll_fn(|cx| { 74 poll_fn(|cx| {
96 let mut state = self.state.lock(|s| s.borrow_mut()); 75 let ready = self.state.lock(|s| {
97 if state.writer || state.readers > 0 { 76 let mut s = s.borrow_mut();
98 state.writer_waker.register(cx.waker()); 77 match s.locked {
99 Poll::Pending 78 LockedState::Unlocked => {
100 } else { 79 s.locked = LockedState::WriteLocked;
101 state.writer = true; 80 true
81 }
82 _ => {
83 s.writer_pending += 1;
84 s.waker.register(cx.waker());
85 false
86 }
87 }
88 });
89
90 if ready {
102 Poll::Ready(RwLockWriteGuard { lock: self }) 91 Poll::Ready(RwLockWriteGuard { lock: self })
92 } else {
93 Poll::Pending
103 } 94 }
104 }) 95 })
105 } 96 }
106 97
107 /// Attempt to immediately acquire a read lock.
108 ///
109 /// If the lock is already locked for writing, this will return an error instead of waiting.
110 pub fn try_read(&self) -> Result<RwLockReadGuard<'_, M, T>, TryLockError> { 98 pub fn try_read(&self) -> Result<RwLockReadGuard<'_, M, T>, TryLockError> {
111 let mut state = self.state.lock(|s| s.borrow_mut()); 99 self.state.lock(|s| {
112 if state.writer { 100 let mut s = s.borrow_mut();
113 Err(TryLockError) 101 match s.locked {
114 } else { 102 LockedState::Unlocked => {
115 state.readers += 1; 103 s.locked = LockedState::ReadLocked(1);
116 Ok(RwLockReadGuard { lock: self }) 104 Ok(())
117 } 105 }
106 LockedState::ReadLocked(ref mut count) => {
107 *count += 1;
108 Ok(())
109 }
110 LockedState::WriteLocked => Err(TryLockError),
111 }
112 })?;
113
114 Ok(RwLockReadGuard { lock: self })
118 } 115 }
119 116
120 /// Attempt to immediately acquire a write lock.
121 ///
122 /// If the lock is already locked for reading or writing, this will return an error instead of waiting.
123 pub fn try_write(&self) -> Result<RwLockWriteGuard<'_, M, T>, TryLockError> { 117 pub fn try_write(&self) -> Result<RwLockWriteGuard<'_, M, T>, TryLockError> {
124 let mut state = self.state.lock(|s| s.borrow_mut()); 118 self.state.lock(|s| {
125 if state.writer || state.readers > 0 { 119 let mut s = s.borrow_mut();
126 Err(TryLockError) 120 match s.locked {
127 } else { 121 LockedState::Unlocked => {
128 state.writer = true; 122 s.locked = LockedState::WriteLocked;
129 Ok(RwLockWriteGuard { lock: self }) 123 Ok(())
130 } 124 }
125 _ => Err(TryLockError),
126 }
127 })?;
128
129 Ok(RwLockWriteGuard { lock: self })
131 } 130 }
132 131
133 /// Consumes this lock, returning the underlying data.
134 pub fn into_inner(self) -> T 132 pub fn into_inner(self) -> T
135 where 133 where
136 T: Sized, 134 T: Sized,
@@ -138,19 +136,12 @@ where
138 self.inner.into_inner() 136 self.inner.into_inner()
139 } 137 }
140 138
141 /// Returns a mutable reference to the underlying data.
142 ///
143 /// Since this call borrows the RwLock mutably, no actual locking needs to
144 /// take place -- the mutable borrow statically guarantees no locks exist.
145 pub fn get_mut(&mut self) -> &mut T { 139 pub fn get_mut(&mut self) -> &mut T {
146 self.inner.get_mut() 140 self.inner.get_mut()
147 } 141 }
148} 142}
149 143
150impl<M, T> From<T> for RwLock<M, T> 144impl<M: RawRwLock, T> From<T> for RwLock<M, T> {
151where
152 M: RawMutex,
153{
154 fn from(from: T) -> Self { 145 fn from(from: T) -> Self {
155 Self::new(from) 146 Self::new(from)
156 } 147 }
@@ -158,7 +149,7 @@ where
158 149
159impl<M, T> Default for RwLock<M, T> 150impl<M, T> Default for RwLock<M, T>
160where 151where
161 M: RawMutex, 152 M: RawRwLock,
162 T: Default, 153 T: Default,
163{ 154{
164 fn default() -> Self { 155 fn default() -> Self {
@@ -166,91 +157,103 @@ where
166 } 157 }
167} 158}
168 159
169/// Async read lock guard.
170///
171/// Owning an instance of this type indicates having
172/// successfully locked the RwLock for reading, and grants access to the contents.
173///
174/// Dropping it unlocks the RwLock.
175#[must_use = "if unused the RwLock will immediately unlock"]
176pub struct RwLockReadGuard<'a, M, T> 160pub struct RwLockReadGuard<'a, M, T>
177where 161where
178 M: RawMutex, 162 M: RawRwLock,
179 T: ?Sized, 163 T: ?Sized,
180{ 164{
181 lock: &'a RwLock<M, T>, 165 lock: &'a RwLock<M, T>,
182} 166}
183 167
184impl<'a, M, T> Drop for RwLockReadGuard<'a, M, T> 168impl<'a, M, T> Deref for RwLockReadGuard<'a, M, T>
185where 169where
186 M: RawMutex, 170 M: RawRwLock,
187 T: ?Sized, 171 T: ?Sized,
188{ 172{
189 fn drop(&mut self) { 173 type Target = T;
190 let mut state = self.lock.state.lock(|s| s.borrow_mut()); 174
191 state.readers -= 1; 175 fn deref(&self) -> &Self::Target {
192 if state.readers == 0 { 176 unsafe { &*self.lock.inner.get() }
193 state.writer_waker.wake();
194 }
195 } 177 }
196} 178}
197 179
198impl<'a, M, T> Deref for RwLockReadGuard<'a, M, T> 180impl<'a, M, T> Drop for RwLockReadGuard<'a, M, T>
199where 181where
200 M: RawMutex, 182 M: RawRwLock,
201 T: ?Sized, 183 T: ?Sized,
202{ 184{
203 type Target = T; 185 fn drop(&mut self) {
204 fn deref(&self) -> &Self::Target { 186 self.lock.state.lock(|s| {
205 self.lock.inner.borrow() 187 let mut s = s.borrow_mut();
188 match s.locked {
189 LockedState::ReadLocked(ref mut count) => {
190 *count -= 1;
191 if *count == 0 {
192 s.locked = LockedState::Unlocked;
193 s.waker.wake();
194 }
195 }
196 _ => unreachable!(),
197 }
198 });
206 } 199 }
207} 200}
208 201
209/// Async write lock guard.
210///
211/// Owning an instance of this type indicates having
212/// successfully locked the RwLock for writing, and grants access to the contents.
213///
214/// Dropping it unlocks the RwLock.
215#[must_use = "if unused the RwLock will immediately unlock"]
216pub struct RwLockWriteGuard<'a, M, T> 202pub struct RwLockWriteGuard<'a, M, T>
217where 203where
218 M: RawMutex, 204 M: RawRwLock,
219 T: ?Sized, 205 T: ?Sized,
220{ 206{
221 lock: &'a RwLock<M, T>, 207 lock: &'a RwLock<M, T>,
222} 208}
223 209
224impl<'a, M, T> Drop for RwLockWriteGuard<'a, M, T> 210impl<'a, M, T> Deref for RwLockWriteGuard<'a, M, T>
225where 211where
226 M: RawMutex, 212 M: RawRwLock,
227 T: ?Sized, 213 T: ?Sized,
228{ 214{
229 fn drop(&mut self) { 215 type Target = T;
230 let mut state = self.lock.state.lock(|s| s.borrow_mut()); 216
231 state.writer = false; 217 fn deref(&self) -> &Self::Target {
232 state.reader_wakers.wake(); 218 unsafe { &*self.lock.inner.get() }
233 state.writer_waker.wake();
234 } 219 }
235} 220}
236 221
237impl<'a, M, T> Deref for RwLockWriteGuard<'a, M, T> 222impl<'a, M, T> DerefMut for RwLockWriteGuard<'a, M, T>
238where 223where
239 M: RawMutex, 224 M: RawRwLock,
240 T: ?Sized, 225 T: ?Sized,
241{ 226{
242 type Target = T; 227 fn deref_mut(&mut self) -> &mut Self::Target {
243 fn deref(&self) -> &Self::Target { 228 unsafe { &mut *self.lock.inner.get() }
244 self.lock.inner.borrow()
245 } 229 }
246} 230}
247 231
248impl<'a, M, T> DerefMut for RwLockWriteGuard<'a, M, T> 232impl<'a, M, T> Drop for RwLockWriteGuard<'a, M, T>
249where 233where
250 M: RawMutex, 234 M: RawRwLock,
251 T: ?Sized, 235 T: ?Sized,
252{ 236{
253 fn deref_mut(&mut self) -> &mut Self::Target { 237 fn drop(&mut self) {
254 self.lock.inner.borrow_mut() 238 self.lock.state.lock(|s| {
239 let mut s = s.borrow_mut();
240 s.locked = LockedState::Unlocked;
241 s.waker.wake();
242 });
255 } 243 }
256} 244}
245
246struct RwLockState {
247 locked: LockedState,
248 writer_pending: usize,
249 readers_pending: usize,
250 waker: WakerRegistration,
251}
252
253enum LockedState {
254 Unlocked,
255 ReadLocked(usize),
256 WriteLocked,
257}
258
259pub struct TryLockError;