diff options
| author | Ulf Lilleengen <[email protected]> | 2024-01-23 23:16:06 +0100 |
|---|---|---|
| committer | Ulf Lilleengen <[email protected]> | 2024-01-24 21:23:54 +0100 |
| commit | 6126183db852fbf4187d4a5516cc8bd6d3697443 (patch) | |
| tree | a7ace4bef8dfcf092372ca3a1e0108648bbb6e35 | |
| parent | 25f82538aed809c17264bab8cddad30004fb60cf (diff) | |
fix: remove portable-atomic from rng
| -rw-r--r-- | embassy-nrf/src/rng.rs | 125 |
1 file changed, 66 insertions, 59 deletions
diff --git a/embassy-nrf/src/rng.rs b/embassy-nrf/src/rng.rs index 966097578..6145dd14f 100644 --- a/embassy-nrf/src/rng.rs +++ b/embassy-nrf/src/rng.rs | |||
| @@ -9,8 +9,6 @@ use core::task::Poll; | |||
| 9 | 9 | ||
| 10 | use embassy_hal_internal::drop::OnDrop; | 10 | use embassy_hal_internal::drop::OnDrop; |
| 11 | use embassy_hal_internal::{into_ref, PeripheralRef}; | 11 | use embassy_hal_internal::{into_ref, PeripheralRef}; |
| 12 | use embassy_sync::waitqueue::AtomicWaker; | ||
| 13 | use portable_atomic::{AtomicPtr, Ordering}; | ||
| 14 | 12 | ||
| 15 | use crate::interrupt::typelevel::Interrupt; | 13 | use crate::interrupt::typelevel::Interrupt; |
| 16 | use crate::{interrupt, Peripheral}; | 14 | use crate::{interrupt, Peripheral}; |
| @@ -22,7 +20,6 @@ pub struct InterruptHandler<T: Instance> { | |||
| 22 | 20 | ||
| 23 | impl<T: Instance> interrupt::typelevel::Handler<T::Interrupt> for InterruptHandler<T> { | 21 | impl<T: Instance> interrupt::typelevel::Handler<T::Interrupt> for InterruptHandler<T> { |
| 24 | unsafe fn on_interrupt() { | 22 | unsafe fn on_interrupt() { |
| 25 | let s = T::state(); | ||
| 26 | let r = T::regs(); | 23 | let r = T::regs(); |
| 27 | 24 | ||
| 28 | // Clear the event. | 25 | // Clear the event. |
| @@ -30,46 +27,26 @@ impl<T: Instance> interrupt::typelevel::Handler<T::Interrupt> for InterruptHandl | |||
| 30 | 27 | ||
| 31 | // Mutate the slice within a critical section, | 28 | // Mutate the slice within a critical section, |
| 32 | // so that the future isn't dropped in between us loading the pointer and actually dereferencing it. | 29 | // so that the future isn't dropped in between us loading the pointer and actually dereferencing it. |
| 33 | let (ptr, end) = critical_section::with(|_| { | 30 | critical_section::with(|cs| { |
| 34 | let ptr = s.ptr.load(Ordering::Relaxed); | 31 | let mut state = T::state().borrow_mut(cs); |
| 35 | // We need to make sure we haven't already filled the whole slice, | 32 | // We need to make sure we haven't already filled the whole slice, |
| 36 | // in case the interrupt fired again before the executor got back to the future. | 33 | // in case the interrupt fired again before the executor got back to the future. |
| 37 | let end = s.end.load(Ordering::Relaxed); | 34 | if !state.ptr.is_null() && state.ptr != state.end { |
| 38 | if !ptr.is_null() && ptr != end { | ||
| 39 | // If the future was dropped, the pointer would have been set to null, | 35 | // If the future was dropped, the pointer would have been set to null, |
| 40 | // so we're still good to mutate the slice. | 36 | // so we're still good to mutate the slice. |
| 41 | // The safety contract of `Rng::new` means that the future can't have been dropped | 37 | // The safety contract of `Rng::new` means that the future can't have been dropped |
| 42 | // without calling its destructor. | 38 | // without calling its destructor. |
| 43 | unsafe { | 39 | unsafe { |
| 44 | *ptr = r.value.read().value().bits(); | 40 | *state.ptr = r.value.read().value().bits(); |
| 41 | state.ptr = state.ptr.add(1); | ||
| 45 | } | 42 | } |
| 46 | } | ||
| 47 | (ptr, end) | ||
| 48 | }); | ||
| 49 | |||
| 50 | if ptr.is_null() || ptr == end { | ||
| 51 | // If the future was dropped, there's nothing to do. | ||
| 52 | // If `ptr == end`, we were called by mistake, so return. | ||
| 53 | return; | ||
| 54 | } | ||
| 55 | 43 | ||
| 56 | let new_ptr = unsafe { ptr.add(1) }; | 44 | if state.ptr == state.end { |
| 57 | match s | 45 | state.waker.wake(); |
| 58 | .ptr | ||
| 59 | .compare_exchange(ptr, new_ptr, Ordering::Relaxed, Ordering::Relaxed) | ||
| 60 | { | ||
| 61 | Ok(_) => { | ||
| 62 | let end = s.end.load(Ordering::Relaxed); | ||
| 63 | // It doesn't matter if `end` was changed under our feet, because then this will just be false. | ||
| 64 | if new_ptr == end { | ||
| 65 | s.waker.wake(); | ||
| 66 | } | 46 | } |
| 47 | |||
| 67 | } | 48 | } |
| 68 | Err(_) => { | 49 | }); |
| 69 | // If the future was dropped or finished, there's no point trying to wake it. | ||
| 70 | // It will have already stopped the RNG, so there's no need to do that either. | ||
| 71 | } | ||
| 72 | } | ||
| 73 | } | 50 | } |
| 74 | } | 51 | } |
| 75 | 52 | ||
| @@ -136,13 +113,15 @@ impl<'d, T: Instance> Rng<'d, T> { | |||
| 136 | return; // Nothing to fill | 113 | return; // Nothing to fill |
| 137 | } | 114 | } |
| 138 | 115 | ||
| 139 | let s = T::state(); | ||
| 140 | 116 | ||
| 141 | let range = dest.as_mut_ptr_range(); | 117 | let range = dest.as_mut_ptr_range(); |
| 142 | // Even if we've preempted the interrupt, it can't preempt us again, | 118 | // Even if we've preempted the interrupt, it can't preempt us again, |
| 143 | // so we don't need to worry about the order we write these in. | 119 | // so we don't need to worry about the order we write these in. |
| 144 | s.ptr.store(range.start, Ordering::Relaxed); | 120 | critical_section::with(|cs| { |
| 145 | s.end.store(range.end, Ordering::Relaxed); | 121 | let mut state = T::state().borrow_mut(cs); |
| 122 | state.ptr = range.start; | ||
| 123 | state.end = range.end; | ||
| 124 | }); | ||
| 146 | 125 | ||
| 147 | self.enable_irq(); | 126 | self.enable_irq(); |
| 148 | self.start(); | 127 | self.start(); |
| @@ -151,24 +130,24 @@ impl<'d, T: Instance> Rng<'d, T> { | |||
| 151 | self.stop(); | 130 | self.stop(); |
| 152 | self.disable_irq(); | 131 | self.disable_irq(); |
| 153 | 132 | ||
| 154 | // The interrupt is now disabled and can't preempt us anymore, so the order doesn't matter here. | 133 | critical_section::with(|cs| { |
| 155 | s.ptr.store(ptr::null_mut(), Ordering::Relaxed); | 134 | let mut state = T::state().borrow_mut(cs); |
| 156 | s.end.store(ptr::null_mut(), Ordering::Relaxed); | 135 | state.ptr = ptr::null_mut(); |
| 136 | state.end = ptr::null_mut(); | ||
| 137 | }); | ||
| 157 | }); | 138 | }); |
| 158 | 139 | ||
| 159 | poll_fn(|cx| { | 140 | poll_fn(|cx| { |
| 160 | s.waker.register(cx.waker()); | 141 | critical_section::with(|cs| { |
| 161 | 142 | let mut s = T::state().borrow_mut(cs); | |
| 162 | // The interrupt will never modify `end`, so load it first and then get the most up-to-date `ptr`. | 143 | s.waker.register(cx.waker()); |
| 163 | let end = s.end.load(Ordering::Relaxed); | 144 | if s.ptr == s.end { |
| 164 | let ptr = s.ptr.load(Ordering::Relaxed); | 145 | // We're done. |
| 165 | 146 | Poll::Ready(()) | |
| 166 | if ptr == end { | 147 | } else { |
| 167 | // We're done. | 148 | Poll::Pending |
| 168 | Poll::Ready(()) | 149 | } |
| 169 | } else { | 150 | }) |
| 170 | Poll::Pending | ||
| 171 | } | ||
| 172 | }) | 151 | }) |
| 173 | .await; | 152 | .await; |
| 174 | 153 | ||
| @@ -194,9 +173,11 @@ impl<'d, T: Instance> Rng<'d, T> { | |||
| 194 | impl<'d, T: Instance> Drop for Rng<'d, T> { | 173 | impl<'d, T: Instance> Drop for Rng<'d, T> { |
| 195 | fn drop(&mut self) { | 174 | fn drop(&mut self) { |
| 196 | self.stop(); | 175 | self.stop(); |
| 197 | let s = T::state(); | 176 | critical_section::with(|cs| { |
| 198 | s.ptr.store(ptr::null_mut(), Ordering::Relaxed); | 177 | let mut state = T::state().borrow_mut(cs); |
| 199 | s.end.store(ptr::null_mut(), Ordering::Relaxed); | 178 | state.ptr = ptr::null_mut(); |
| 179 | state.end = ptr::null_mut(); | ||
| 180 | }); | ||
| 200 | } | 181 | } |
| 201 | } | 182 | } |
| 202 | 183 | ||
| @@ -227,21 +208,47 @@ impl<'d, T: Instance> rand_core::RngCore for Rng<'d, T> { | |||
| 227 | impl<'d, T: Instance> rand_core::CryptoRng for Rng<'d, T> {} | 208 | impl<'d, T: Instance> rand_core::CryptoRng for Rng<'d, T> {} |
| 228 | 209 | ||
| 229 | pub(crate) mod sealed { | 210 | pub(crate) mod sealed { |
| 211 | use core::cell::{Ref, RefMut, RefCell}; | ||
| 212 | |||
| 213 | use critical_section::Mutex; | ||
| 214 | use critical_section::CriticalSection; | ||
| 215 | use embassy_sync::waitqueue::WakerRegistration; | ||
| 216 | |||
| 230 | use super::*; | 217 | use super::*; |
| 231 | 218 | ||
| 232 | /// Peripheral static state | 219 | /// Peripheral static state |
| 233 | pub struct State { | 220 | pub struct State { |
| 234 | pub ptr: AtomicPtr<u8>, | 221 | inner: Mutex<RefCell<InnerState>>, |
| 235 | pub end: AtomicPtr<u8>, | 222 | } |
| 236 | pub waker: AtomicWaker, | 223 | |
| 224 | pub struct InnerState { | ||
| 225 | pub ptr: *mut u8, | ||
| 226 | pub end: *mut u8, | ||
| 227 | pub waker: WakerRegistration, | ||
| 237 | } | 228 | } |
| 238 | 229 | ||
| 239 | impl State { | 230 | impl State { |
| 240 | pub const fn new() -> Self { | 231 | pub const fn new() -> Self { |
| 241 | Self { | 232 | Self { |
| 242 | ptr: AtomicPtr::new(ptr::null_mut()), | 233 | inner: Mutex::new(RefCell::new(InnerState::new())), |
| 243 | end: AtomicPtr::new(ptr::null_mut()), | 234 | } |
| 244 | waker: AtomicWaker::new(), | 235 | } |
| 236 | |||
| 237 | pub fn borrow<'cs>(&'cs self, cs: CriticalSection<'cs>) -> Ref<'cs, InnerState> { | ||
| 238 | self.inner.borrow(cs).borrow() | ||
| 239 | } | ||
| 240 | |||
| 241 | pub fn borrow_mut<'cs>(&'cs self, cs: CriticalSection<'cs>) -> RefMut<'cs, InnerState> { | ||
| 242 | self.inner.borrow(cs).borrow_mut() | ||
| 243 | } | ||
| 244 | } | ||
| 245 | |||
| 246 | impl InnerState { | ||
| 247 | pub const fn new() -> Self { | ||
| 248 | Self { | ||
| 249 | ptr: ptr::null_mut(), | ||
| 250 | end: ptr::null_mut(), | ||
| 251 | waker: WakerRegistration::new(), | ||
| 245 | } | 252 | } |
| 246 | } | 253 | } |
| 247 | } | 254 | } |
