about summary refs log tree commit diff
path: root/embassy-executor/src/raw/run_queue.rs
diff options
context:
space:
mode:
Diffstat (limited to 'embassy-executor/src/raw/run_queue.rs')
-rw-r--r--  embassy-executor/src/raw/run_queue.rs  13
1 file changed, 7 insertions, 6 deletions
diff --git a/embassy-executor/src/raw/run_queue.rs b/embassy-executor/src/raw/run_queue.rs
index ed8c82a5c..362157535 100644
--- a/embassy-executor/src/raw/run_queue.rs
+++ b/embassy-executor/src/raw/run_queue.rs
@@ -4,7 +4,7 @@ use core::ptr::NonNull;
 use atomic_polyfill::{AtomicPtr, Ordering};
 use critical_section::CriticalSection;
 
-use super::TaskHeader;
+use super::{TaskHeader, TaskRef};
 
 pub(crate) struct RunQueueItem {
     next: AtomicPtr<TaskHeader>,
@@ -46,25 +46,26 @@ impl RunQueue {
     ///
     /// `item` must NOT be already enqueued in any queue.
     #[inline(always)]
-    pub(crate) unsafe fn enqueue(&self, _cs: CriticalSection, task: NonNull<TaskHeader>) -> bool {
+    pub(crate) unsafe fn enqueue(&self, _cs: CriticalSection, task: TaskRef) -> bool {
         let prev = self.head.load(Ordering::Relaxed);
-        task.as_ref().run_queue_item.next.store(prev, Ordering::Relaxed);
-        self.head.store(task.as_ptr(), Ordering::Relaxed);
+        task.header().run_queue_item.next.store(prev, Ordering::Relaxed);
+        self.head.store(task.as_ptr() as _, Ordering::Relaxed);
         prev.is_null()
     }
 
     /// Empty the queue, then call `on_task` for each task that was in the queue.
     /// NOTE: It is OK for `on_task` to enqueue more tasks. In this case they're left in the queue
     /// and will be processed by the *next* call to `dequeue_all`, *not* the current one.
-    pub(crate) fn dequeue_all(&self, on_task: impl Fn(NonNull<TaskHeader>)) {
+    pub(crate) fn dequeue_all(&self, on_task: impl Fn(TaskRef)) {
         // Atomically empty the queue.
         let mut ptr = self.head.swap(ptr::null_mut(), Ordering::AcqRel);
 
         // Iterate the linked list of tasks that were previously in the queue.
         while let Some(task) = NonNull::new(ptr) {
+            let task = unsafe { TaskRef::from_ptr(task.as_ptr()) };
             // If the task re-enqueues itself, the `next` pointer will get overwritten.
             // Therefore, first read the next pointer, and only then process the task.
-            let next = unsafe { task.as_ref() }.run_queue_item.next.load(Ordering::Relaxed);
+            let next = task.header().run_queue_item.next.load(Ordering::Relaxed);
 
             on_task(task);
 