aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorbors[bot] <26634292+bors[bot]@users.noreply.github.com>2023-05-15 23:26:58 +0000
committerGitHub <[email protected]>2023-05-15 23:26:58 +0000
commit34a0c2172b8cdc93daa46ebf186599941baa056e (patch)
tree8ee79c9d003bf8422ad185e937b00b2d957ee72e
parent1a87f7477abdb033d960a6af63c95a9e0575e670 (diff)
parent0c18a13cc056d4d54ca7261289615b2d03769a76 (diff)
Merge #1459
1459: rp/multicore: fix undefined behavior in multicore spawn. r=Dirbaio a=Dirbaio It is UB to pass `entry` to core1 as `&mut`, because core0 keeps an aliasing pointer to that memory region, and actually writes to it (when `spawn_core1` returns, the stack frame gets deallocated and the memory gets reused). This violates noalias requirements. Added the fence just in case, even though it works without. bors r+ Co-authored-by: Dario Nieuwenhuis <[email protected]>
-rw-r--r--embassy-rp/src/multicore.rs11
1 files changed, 8 insertions, 3 deletions
diff --git a/embassy-rp/src/multicore.rs b/embassy-rp/src/multicore.rs
index c84fea5c8..9a445c26e 100644
--- a/embassy-rp/src/multicore.rs
+++ b/embassy-rp/src/multicore.rs
@@ -122,11 +122,16 @@ where
122 extern "C" fn core1_startup<F: FnOnce() -> bad::Never>( 122 extern "C" fn core1_startup<F: FnOnce() -> bad::Never>(
123 _: u64, 123 _: u64,
124 _: u64, 124 _: u64,
125 entry: &mut ManuallyDrop<F>, 125 entry: *mut ManuallyDrop<F>,
126 stack_bottom: *mut usize, 126 stack_bottom: *mut usize,
127 ) -> ! { 127 ) -> ! {
128 core1_setup(stack_bottom); 128 core1_setup(stack_bottom);
129 let entry = unsafe { ManuallyDrop::take(entry) }; 129
130 let entry = unsafe { ManuallyDrop::take(&mut *entry) };
131
132 // make sure the preceding read doesn't get reordered past the following fifo write
133 compiler_fence(Ordering::SeqCst);
134
130 // Signal that it's safe for core 0 to get rid of the original value now. 135 // Signal that it's safe for core 0 to get rid of the original value now.
131 fifo_write(1); 136 fifo_write(1);
132 137
@@ -164,7 +169,7 @@ where
164 169
165 // Push `entry`. 170 // Push `entry`.
166 stack_ptr = stack_ptr.sub(1); 171 stack_ptr = stack_ptr.sub(1);
167 stack_ptr.cast::<&mut ManuallyDrop<F>>().write(&mut entry); 172 stack_ptr.cast::<*mut ManuallyDrop<F>>().write(&mut entry);
168 } 173 }
169 174
170 // Make sure the compiler does not reorder the stack writes after to after the 175 // Make sure the compiler does not reorder the stack writes after to after the