aboutsummaryrefslogtreecommitdiff
path: root/embassy-net/src/pool.rs
diff options
context:
space:
mode:
authorDario Nieuwenhuis <[email protected]>2021-02-03 05:09:37 +0100
committerDario Nieuwenhuis <[email protected]>2021-02-03 05:09:37 +0100
commitcb5931d583d283dda3a1b5ed2014c086bb8f98ae (patch)
tree19a669238e0d562bf74616fe38485388ec40b02a /embassy-net/src/pool.rs
:rainbow:
Diffstat (limited to 'embassy-net/src/pool.rs')
-rw-r--r--embassy-net/src/pool.rs245
1 file changed, 245 insertions, 0 deletions
diff --git a/embassy-net/src/pool.rs b/embassy-net/src/pool.rs
new file mode 100644
index 000000000..3ab36e4cc
--- /dev/null
+++ b/embassy-net/src/pool.rs
@@ -0,0 +1,245 @@
1#![macro_use]
2
3use as_slice::{AsMutSlice, AsSlice};
4use core::cmp;
5use core::fmt;
6use core::hash::{Hash, Hasher};
7use core::mem::MaybeUninit;
8use core::ops::{Deref, DerefMut};
9use core::sync::atomic::{AtomicU32, Ordering};
10
11use crate::fmt::{assert, *};
12
/// A fixed-capacity set of `N` bits backed by an array of `AtomicU32`
/// words, used for lock-free slot allocation.
///
/// `(N + 31) / 32` rounds up to one `u32` word per 32 bits. The `where`
/// clause sizes the array with an unstable const-generic expression
/// (requires a nightly feature such as `generic_const_exprs`).
struct AtomicBitset<const N: usize>
where
    [AtomicU32; (N + 31) / 32]: Sized,
{
    // Bit i (word i / 32, bit i % 32) is 1 while slot i is in use.
    used: [AtomicU32; (N + 31) / 32],
}
19
20impl<const N: usize> AtomicBitset<N>
21where
22 [AtomicU32; (N + 31) / 32]: Sized,
23{
24 const fn new() -> Self {
25 const Z: AtomicU32 = AtomicU32::new(0);
26 Self {
27 used: [Z; (N + 31) / 32],
28 }
29 }
30
31 fn alloc(&self) -> Option<usize> {
32 for (i, val) in self.used.iter().enumerate() {
33 let res = val.fetch_update(Ordering::AcqRel, Ordering::Acquire, |val| {
34 let n = val.trailing_ones() as usize + i * 32;
35 if n >= N {
36 None
37 } else {
38 Some(val | (1 << n))
39 }
40 });
41 if let Ok(val) = res {
42 let n = val.trailing_ones() as usize + i * 32;
43 return Some(n);
44 }
45 }
46 None
47 }
48 fn free(&self, i: usize) {
49 assert!(i < N);
50 self.used[i / 32].fetch_and(!(1 << ((i % 32) as u32)), Ordering::AcqRel);
51 }
52}
53
/// A fixed-capacity allocator for values of type `T`.
pub trait Pool<T> {
    /// Allocates storage for one `T`, returning a pointer to
    /// uninitialized memory, or `None` when the pool is exhausted.
    fn alloc(&self) -> Option<*mut T>;
    /// Returns storage to the pool.
    ///
    /// # Safety
    ///
    /// `p` must have been obtained from `self.alloc()` and must not have
    /// been freed already.
    unsafe fn free(&self, p: *mut T);
}
58
/// A [`Pool`] of `N` inline items of type `T`, with occupancy tracked by
/// an [`AtomicBitset`].
pub struct BitPool<T, const N: usize>
where
    [AtomicU32; (N + 31) / 32]: Sized,
{
    // Bit i set means `data[i]` currently holds a live item.
    used: AtomicBitset<N>,
    // Backing storage; each entry is only initialized while allocated.
    data: MaybeUninit<[T; N]>,
}
66
impl<T, const N: usize> BitPool<T, N>
where
    [AtomicU32; (N + 31) / 32]: Sized,
{
    /// Creates an empty pool. `const` so it can initialize a `static`
    /// (as done by the `pool!` macro below).
    pub const fn new() -> Self {
        Self {
            used: AtomicBitset::new(),
            data: MaybeUninit::uninit(),
        }
    }
}
78
impl<T, const N: usize> Pool<T> for BitPool<T, N>
where
    [AtomicU32; (N + 31) / 32]: Sized,
{
    /// Claims a free slot and returns a pointer into `self.data`; the
    /// pointee is uninitialized. Returns `None` when the pool is full.
    fn alloc(&self) -> Option<*mut T> {
        let n = self.used.alloc()?;
        // NOTE(review): this casts a `*const` derived from `&self` to
        // `*mut`, and callers later write through it without an
        // `UnsafeCell` in the struct — verify this is sound under the
        // aliasing rules.
        let origin = self.data.as_ptr() as *mut T;
        Some(unsafe { origin.add(n) })
    }

    /// # Safety
    ///
    /// `p` must be a pointer obtained from `self.alloc` that hasn't been
    /// freed yet.
    unsafe fn free(&self, p: *mut T) {
        let origin = self.data.as_ptr() as *mut T;
        // Recover the slot index from the pointer; the asserts reject
        // pointers that this pool never handed out.
        let n = p.offset_from(origin);
        assert!(n >= 0);
        assert!((n as usize) < N);
        self.used.free(n as usize);
    }
}
98
/// A `'static` handle to a global [`Pool`], used as the type parameter
/// of [`Box`] so a box stores only the item pointer, not a pool pointer.
pub trait StaticPool: 'static {
    /// Item type stored in the pool.
    type Item: 'static;
    /// Concrete pool implementation.
    type Pool: Pool<Self::Item>;
    /// Returns the global pool instance.
    fn get() -> &'static Self::Pool;
}
104
/// An owned, pool-allocated box: like `alloc::boxed::Box`, but the
/// storage comes from the static pool `P` instead of a heap.
pub struct Box<P: StaticPool> {
    // Points into `P::get()`'s storage; exclusively owned by this `Box`
    // until `Drop` returns the slot to the pool.
    ptr: *mut P::Item,
}
108
109impl<P: StaticPool> Box<P> {
110 pub fn new(item: P::Item) -> Option<Self> {
111 let p = match P::get().alloc() {
112 Some(p) => p,
113 None => {
114 warn!("alloc failed!");
115 return None;
116 }
117 };
118 //trace!("allocated {:u32}", p as u32);
119 unsafe { p.write(item) };
120 Some(Self { ptr: p })
121 }
122}
123
impl<P: StaticPool> Drop for Box<P> {
    fn drop(&mut self) {
        unsafe {
            // Run the item's destructor first, then hand the (now
            // uninitialized) slot back to the pool — this order matters.
            self.ptr.drop_in_place();
            P::get().free(self.ptr);
        };
    }
}
133
// SAFETY: `Box<P>` uniquely owns the pointed-to item, so it may move to
// another thread whenever the item itself is `Send`.
unsafe impl<P: StaticPool> Send for Box<P> where P::Item: Send {}

// SAFETY: shared access to a `Box<P>` only hands out `&P::Item` (via
// `Deref`), so it is `Sync` exactly when the item is.
unsafe impl<P: StaticPool> Sync for Box<P> where P::Item: Sync {}

// SAFETY: `deref` returns the stored pool-slot pointer, which does not
// change when the `Box` itself is moved.
unsafe impl<P: StaticPool> stable_deref_trait::StableDeref for Box<P> {}
139
140impl<P: StaticPool> AsSlice for Box<P>
141where
142 P::Item: AsSlice,
143{
144 type Element = <P::Item as AsSlice>::Element;
145
146 fn as_slice(&self) -> &[Self::Element] {
147 self.deref().as_slice()
148 }
149}
150
151impl<P: StaticPool> AsMutSlice for Box<P>
152where
153 P::Item: AsMutSlice,
154{
155 fn as_mut_slice(&mut self) -> &mut [Self::Element] {
156 self.deref_mut().as_mut_slice()
157 }
158}
159
impl<P: StaticPool> Deref for Box<P> {
    type Target = P::Item;

    fn deref(&self) -> &P::Item {
        // SAFETY: `ptr` was initialized in `Box::new` and remains valid
        // and exclusively owned by this `Box` until `drop`.
        unsafe { &*self.ptr }
    }
}
167
impl<P: StaticPool> DerefMut for Box<P> {
    fn deref_mut(&mut self) -> &mut P::Item {
        // SAFETY: `ptr` was initialized in `Box::new`; `&mut self`
        // guarantees exclusive access.
        unsafe { &mut *self.ptr }
    }
}
173
174impl<P: StaticPool> fmt::Debug for Box<P>
175where
176 P::Item: fmt::Debug,
177{
178 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
179 <P::Item as fmt::Debug>::fmt(self, f)
180 }
181}
182
183impl<P: StaticPool> fmt::Display for Box<P>
184where
185 P::Item: fmt::Display,
186{
187 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
188 <P::Item as fmt::Display>::fmt(self, f)
189 }
190}
191
192impl<P: StaticPool> PartialEq for Box<P>
193where
194 P::Item: PartialEq,
195{
196 fn eq(&self, rhs: &Box<P>) -> bool {
197 <P::Item as PartialEq>::eq(self, rhs)
198 }
199}
200
// Marker only: equality comes from the `PartialEq` impl above.
impl<P: StaticPool> Eq for Box<P> where P::Item: Eq {}
202
203impl<P: StaticPool> PartialOrd for Box<P>
204where
205 P::Item: PartialOrd,
206{
207 fn partial_cmp(&self, rhs: &Box<P>) -> Option<cmp::Ordering> {
208 <P::Item as PartialOrd>::partial_cmp(self, rhs)
209 }
210}
211
212impl<P: StaticPool> Ord for Box<P>
213where
214 P::Item: Ord,
215{
216 fn cmp(&self, rhs: &Box<P>) -> cmp::Ordering {
217 <P::Item as Ord>::cmp(self, rhs)
218 }
219}
220
221impl<P: StaticPool> Hash for Box<P>
222where
223 P::Item: Hash,
224{
225 fn hash<H>(&self, state: &mut H)
226 where
227 H: Hasher,
228 {
229 <P::Item as Hash>::hash(self, state)
230 }
231}
232
/// Declares a zero-sized type `$name` implementing [`StaticPool`],
/// backed by a private `static` `BitPool` of `$size` items of `$ty`.
///
/// Example: `pool!(pub PacketPool: [Packet; 8]);`
macro_rules! pool {
    ($vis:vis $name:ident: [$ty:ty; $size:expr]) => {
        $vis struct $name;
        impl StaticPool for $name {
            type Item = $ty;
            type Pool = BitPool<$ty, $size>;
            fn get() -> &'static Self::Pool {
                // One shared static pool per declared `$name`.
                static POOL: BitPool<$ty, $size> = BitPool::new();
                &POOL
            }
        }
    };
}