#![no_std]
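//! Single-producer, single-consumer ring buffers over shared memory, with
//! distinct free and used rings in the style of seL4 device-driver queues.
//! Each side keeps a locally cached copy of both indices and validates every
//! index it reads from the peer, so a misbehaving peer surfaces as a
//! [`PeerMisbehaviorError`] rather than corrupting local state.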

use core::marker::PhantomData;
use core::num::Wrapping;
use core::sync::atomic::{AtomicU32, Ordering};

use zerocopy::{FromBytes, IntoBytes};

use sel4_shared_memory::{map_field, SharedMemoryPtr, SharedMemoryRef};

pub mod roles;

use roles::{Read, RingBufferRole, RingBufferRoleValue, RingBuffersRole, Write};

mod descriptor;

pub use descriptor::Descriptor;

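/// The number of descriptor slots in each ring. One slot is always left
/// unused, so the effective capacity is `RING_BUFFER_SIZE - 1` (see
/// [`RingBuffer::capacity`]).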
pub const RING_BUFFER_SIZE: usize = 512;

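/// Error indicating that the peer updated its index in a way that is
/// impossible under the protocol (for example, retracting descriptors it had
/// already made visible).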
#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct PeerMisbehaviorError(());

impl PeerMisbehaviorError {
    fn new() -> Self {
        Self(())
    }
}

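/// A matched pair of rings, one for free descriptors and one for used
/// descriptors, together with a callback for notifying the peer. The role
/// parameter `R` decides which of the two rings this side writes to.
///
/// A minimal usage sketch, assuming `free` and `used` are
/// `SharedMemoryRef<'_, RawRingBuffer>`s established elsewhere, `MyRole` is a
/// concrete role from the `roles` module, and `notify_peer` is a hypothetical
/// closure that signals the peer:
///
/// ```ignore
/// let mut rings = RingBuffers::<MyRole, _>::from_ptrs_using_default_initialization_strategy_for_role(
///     free,
///     used,
///     notify_peer,
/// );
/// // On a side whose used ring has the `Write` role:
/// if rings.used_mut().enqueue_and_commit(desc)?.is_ok() {
///     rings.notify_mut();
/// }
/// ```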
pub struct RingBuffers<'a, R: RingBuffersRole, F, T = Descriptor> {
    free: RingBuffer<'a, R::FreeRole, T>,
    used: RingBuffer<'a, R::UsedRole, T>,
    notify: F,
}

impl<'a, R: RingBuffersRole, F, T: Copy> RingBuffers<'a, R, F, T> {
    pub fn new(
        free: RingBuffer<'a, R::FreeRole, T>,
        used: RingBuffer<'a, R::UsedRole, T>,
        notify: F,
    ) -> Self {
        Self { free, used, notify }
    }

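    /// Wraps two raw rings, initializing each according to the
    /// [`InitializationStrategy`] that the role `R` uses by default.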
    pub fn from_ptrs_using_default_initialization_strategy_for_role(
        free: SharedMemoryRef<'a, RawRingBuffer<T>>,
        used: SharedMemoryRef<'a, RawRingBuffer<T>>,
        notify: F,
    ) -> Self {
        let initialization_strategy = R::default_initialization_strategy();
        Self::new(
            RingBuffer::new(free, initialization_strategy),
            RingBuffer::new(used, initialization_strategy),
            notify,
        )
    }

    pub fn free(&self) -> &RingBuffer<'a, R::FreeRole, T> {
        &self.free
    }

    pub fn used(&self) -> &RingBuffer<'a, R::UsedRole, T> {
        &self.used
    }

    pub fn free_mut(&mut self) -> &mut RingBuffer<'a, R::FreeRole, T> {
        &mut self.free
    }

    pub fn used_mut(&mut self) -> &mut RingBuffer<'a, R::UsedRole, T> {
        &mut self.used
    }
}

impl<U, R: RingBuffersRole, F: Fn() -> U, T> RingBuffers<'_, R, F, T> {
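    /// Invokes the notification callback through a shared reference.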
    pub fn notify(&self) -> U {
        (self.notify)()
    }
}

impl<U, R: RingBuffersRole, F: FnMut() -> U, T> RingBuffers<'_, R, F, T> {
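    /// Invokes the notification callback, which may mutate its captured state.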
    pub fn notify_mut(&mut self) -> U {
        (self.notify)()
    }
}

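/// The in-memory layout of a single ring as shared with the peer: two atomic
/// indices followed by the descriptor slots. `#[repr(C)]` keeps the layout
/// identical on both sides of the shared memory region.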
#[repr(C)]
#[derive(Debug)]
pub struct RawRingBuffer<T = Descriptor> {
    pub write_index: AtomicU32,
    pub read_index: AtomicU32,
    pub descriptors: [T; RING_BUFFER_SIZE],
}

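/// One endpoint of a single ring. The role `R` (either [`Read`] or [`Write`])
/// selects which operations are available. Both indices are cached locally
/// (`stored_*`) so that each observation of the peer's index can be checked
/// for consistency before being trusted.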
pub struct RingBuffer<'a, R: RingBufferRole, T = Descriptor> {
    inner: SharedMemoryRef<'a, RawRingBuffer<T>>,
    stored_write_index: Wrapping<u32>,
    stored_read_index: Wrapping<u32>,
    _phantom: PhantomData<R>,
}

impl<'a, R: RingBufferRole, T: Copy> RingBuffer<'a, R, T> {
    const SIZE: usize = RING_BUFFER_SIZE;

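    /// Wraps a raw ring, either adopting the indices already present in
    /// shared memory (`ReadState`), trusting caller-supplied indices
    /// (`UseState`), or writing caller-supplied indices out to shared memory
    /// before use (`UseAndWriteState`).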
    pub fn new(
        ptr: SharedMemoryRef<'a, RawRingBuffer<T>>,
        initialization_strategy: InitializationStrategy,
    ) -> Self {
        let mut inner = ptr;
        let initial_state = match initialization_strategy {
            InitializationStrategy::ReadState => {
                let ptr = inner.as_ptr();
                InitialState {
                    write_index: map_field!(ptr.write_index).read().into_inner(),
                    read_index: map_field!(ptr.read_index).read().into_inner(),
                }
            }
            InitializationStrategy::UseState(initial_state) => initial_state,
            InitializationStrategy::UseAndWriteState(initial_state) => {
                let ptr = inner.as_mut_ptr();
                map_field!(ptr.write_index).write(initial_state.write_index.into());
                map_field!(ptr.read_index).write(initial_state.read_index.into());
                initial_state
            }
        };
        Self {
            inner,
            stored_write_index: Wrapping(initial_state.write_index),
            stored_read_index: Wrapping(initial_state.read_index),
            _phantom: PhantomData,
        }
    }

    const fn role(&self) -> RingBufferRoleValue {
        R::ROLE
    }

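    /// Usable capacity. One slot is kept empty so that a full ring and an
    /// empty ring are distinguishable from the indices alone.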
    pub const fn capacity(&self) -> usize {
        Self::SIZE - 1
    }

    fn write_index(&mut self) -> SharedMemoryPtr<'_, AtomicU32> {
        let ptr = self.inner.as_mut_ptr();
        map_field!(ptr.write_index)
    }

    fn read_index(&mut self) -> SharedMemoryPtr<'_, AtomicU32> {
        let ptr = self.inner.as_mut_ptr();
        map_field!(ptr.read_index)
    }

    fn descriptor(&mut self, index: Wrapping<u32>) -> SharedMemoryPtr<'_, T> {
        let residue = self.residue(index);
        let ptr = self.inner.as_mut_ptr();
        map_field!(ptr.descriptors).as_slice().index(residue)
    }

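    // Re-reads the peer-owned write index. The writer may only ever add
    // filled slots, so an observation in which the number of filled slots
    // shrank (relative to our stored read index) can only mean the peer
    // misbehaved.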
    fn update_stored_write_index(&mut self) -> Result<(), PeerMisbehaviorError> {
        debug_assert!(self.role().is_read());
        let observed_write_index = Wrapping(self.write_index().read().into_inner());
        let observed_num_filled_slots = self.residue(observed_write_index - self.stored_read_index);
        if observed_num_filled_slots < self.stored_num_filled_slots() {
            return Err(PeerMisbehaviorError::new());
        }
        self.stored_write_index = observed_write_index;
        Ok(())
    }

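    // Re-reads the peer-owned read index. Symmetrically, the reader may only
    // ever consume filled slots, so from the writer's perspective the number
    // of filled slots must not grow.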
    fn update_stored_read_index(&mut self) -> Result<(), PeerMisbehaviorError> {
        debug_assert!(self.role().is_write());
        let observed_read_index = Wrapping(self.read_index().read().into_inner());
        let observed_num_filled_slots = self.residue(self.stored_write_index - observed_read_index);
        if observed_num_filled_slots > self.stored_num_filled_slots() {
            return Err(PeerMisbehaviorError::new());
        }
        self.stored_read_index = observed_read_index;
        Ok(())
    }

    fn stored_num_filled_slots(&mut self) -> usize {
        self.residue(self.stored_write_index - self.stored_read_index)
    }

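    /// Refreshes the locally cached view of the peer-owned index and returns
    /// the number of filled slots, or an error if the peer's index moved in a
    /// direction the protocol forbids.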
    pub fn num_filled_slots(&mut self) -> Result<usize, PeerMisbehaviorError> {
        match self.role() {
            RingBufferRoleValue::Read => self.update_stored_write_index(),
            RingBufferRoleValue::Write => self.update_stored_read_index(),
        }?;
        Ok(self.stored_num_filled_slots())
    }

    pub fn num_empty_slots(&mut self) -> Result<usize, PeerMisbehaviorError> {
        Ok(self.capacity() - self.num_filled_slots()?)
    }

    pub fn is_empty(&mut self) -> Result<bool, PeerMisbehaviorError> {
        Ok(self.num_filled_slots()? == 0)
    }

    pub fn is_full(&mut self) -> Result<bool, PeerMisbehaviorError> {
        Ok(self.num_empty_slots()? == 0)
    }

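    // Maps a free-running `Wrapping<u32>` index to a slot offset. This stays
    // consistent across `u32` wrap-around only because `Self::SIZE` (512) is
    // a power of two and therefore divides `2^32`.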
    fn residue(&self, index: Wrapping<u32>) -> usize {
        usize::try_from(index.0).unwrap() % Self::SIZE
    }
}

impl<T: Copy + FromBytes + IntoBytes> RingBuffer<'_, Write, T> {
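    /// Enqueues a descriptor and immediately publishes the new write index.
    ///
    /// The outer `Result` reports peer misbehavior; the inner `Result` hands
    /// the descriptor back if the ring is full.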
    pub fn enqueue_and_commit(&mut self, desc: T) -> Result<Result<(), T>, PeerMisbehaviorError> {
        self.enqueue(desc, true)
    }

    pub fn enqueue_without_committing(
        &mut self,
        desc: T,
    ) -> Result<Result<(), T>, PeerMisbehaviorError> {
        self.enqueue(desc, false)
    }

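    /// Enqueues a descriptor, making it visible to the peer only if `commit`
    /// is true; otherwise the caller must call [`Self::commit`] later to
    /// publish a batch of enqueued descriptors at once.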
    pub fn enqueue(
        &mut self,
        desc: T,
        commit: bool,
    ) -> Result<Result<(), T>, PeerMisbehaviorError> {
        if self.is_full()? {
            return Ok(Err(desc));
        }
        self.force_enqueue(desc, commit);
        Ok(Ok(()))
    }

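    /// Writes the descriptor into the next slot without checking for space.
    /// The caller must ensure the ring is not full.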
    pub fn force_enqueue(&mut self, desc: T, commit: bool) {
        self.descriptor(self.stored_write_index).write(desc);
        self.stored_write_index += 1;
        if commit {
            self.commit();
        }
    }

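    /// Publishes every descriptor enqueued so far by storing the cached write
    /// index with `Release` ordering, ensuring the descriptor writes become
    /// visible to the peer no later than the index update.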
    pub fn commit(&mut self) {
        self.expose_write_index();
    }

    fn expose_write_index(&mut self) {
        let write_index = self.stored_write_index.0;
        self.write_index()
            .atomic_store(write_index, Ordering::Release);
    }
}

impl<T: Copy + FromBytes + IntoBytes> RingBuffer<'_, Read, T> {
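    /// Dequeues the next descriptor, returning `Ok(None)` if the ring is
    /// empty. A typical drain loop, sketched assuming a `Read`-role ring
    /// `ring` and a hypothetical `handle` function:
    ///
    /// ```ignore
    /// while let Some(desc) = ring.dequeue()? {
    ///     handle(desc);
    /// }
    /// ```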
    pub fn dequeue(&mut self) -> Result<Option<T>, PeerMisbehaviorError> {
        if self.is_empty()? {
            return Ok(None);
        }
        Ok(Some(self.force_dequeue()))
    }

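    /// Reads the next descriptor and publishes the updated read index without
    /// checking whether the ring is empty. The caller must ensure it is not.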
    pub fn force_dequeue(&mut self) -> T {
        let desc = self.descriptor(self.stored_read_index).read();
        self.stored_read_index += 1;
        self.expose_read_index();
        desc
    }

    fn expose_read_index(&mut self) {
        let read_index = self.stored_read_index.0;
        self.read_index()
            .atomic_store(read_index, Ordering::Release);
    }
}

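/// How a [`RingBuffer`] obtains its initial indices: by reading whatever is
/// already in shared memory, by trusting values supplied by the caller, or by
/// both using and writing back caller-supplied values.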
#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub enum InitializationStrategy {
    ReadState,
    UseState(InitialState),
    UseAndWriteState(InitialState),
}

impl Default for InitializationStrategy {
    fn default() -> Self {
        Self::ReadState
    }
}

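/// A pair of initial index values for use with
/// [`InitializationStrategy::UseState`] and
/// [`InitializationStrategy::UseAndWriteState`].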
#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Default)]
pub struct InitialState {
    write_index: u32,
    read_index: u32,
}

impl InitialState {
    pub fn new(write_index: u32, read_index: u32) -> Self {
        Self {
            write_index,
            read_index,
        }
    }
}