// sel4_newlib/heap.rs
use core::cell::UnsafeCell;
use core::ffi::{c_int, c_void};
use core::sync::atomic::{AtomicIsize, Ordering};

use sel4_panicking_env::abort;

use crate::errno;
/// Zero-initialized byte region that backs the static heap.
///
/// The 4 KiB alignment puts the region on a page boundary.
#[repr(align(4096))]
struct BackingMemory<const N: usize>(UnsafeCell<[u8; N]>);

// SAFETY: the bytes are only ever reached through the raw pointer returned
// by `start()`; coordinating those accesses is the caller's responsibility
// (the surrounding allocator reserves disjoint ranges atomically).
unsafe impl<const N: usize> Sync for BackingMemory<N> {}

impl<const N: usize> BackingMemory<N> {
    /// Constructs the region with every byte set to zero.
    const fn new() -> Self {
        Self(UnsafeCell::new([0u8; N]))
    }

    /// Raw pointer to the first byte of the region.
    const fn start(&self) -> *mut u8 {
        UnsafeCell::get(&self.0) as *mut u8
    }

    /// Capacity of the region in bytes.
    const fn size(&self) -> usize {
        N
    }
}
34
/// Fixed-capacity bump heap: a static backing buffer plus an atomic
/// "program break" offset. Used to implement newlib's `_sbrk` hook
/// (see `declare_sbrk_with_static_heap!`).
#[doc(hidden)]
pub struct StaticHeap<const N: usize> {
    // Page-aligned storage the break moves through.
    memory: BackingMemory<N>,
    // Current break, as a byte offset from the start of `memory`.
    watermark: AtomicIsize,
}
40
// SAFETY: `watermark` is an atomic, and `BackingMemory` is `Sync`;
// concurrent `sbrk` callers reserve disjoint byte ranges via `fetch_add`,
// so shared references to the heap are safe across threads.
unsafe impl<const N: usize> Sync for StaticHeap<N> {}
42
43impl<const N: usize> StaticHeap<N> {
44 pub const fn new() -> Self {
45 Self {
46 memory: BackingMemory::new(),
47 watermark: AtomicIsize::new(0),
48 }
49 }
50
51 pub fn sbrk(&self, incr: c_int) -> *mut c_void {
53 #[cfg(feature = "log")]
54 {
55 log::trace!("_sbrk({})", incr);
56 }
57 let incr = incr.try_into().unwrap_or_else(|_| abort!());
58 let old = self.watermark.fetch_add(incr, Ordering::SeqCst);
59 let new = old + incr;
60 if new < 0 {
61 abort!("program break below data segment start")
62 }
63 if new > self.memory.size().try_into().unwrap_or_else(|_| abort!()) {
64 self.watermark.fetch_sub(incr, Ordering::SeqCst);
65 errno::set_errno(errno::values::ENOMEM);
66 return usize::MAX as *mut c_void;
67 }
68 self.memory.start().wrapping_offset(old).cast::<c_void>()
69 }
70}
71
72impl<const N: usize> Default for StaticHeap<N> {
73 fn default() -> Self {
74 Self::new()
75 }
76}
77
/// Defines the `_sbrk` symbol newlib uses to grow the heap, backed by a
/// process-wide [`StaticHeap`] of `$n` bytes.
///
/// `$n` is any constant expression evaluating to the heap capacity in
/// bytes. Invoke once per binary; duplicate invocations collide on the
/// unmangled `_sbrk` symbol.
#[macro_export]
macro_rules! declare_sbrk_with_static_heap {
    ($n:expr) => {
        // newlib resolves the plain C symbol `_sbrk` at link time, hence
        // no_mangle + extern "C".
        #[no_mangle]
        extern "C" fn _sbrk(incr: core::ffi::c_int) -> *mut core::ffi::c_void {
            // Single static heap instance shared by every call.
            static HEAP: $crate::StaticHeap<{ $n }> = $crate::StaticHeap::new();
            HEAP.sbrk(incr)
        }
    };
}