sel4_newlib/heap.rs

//
// Copyright 2023, Colias Group, LLC
//
// SPDX-License-Identifier: BSD-2-Clause
//

use core::cell::UnsafeCell;
use core::ffi::{c_int, c_void};
use core::sync::atomic::{AtomicIsize, Ordering};

use sel4_panicking_env::abort;

use crate::errno;

// NOTE(rustc_wishlist) use SyncUnsafeCell once #![feature(sync_unsafe_cell)] stabilizes
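// Statically allocated backing storage for the heap. `UnsafeCell` provides the
// interior mutability; `Sync` is asserted manually just below.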
#[repr(align(4096))] // no real reason for this
struct BackingMemory<const N: usize>(UnsafeCell<[u8; N]>);

unsafe impl<const N: usize> Sync for BackingMemory<N> {}

impl<const N: usize> BackingMemory<N> {
    const fn new() -> Self {
        Self(UnsafeCell::new([0; N]))
    }

    const fn start(&self) -> *mut u8 {
        self.0.get().cast()
    }

    const fn size(&self) -> usize {
        N
    }
}

#[doc(hidden)]
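/// A fixed-size block of static memory plus a watermark tracking the current
/// program break; this is the storage behind the `_sbrk` implementation
/// declared by `declare_sbrk_with_static_heap!`.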
pub struct StaticHeap<const N: usize> {
    memory: BackingMemory<N>,
    watermark: AtomicIsize,
}

unsafe impl<const N: usize> Sync for StaticHeap<N> {}

impl<const N: usize> StaticHeap<N> {
    pub const fn new() -> Self {
        Self {
            memory: BackingMemory::new(),
            watermark: AtomicIsize::new(0),
        }
    }

    // TODO handle overflowing atomic
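    // Advances the program break by `incr` bytes and returns the old break.
    // On exhaustion the watermark is rolled back, `errno` is set to ENOMEM,
    // and `(void *)-1` is returned, following the usual `sbrk` error convention.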
    pub fn sbrk(&self, incr: c_int) -> *mut c_void {
        #[cfg(feature = "log")]
        {
            log::trace!("_sbrk({})", incr);
        }
        let incr = incr.try_into().unwrap_or_else(|_| abort!());
        let old = self.watermark.fetch_add(incr, Ordering::SeqCst);
        let new = old + incr;
        if new < 0 {
            abort!("program break below data segment start")
        }
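        // The requested break would run past the end of the backing memory:
        // undo the bump and report ENOMEM through the sbrk error value.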
        if new > self.memory.size().try_into().unwrap_or_else(|_| abort!()) {
            self.watermark.fetch_sub(incr, Ordering::SeqCst);
            errno::set_errno(errno::values::ENOMEM);
            return usize::MAX as *mut c_void;
        }
        self.memory.start().wrapping_offset(old).cast::<c_void>()
    }
}

impl<const N: usize> Default for StaticHeap<N> {
    fn default() -> Self {
        Self::new()
    }
}

#[macro_export]
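/// Declares a newlib-compatible `_sbrk` symbol backed by a `StaticHeap` of `$n` bytes.
///
/// A minimal usage sketch; the heap size below is illustrative, and the crate
/// name is assumed from this file's path:
///
/// ```ignore
/// sel4_newlib::declare_sbrk_with_static_heap!(64 * 1024);
/// ```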
macro_rules! declare_sbrk_with_static_heap {
    ($n:expr) => {
        #[no_mangle]
        extern "C" fn _sbrk(incr: core::ffi::c_int) -> *mut core::ffi::c_void {
            static HEAP: $crate::StaticHeap<{ $n }> = $crate::StaticHeap::new();
            HEAP.sbrk(incr)
        }
    };
}