sel4_dlmalloc/
lib.rs

//
// Copyright 2023, Colias Group, LLC
//
// SPDX-License-Identifier: BSD-2-Clause
//

#![no_std]
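
//! A [`GlobalAlloc`] implementation backed by the `dlmalloc` crate, drawing
//! its memory from a fixed heap region and synchronized by a
//! [`lock_api::RawMutex`].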

use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::ptr;
use core::sync::atomic::{AtomicUsize, Ordering};

use dlmalloc::{Allocator, Dlmalloc};
use lock_api::{Mutex, RawMutex};

// TODO implement core::alloc::Allocator for StaticDlmalloc once stable

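/// A [`GlobalAlloc`] backed by dlmalloc over a fixed memory region whose
/// bounds are supplied at construction time.
///
/// A minimal usage sketch (illustrative; `SomeRawMutex` stands in for
/// whatever [`RawMutex`] implementation the target environment provides):
///
/// ```ignore
/// static HEAP: StaticHeap<{ 64 * 1024 }> = StaticHeap::new();
///
/// #[global_allocator]
/// static ALLOCATOR: StaticDlmalloc<SomeRawMutex> =
///     StaticDlmalloc::new(HEAP.bounds());
/// ```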
pub struct StaticDlmalloc<R>(
    SyncDlmalloc<R, SimpleDlmallocAllocatorWrapper<StaticDlmallocAllocator>>,
);

impl<R> StaticDlmalloc<R> {
    pub const fn new_with_raw_mutex(raw_mutex: R, bounds: StaticHeapBounds) -> Self {
        Self(SyncDlmalloc::new(
            raw_mutex,
            SimpleDlmallocAllocatorWrapper::new(StaticDlmallocAllocator::new(bounds)),
        ))
    }
}

impl<R: RawMutex> StaticDlmalloc<R> {
    pub const fn new(bounds: StaticHeapBounds) -> Self {
        Self::new_with_raw_mutex(R::INIT, bounds)
    }
}

impl<R: RawMutex> StaticDlmalloc<R> {
    /// Returns a reference to the underlying raw mutex.
    ///
    /// # Safety
    ///
    /// The caller must not violate the allocator's locking discipline.
    pub unsafe fn raw_mutex(&self) -> &R {
        unsafe { self.0.raw_mutex() }
    }
}

unsafe impl<R: RawMutex> GlobalAlloc for StaticDlmalloc<R> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc(layout) }
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc_zeroed(layout) }
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe { self.0.dealloc(ptr, layout) }
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        unsafe { self.0.realloc(ptr, layout, new_size) }
    }
}

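/// Like [`StaticDlmalloc`], except that the heap bounds are supplied at
/// runtime via [`set_bounds`](Self::set_bounds) rather than at
/// construction. Until then, allocations fail.
///
/// A minimal usage sketch (illustrative; `SomeRawMutex` again stands in for
/// a [`RawMutex`] implementation provided elsewhere):
///
/// ```ignore
/// #[global_allocator]
/// static ALLOCATOR: DeferredStaticDlmalloc<SomeRawMutex> =
///     DeferredStaticDlmalloc::new();
///
/// fn init_heap(start: *mut u8, size: usize) {
///     ALLOCATOR
///         .set_bounds(StaticHeapBounds::new(start, size))
///         .unwrap();
/// }
/// ```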
pub struct DeferredStaticDlmalloc<R>(
    SyncDlmalloc<
        R,
        SimpleDlmallocAllocatorWrapper<DeferredStaticDlmallocAllocator<StaticDlmallocAllocator>>,
    >,
);

impl<R> DeferredStaticDlmalloc<R> {
    pub const fn new_with_raw_mutex(raw_mutex: R) -> Self {
        Self(SyncDlmalloc::new(
            raw_mutex,
            SimpleDlmallocAllocatorWrapper::new(DeferredStaticDlmallocAllocator::new()),
        ))
    }
}

impl<R: RawMutex> DeferredStaticDlmalloc<R> {
    pub const fn new() -> Self {
        Self::new_with_raw_mutex(R::INIT)
    }
}

impl<R: RawMutex> Default for DeferredStaticDlmalloc<R> {
    fn default() -> Self {
        Self::new()
    }
}

impl<R: RawMutex> DeferredStaticDlmalloc<R> {
    /// Returns a reference to the underlying raw mutex.
    ///
    /// # Safety
    ///
    /// The caller must not violate the allocator's locking discipline.
    pub unsafe fn raw_mutex(&self) -> &R {
        unsafe { self.0.raw_mutex() }
    }

    /// Supplies the heap bounds, enabling allocation.
    ///
    /// Fails if the bounds have already been set.
    pub fn set_bounds(&self, bounds: StaticHeapBounds) -> Result<(), BoundsAlreadySetError> {
        self.0
            .dlmalloc
            .lock()
            .allocator_mut()
            .0
            .set(StaticDlmallocAllocator::new(bounds))
    }
}

unsafe impl<R: RawMutex> GlobalAlloc for DeferredStaticDlmalloc<R> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc(layout) }
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc_zeroed(layout) }
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe { self.0.dealloc(ptr, layout) }
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        unsafe { self.0.realloc(ptr, layout, new_size) }
    }
}

// // //

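/// A one-way "watermark" (bump) allocator over a fixed region: it only ever
/// hands memory out to dlmalloc and never reclaims it; reuse happens
/// entirely within dlmalloc itself.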
struct StaticDlmallocAllocator {
    bounds: StaticHeapBounds,
    watermark: AtomicUsize,
}

impl StaticDlmallocAllocator {
    const fn new(bounds: StaticHeapBounds) -> Self {
        Self {
            bounds,
            watermark: AtomicUsize::new(0),
        }
    }
}

impl SimpleDlmallocAllocator for StaticDlmallocAllocator {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8> {
        // Atomically advance the watermark by `size`, failing if the
        // addition overflows or would exceed the region's size.
        let old_watermark = self
            .watermark
            .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |old_watermark| {
                let new_watermark = old_watermark.checked_add(size)?;
                if new_watermark > self.bounds.size() {
                    return None;
                }
                Some(new_watermark)
            })
            .ok()?;
        // The new allocation begins at the old watermark.
        Some(self.bounds.start().wrapping_add(old_watermark))
    }
}

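/// Defers construction of the inner allocator until its heap bounds become
/// available; until then, every allocation request fails.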
struct DeferredStaticDlmallocAllocator<T> {
    state: Option<T>,
}

/// Error returned by [`DeferredStaticDlmalloc::set_bounds`] when the heap
/// bounds have already been set.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct BoundsAlreadySetError(());

impl<T> DeferredStaticDlmallocAllocator<T> {
    const fn new() -> Self {
        Self { state: None }
    }

    fn set(&mut self, state: T) -> Result<(), BoundsAlreadySetError> {
        if self.state.is_none() {
            self.state = Some(state);
            Ok(())
        } else {
            Err(BoundsAlreadySetError(()))
        }
    }
}

impl<T: SimpleDlmallocAllocator> SimpleDlmallocAllocator for DeferredStaticDlmallocAllocator<T> {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8> {
        self.state
            .as_ref()
            .and_then(|state| state.alloc_simple(size))
    }
}

// // //

struct SyncDlmalloc<R, T> {
    dlmalloc: Mutex<R, Dlmalloc<T>>,
}

impl<R, T> SyncDlmalloc<R, T> {
    const fn new(raw_mutex: R, state: T) -> Self {
        Self {
            dlmalloc: Mutex::from_raw(raw_mutex, Dlmalloc::new_with_allocator(state)),
        }
    }
}

impl<R: RawMutex, T> SyncDlmalloc<R, T> {
    #[allow(clippy::missing_safety_doc)]
    unsafe fn raw_mutex(&self) -> &R {
        unsafe { self.dlmalloc.raw() }
    }
}

unsafe impl<R: RawMutex, T: Allocator> GlobalAlloc for SyncDlmalloc<R, T> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        unsafe { self.dlmalloc.lock().malloc(layout.size(), layout.align()) }
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        unsafe { self.dlmalloc.lock().calloc(layout.size(), layout.align()) }
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe {
            self.dlmalloc
                .lock()
                .free(ptr, layout.size(), layout.align())
        }
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        unsafe {
            self.dlmalloc
                .lock()
                .realloc(ptr, layout.size(), layout.align(), new_size)
        }
    }
}

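/// A reduced backend interface for dlmalloc: implementors only ever provide
/// fresh memory, and freeing, remapping, and shrinking are unsupported.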
trait SimpleDlmallocAllocator: Send {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8>;
}

struct SimpleDlmallocAllocatorWrapper<T>(T);

impl<T> SimpleDlmallocAllocatorWrapper<T> {
    const fn new(inner: T) -> Self {
        Self(inner)
    }
}

unsafe impl<T: SimpleDlmallocAllocator> Allocator for SimpleDlmallocAllocatorWrapper<T> {
    fn alloc(&self, size: usize) -> (*mut u8, usize, u32) {
        match self.0.alloc_simple(size) {
            Some(start) => (start, size, 0),
            None => (ptr::null_mut(), 0, 0),
        }
    }

    // In-place remapping is unsupported; dlmalloc falls back to
    // allocate-and-copy.
    fn remap(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize, _can_move: bool) -> *mut u8 {
        ptr::null_mut()
    }

    fn free_part(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize) -> bool {
        false
    }

    // Memory is never returned to the backing allocator; dlmalloc retains
    // and reuses everything it has been given.
    fn free(&self, _ptr: *mut u8, _size: usize) -> bool {
        false
    }

    fn can_release_part(&self, _flags: u32) -> bool {
        false
    }

    // Assumes the backing region starts zeroed (true for `StaticHeap`) and
    // is handed out only once by the watermark allocator.
    fn allocates_zeros(&self) -> bool {
        true
    }

    fn page_size(&self) -> usize {
        // TODO should depend on configuration
        4096
    }
}

// // //

/// The bounds of a contiguous region of memory to be used as a heap.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct StaticHeapBounds {
    ptr: *mut u8,
    size: usize,
}

unsafe impl Send for StaticHeapBounds {}

impl StaticHeapBounds {
    pub const fn new(ptr: *mut u8, size: usize) -> Self {
        Self { ptr, size }
    }

    pub const fn start(&self) -> *mut u8 {
        self.ptr
    }

    pub fn end(&self) -> *mut u8 {
        self.start().wrapping_add(self.size())
    }

    pub const fn size(&self) -> usize {
        self.size
    }
}

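/// A statically-sized, zero-initialized heap region.
///
/// The `A` type parameter controls alignment: with `#[repr(C)]`, the
/// zero-sized `[A; 0]` field raises the struct's alignment to that of `A`
/// without adding any size. For example (illustrative), a page-aligned heap
/// could be declared as:
///
/// ```ignore
/// #[repr(align(4096))]
/// struct PageAligned;
///
/// static HEAP: StaticHeap<{ 64 * 1024 }, PageAligned> = StaticHeap::new();
/// ```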
#[repr(C)]
pub struct StaticHeap<const N: usize, A = ()> {
    _alignment: [A; 0],
    space: UnsafeCell<[u8; N]>,
}

unsafe impl<const N: usize, A> Sync for StaticHeap<N, A> {}

impl<const N: usize, A> StaticHeap<N, A> {
    pub const fn new() -> Self {
        Self {
            _alignment: [],
            space: UnsafeCell::new([0; N]),
        }
    }

    pub const fn bounds(&self) -> StaticHeapBounds {
        StaticHeapBounds::new(self.space.get().cast(), N)
    }
}

impl<const N: usize, A> Default for StaticHeap<N, A> {
    fn default() -> Self {
        Self::new()
    }
}