#![no_std]

use core::alloc::{GlobalAlloc, Layout};
use core::cell::{RefCell, UnsafeCell};
use core::ptr;
use core::sync::atomic::{AtomicUsize, Ordering};

use dlmalloc::{Allocator, Dlmalloc};
use lock_api::{Mutex, RawMutex};

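/// A [`GlobalAlloc`] backed by a fixed region of static memory, with the heap
/// bounds supplied at construction time and all access serialized through a
/// [`RawMutex`] of the caller's choosing.
///
/// A minimal usage sketch (`RawMutexImpl` is a placeholder for whichever
/// [`RawMutex`] implementation the target platform provides, e.g. a spinlock):
///
/// ```ignore
/// static HEAP: StaticHeap<{ 1024 * 1024 }> = StaticHeap::new();
///
/// #[global_allocator]
/// static ALLOCATOR: StaticDlmalloc<RawMutexImpl> =
///     StaticDlmalloc::new(HEAP.bounds());
/// ```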
pub struct StaticDlmalloc<R>(
    SyncDlmalloc<R, SimpleDlmallocAllocatorWrapper<StaticDlmallocAllocator>>,
);

impl<R> StaticDlmalloc<R> {
    pub const fn new_with_raw_mutex(raw_mutex: R, bounds: StaticHeapBounds) -> Self {
        Self(SyncDlmalloc::new(
            raw_mutex,
            SimpleDlmallocAllocatorWrapper::new(StaticDlmallocAllocator::new(bounds)),
        ))
    }
}

impl<R: RawMutex> StaticDlmalloc<R> {
    pub const fn new(bounds: StaticHeapBounds) -> Self {
        Self::new_with_raw_mutex(R::INIT, bounds)
    }

    #[allow(clippy::missing_safety_doc)]
    pub unsafe fn raw_mutex(&self) -> &R {
        self.0.raw_mutex()
    }
}

unsafe impl<R: RawMutex> GlobalAlloc for StaticDlmalloc<R> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0.alloc(layout)
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        self.0.alloc_zeroed(layout)
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.0.dealloc(ptr, layout)
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        self.0.realloc(ptr, layout, new_size)
    }
}

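/// Like [`StaticDlmalloc`], except that the heap bounds are supplied after
/// construction via [`set_bounds`](Self::set_bounds) rather than at
/// construction time. Until the bounds are set, every allocation fails by
/// returning a null pointer.
///
/// A minimal usage sketch (`RawMutexImpl` is again a placeholder, and
/// `init_heap` is assumed to run before the first allocation):
///
/// ```ignore
/// static HEAP: StaticHeap<{ 1024 * 1024 }> = StaticHeap::new();
///
/// #[global_allocator]
/// static ALLOCATOR: DeferredStaticDlmalloc<RawMutexImpl> =
///     DeferredStaticDlmalloc::new();
///
/// fn init_heap() {
///     ALLOCATOR.set_bounds(HEAP.bounds()).unwrap();
/// }
/// ```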
pub struct DeferredStaticDlmalloc<R>(
    SyncDlmalloc<
        R,
        SimpleDlmallocAllocatorWrapper<DeferredStaticDlmallocAllocator<StaticDlmallocAllocator>>,
    >,
);

impl<R> DeferredStaticDlmalloc<R> {
    pub const fn new_with_raw_mutex(raw_mutex: R) -> Self {
        Self(SyncDlmalloc::new(
            raw_mutex,
            SimpleDlmallocAllocatorWrapper::new(DeferredStaticDlmallocAllocator::new()),
        ))
    }
}

impl<R: RawMutex> DeferredStaticDlmalloc<R> {
    pub const fn new() -> Self {
        Self::new_with_raw_mutex(R::INIT)
    }
}

impl<R: RawMutex> Default for DeferredStaticDlmalloc<R> {
    fn default() -> Self {
        Self::new()
    }
}

impl<R: RawMutex> DeferredStaticDlmalloc<R> {
    #[allow(clippy::missing_safety_doc)]
    pub unsafe fn raw_mutex(&self) -> &R {
        self.0.raw_mutex()
    }

    pub fn set_bounds(&self, bounds: StaticHeapBounds) -> Result<(), BoundsAlreadySetError> {
        self.0
            .dlmalloc
            .lock()
            .allocator()
            .0
            .set(StaticDlmallocAllocator::new(bounds))
    }
}

unsafe impl<R: RawMutex> GlobalAlloc for DeferredStaticDlmalloc<R> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0.alloc(layout)
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        self.0.alloc_zeroed(layout)
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.0.dealloc(ptr, layout)
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        self.0.realloc(ptr, layout, new_size)
    }
}

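/// Hands out successive chunks of the static heap region by atomically
/// bumping a watermark. Memory is never returned at this level (see the
/// stubbed `free*` methods on [`SimpleDlmallocAllocatorWrapper`]); dlmalloc
/// itself recycles freed blocks within the chunks it has already been given.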
struct StaticDlmallocAllocator {
    bounds: StaticHeapBounds,
    watermark: AtomicUsize,
}

impl StaticDlmallocAllocator {
    const fn new(bounds: StaticHeapBounds) -> Self {
        Self {
            bounds,
            watermark: AtomicUsize::new(0),
        }
    }
}

impl SimpleDlmallocAllocator for StaticDlmallocAllocator {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8> {
        // Atomically reserve `size` bytes by advancing the watermark, failing
        // if the request would overflow or run past the end of the region.
        let old_watermark = self
            .watermark
            .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |old_watermark| {
                let new_watermark = old_watermark.checked_add(size)?;
                if new_watermark > self.bounds.size() {
                    return None;
                }
                Some(new_watermark)
            })
            .ok()?;
        Some(
            self.bounds
                .start()
                .wrapping_offset(old_watermark.try_into().unwrap()),
        )
    }
}

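/// Wraps an allocator that might not exist yet: `alloc_simple` returns `None`
/// until [`set`](Self::set) installs the inner allocator. The `RefCell` is
/// sufficient here because this type is only ever reached through
/// [`SyncDlmalloc`]'s mutex.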
struct DeferredStaticDlmallocAllocator<T> {
    state: RefCell<Option<T>>,
}

/// Error returned by [`DeferredStaticDlmalloc::set_bounds`] when the heap
/// bounds have already been set.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct BoundsAlreadySetError(());

impl<T> DeferredStaticDlmallocAllocator<T> {
    const fn new() -> Self {
        Self {
            state: RefCell::new(None),
        }
    }

    fn set(&self, state: T) -> Result<(), BoundsAlreadySetError> {
        let mut state_opt = self.state.borrow_mut();
        if state_opt.is_none() {
            *state_opt = Some(state);
            Ok(())
        } else {
            Err(BoundsAlreadySetError(()))
        }
    }
}

impl<T: SimpleDlmallocAllocator> SimpleDlmallocAllocator for DeferredStaticDlmallocAllocator<T> {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8> {
        self.state
            .borrow()
            .as_ref()
            .and_then(|state| state.alloc_simple(size))
    }
}

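/// Pairs a [`Dlmalloc`] instance with a [`lock_api`] mutex so that dlmalloc's
/// `&mut self` operations can be exposed through the shared-reference
/// [`GlobalAlloc`] interface.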
struct SyncDlmalloc<R, T> {
    dlmalloc: Mutex<R, Dlmalloc<T>>,
}

impl<R, T> SyncDlmalloc<R, T> {
    const fn new(raw_mutex: R, state: T) -> Self {
        Self {
            dlmalloc: Mutex::from_raw(raw_mutex, Dlmalloc::new_with_allocator(state)),
        }
    }
}

impl<R: RawMutex, T> SyncDlmalloc<R, T> {
    #[allow(clippy::missing_safety_doc)]
    unsafe fn raw_mutex(&self) -> &R {
        self.dlmalloc.raw()
    }
}

unsafe impl<R: RawMutex, T: Allocator> GlobalAlloc for SyncDlmalloc<R, T> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.dlmalloc.lock().malloc(layout.size(), layout.align())
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        self.dlmalloc.lock().calloc(layout.size(), layout.align())
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dlmalloc
            .lock()
            .free(ptr, layout.size(), layout.align())
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        self.dlmalloc
            .lock()
            .realloc(ptr, layout.size(), layout.align(), new_size)
    }
}

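/// A reduced allocation interface: an implementor can only hand out `size`
/// fresh bytes, or fail. [`SimpleDlmallocAllocatorWrapper`] adapts this to
/// dlmalloc's full [`Allocator`] trait by stubbing out the remap, partial
/// free, and release operations.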
trait SimpleDlmallocAllocator: Send {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8>;
}

struct SimpleDlmallocAllocatorWrapper<T>(T);

impl<T> SimpleDlmallocAllocatorWrapper<T> {
    const fn new(inner: T) -> Self {
        Self(inner)
    }
}

unsafe impl<T: SimpleDlmallocAllocator> Allocator for SimpleDlmallocAllocatorWrapper<T> {
    fn alloc(&self, size: usize) -> (*mut u8, usize, u32) {
        match self.0.alloc_simple(size) {
            Some(start) => (start, size, 0),
            None => (ptr::null_mut(), 0, 0),
        }
    }

    fn remap(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize, _can_move: bool) -> *mut u8 {
        ptr::null_mut()
    }

    fn free_part(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize) -> bool {
        false
    }

    fn free(&self, _ptr: *mut u8, _size: usize) -> bool {
        false
    }

    fn can_release_part(&self, _flags: u32) -> bool {
        false
    }

    fn allocates_zeros(&self) -> bool {
        // The backing static heap is zero-initialized and memory is never
        // reused at this level, so dlmalloc may skip zeroing in calloc.
        true
    }

    fn page_size(&self) -> usize {
        // The granularity at which dlmalloc requests memory from this
        // allocator.
        4096
    }
}

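/// A pointer-and-length description of the memory region backing a static
/// heap, typically obtained from [`StaticHeap::bounds`].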
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct StaticHeapBounds {
    ptr: *mut u8,
    size: usize,
}

unsafe impl Send for StaticHeapBounds {}

impl StaticHeapBounds {
    pub const fn new(ptr: *mut u8, size: usize) -> Self {
        Self { ptr, size }
    }

    pub const fn start(&self) -> *mut u8 {
        self.ptr
    }

    pub fn end(&self) -> *mut u8 {
        self.start()
            .wrapping_offset(self.size().try_into().unwrap())
    }

    pub const fn size(&self) -> usize {
        self.size
    }
}

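/// A statically-sized backing store for a heap. The `A` type parameter forces
/// the alignment of the buffer: with `#[repr(C)]`, the zero-length
/// `_alignment` array contributes its alignment (but no size) to the struct.
///
/// For example (a sketch, not taken from this crate's tests), a 64 KiB heap
/// aligned to 8 bytes:
///
/// ```ignore
/// static HEAP: StaticHeap<{ 64 * 1024 }, u64> = StaticHeap::new();
/// ```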
#[repr(C)]
pub struct StaticHeap<const N: usize, A = ()> {
    _alignment: [A; 0],
    space: UnsafeCell<[u8; N]>,
}

unsafe impl<const N: usize, A> Sync for StaticHeap<N, A> {}

impl<const N: usize, A> StaticHeap<N, A> {
    pub const fn new() -> Self {
        Self {
            _alignment: [],
            space: UnsafeCell::new([0; N]),
        }
    }

    pub const fn bounds(&self) -> StaticHeapBounds {
        StaticHeapBounds::new(self.space.get().cast(), N)
    }
}

impl<const N: usize, A> Default for StaticHeap<N, A> {
    fn default() -> Self {
        Self::new()
    }
}