#![no_std]

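//! A `no_std` global allocator built on the `dlmalloc` crate: allocations are
//! served from a fixed region of static memory, and synchronization is
//! pluggable via `lock_api::RawMutex`.
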
use core::alloc::{GlobalAlloc, Layout};
use core::cell::{RefCell, UnsafeCell};
use core::ptr;
use core::sync::atomic::{AtomicUsize, Ordering};

use dlmalloc::{Allocator, Dlmalloc};
use lock_api::{Mutex, RawMutex};

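/// A [`GlobalAlloc`] that serves allocations from a fixed region of static
/// memory, using `dlmalloc` for bookkeeping and a `lock_api::RawMutex` for
/// synchronization.
///
/// A minimal usage sketch; `RawSpinlock` here is a hypothetical stand-in for
/// whatever `lock_api::RawMutex` implementation the target provides, not part
/// of this crate:
///
/// ```ignore
/// use core::sync::atomic::{AtomicBool, Ordering};
///
/// // Illustrative spinlock; any `lock_api::RawMutex` will do.
/// struct RawSpinlock(AtomicBool);
///
/// unsafe impl lock_api::RawMutex for RawSpinlock {
///     const INIT: Self = Self(AtomicBool::new(false));
///     type GuardMarker = lock_api::GuardSend;
///
///     fn lock(&self) {
///         while self.0.swap(true, Ordering::Acquire) {
///             core::hint::spin_loop();
///         }
///     }
///
///     fn try_lock(&self) -> bool {
///         !self.0.swap(true, Ordering::Acquire)
///     }
///
///     unsafe fn unlock(&self) {
///         self.0.store(false, Ordering::Release);
///     }
/// }
///
/// const HEAP_SIZE: usize = 64 * 1024;
/// static HEAP: StaticHeap<HEAP_SIZE> = StaticHeap::new();
///
/// #[global_allocator]
/// static ALLOCATOR: StaticDlmalloc<RawSpinlock> =
///     StaticDlmalloc::new(HEAP.bounds());
/// ```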
pub struct StaticDlmalloc<R>(
    SyncDlmalloc<R, SimpleDlmallocAllocatorWrapper<StaticDlmallocAllocator>>,
);

impl<R> StaticDlmalloc<R> {
    pub const fn new_with_raw_mutex(raw_mutex: R, bounds: StaticHeapBounds) -> Self {
        Self(SyncDlmalloc::new(
            raw_mutex,
            SimpleDlmallocAllocatorWrapper::new(StaticDlmallocAllocator::new(bounds)),
        ))
    }
}

impl<R: RawMutex> StaticDlmalloc<R> {
    pub const fn new(bounds: StaticHeapBounds) -> Self {
        Self::new_with_raw_mutex(R::INIT, bounds)
    }

    /// Returns a reference to the underlying raw mutex.
    ///
    /// # Safety
    ///
    /// Locking or unlocking the returned mutex out of band can break the
    /// mutual exclusion that protects the allocator state.
    pub unsafe fn raw_mutex(&self) -> &R {
        unsafe { self.0.raw_mutex() }
    }
}

unsafe impl<R: RawMutex> GlobalAlloc for StaticDlmalloc<R> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc(layout) }
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc_zeroed(layout) }
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe { self.0.dealloc(ptr, layout) }
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        unsafe { self.0.realloc(ptr, layout, new_size) }
    }
}

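/// Like [`StaticDlmalloc`], but the heap bounds are supplied at runtime via
/// [`set_bounds`](Self::set_bounds) rather than at construction. Until then,
/// every allocation fails by returning a null pointer.
///
/// A minimal sketch, reusing the hypothetical `RawSpinlock` stand-in from the
/// [`StaticDlmalloc`] example:
///
/// ```ignore
/// static ALLOCATOR: DeferredStaticDlmalloc<RawSpinlock> =
///     DeferredStaticDlmalloc::new();
///
/// // Later, once the location and size of the heap region are known:
/// fn init_heap(start: *mut u8, size: usize) {
///     ALLOCATOR
///         .set_bounds(StaticHeapBounds::new(start, size))
///         .expect("bounds already set");
/// }
/// ```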
pub struct DeferredStaticDlmalloc<R>(
    SyncDlmalloc<
        R,
        SimpleDlmallocAllocatorWrapper<DeferredStaticDlmallocAllocator<StaticDlmallocAllocator>>,
    >,
);

impl<R> DeferredStaticDlmalloc<R> {
    pub const fn new_with_raw_mutex(raw_mutex: R) -> Self {
        Self(SyncDlmalloc::new(
            raw_mutex,
            SimpleDlmallocAllocatorWrapper::new(DeferredStaticDlmallocAllocator::new()),
        ))
    }
}

impl<R: RawMutex> DeferredStaticDlmalloc<R> {
    pub const fn new() -> Self {
        Self::new_with_raw_mutex(R::INIT)
    }
}

impl<R: RawMutex> Default for DeferredStaticDlmalloc<R> {
    fn default() -> Self {
        Self::new()
    }
}

impl<R: RawMutex> DeferredStaticDlmalloc<R> {
    /// Returns a reference to the underlying raw mutex.
    ///
    /// # Safety
    ///
    /// Locking or unlocking the returned mutex out of band can break the
    /// mutual exclusion that protects the allocator state.
    pub unsafe fn raw_mutex(&self) -> &R {
        unsafe { self.0.raw_mutex() }
    }

    /// Supplies the heap bounds. Fails if they have already been set.
    pub fn set_bounds(&self, bounds: StaticHeapBounds) -> Result<(), BoundsAlreadySetError> {
        self.0
            .dlmalloc
            .lock()
            .allocator()
            .0
            .set(StaticDlmallocAllocator::new(bounds))
    }
}

unsafe impl<R: RawMutex> GlobalAlloc for DeferredStaticDlmalloc<R> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc(layout) }
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        unsafe { self.0.alloc_zeroed(layout) }
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe { self.0.dealloc(ptr, layout) }
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        unsafe { self.0.realloc(ptr, layout, new_size) }
    }
}

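/// The watermark (bump) backend handing static memory to dlmalloc:
/// `watermark` counts how many bytes of `bounds` have been handed out so far.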
struct StaticDlmallocAllocator {
    bounds: StaticHeapBounds,
    watermark: AtomicUsize,
}

impl StaticDlmallocAllocator {
    const fn new(bounds: StaticHeapBounds) -> Self {
        Self {
            bounds,
            watermark: AtomicUsize::new(0),
        }
    }
}

impl SimpleDlmallocAllocator for StaticDlmallocAllocator {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8> {
        // Atomically bump the watermark, failing if the addition overflows or
        // the request would run past the end of the region.
        let old_watermark = self
            .watermark
            .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |old_watermark| {
                let new_watermark = old_watermark.checked_add(size)?;
                if new_watermark > self.bounds.size() {
                    return None;
                }
                Some(new_watermark)
            })
            .ok()?;
        // The new allocation begins at the old watermark.
        Some(
            self.bounds
                .start()
                .wrapping_offset(old_watermark.try_into().unwrap()),
        )
    }
}

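/// Defers construction of the inner allocator until heap bounds are known.
/// A `RefCell` suffices for interior mutability here because this value is
/// only ever reached through the mutex in [`SyncDlmalloc`].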
struct DeferredStaticDlmallocAllocator<T> {
    state: RefCell<Option<T>>,
}

/// Returned by [`DeferredStaticDlmalloc::set_bounds`] when the heap bounds
/// have already been set.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct BoundsAlreadySetError(());

impl<T> DeferredStaticDlmallocAllocator<T> {
    const fn new() -> Self {
        Self {
            state: RefCell::new(None),
        }
    }

    fn set(&self, state: T) -> Result<(), BoundsAlreadySetError> {
        let mut state_opt = self.state.borrow_mut();
        if state_opt.is_none() {
            *state_opt = Some(state);
            Ok(())
        } else {
            Err(BoundsAlreadySetError(()))
        }
    }
}

impl<T: SimpleDlmallocAllocator> SimpleDlmallocAllocator for DeferredStaticDlmallocAllocator<T> {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8> {
        // Fails (returns None) until the inner allocator has been installed.
        self.state
            .borrow()
            .as_ref()
            .and_then(|state| state.alloc_simple(size))
    }
}

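/// A [`Dlmalloc`] instance guarded by a `lock_api` mutex; its [`GlobalAlloc`]
/// impl locks and delegates to the corresponding dlmalloc calls.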
struct SyncDlmalloc<R, T> {
    dlmalloc: Mutex<R, Dlmalloc<T>>,
}

impl<R, T> SyncDlmalloc<R, T> {
    const fn new(raw_mutex: R, state: T) -> Self {
        Self {
            dlmalloc: Mutex::from_raw(raw_mutex, Dlmalloc::new_with_allocator(state)),
        }
    }
}

impl<R: RawMutex, T> SyncDlmalloc<R, T> {
    #[allow(clippy::missing_safety_doc)]
    unsafe fn raw_mutex(&self) -> &R {
        unsafe { self.dlmalloc.raw() }
    }
}

unsafe impl<R: RawMutex, T: Allocator> GlobalAlloc for SyncDlmalloc<R, T> {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        unsafe { self.dlmalloc.lock().malloc(layout.size(), layout.align()) }
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        unsafe { self.dlmalloc.lock().calloc(layout.size(), layout.align()) }
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe {
            self.dlmalloc
                .lock()
                .free(ptr, layout.size(), layout.align())
        }
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        unsafe {
            self.dlmalloc
                .lock()
                .realloc(ptr, layout.size(), layout.align(), new_size)
        }
    }
}

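/// The reduced interface that backing allocators in this crate implement: a
/// single "allocate `size` more bytes" hook. [`SimpleDlmallocAllocatorWrapper`]
/// adapts it to dlmalloc's full [`Allocator`] trait.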
trait SimpleDlmallocAllocator: Send {
    fn alloc_simple(&self, size: usize) -> Option<*mut u8>;
}

struct SimpleDlmallocAllocatorWrapper<T>(T);

impl<T> SimpleDlmallocAllocatorWrapper<T> {
    const fn new(inner: T) -> Self {
        Self(inner)
    }
}

unsafe impl<T: SimpleDlmallocAllocator> Allocator for SimpleDlmallocAllocatorWrapper<T> {
    fn alloc(&self, size: usize) -> (*mut u8, usize, u32) {
        match self.0.alloc_simple(size) {
            Some(start) => (start, size, 0),
            None => (ptr::null_mut(), 0, 0),
        }
    }

    fn remap(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize, _can_move: bool) -> *mut u8 {
        // Growing or moving an existing region in place is not supported.
        ptr::null_mut()
    }

    fn free_part(&self, _ptr: *mut u8, _oldsize: usize, _newsize: usize) -> bool {
        false
    }

    fn free(&self, _ptr: *mut u8, _size: usize) -> bool {
        // Memory is never returned to the backing region; dlmalloc keeps
        // freed blocks in its own pools.
        false
    }

    fn can_release_part(&self, _flags: u32) -> bool {
        false
    }

    fn allocates_zeros(&self) -> bool {
        // Each byte of the backing region is handed out at most once, and a
        // `StaticHeap` starts out zero-initialized.
        true
    }

    fn page_size(&self) -> usize {
        4096
    }
}

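/// A pointer-and-length description of the memory region to allocate from.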
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct StaticHeapBounds {
    ptr: *mut u8,
    size: usize,
}

unsafe impl Send for StaticHeapBounds {}

impl StaticHeapBounds {
    pub const fn new(ptr: *mut u8, size: usize) -> Self {
        Self { ptr, size }
    }

    pub const fn start(&self) -> *mut u8 {
        self.ptr
    }

    pub fn end(&self) -> *mut u8 {
        self.start()
            .wrapping_offset(self.size().try_into().unwrap())
    }

    pub const fn size(&self) -> usize {
        self.size
    }
}

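/// A `const`-constructible region of static memory to use as a heap.
///
/// The `A` type parameter controls alignment: the zero-sized `[A; 0]` field,
/// combined with `#[repr(C)]`, aligns the whole struct (and hence `space`) to
/// `align_of::<A>()` without adding any size. A sketch, with a hypothetical
/// `Align4K` marker type:
///
/// ```ignore
/// #[repr(align(4096))]
/// struct Align4K;
///
/// static HEAP: StaticHeap<{ 64 * 1024 }, Align4K> = StaticHeap::new();
/// ```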
#[repr(C)]
pub struct StaticHeap<const N: usize, A = ()> {
    _alignment: [A; 0],
    space: UnsafeCell<[u8; N]>,
}

// SAFETY: the buffer is only handed out as raw pointers via `bounds()`;
// synchronizing access to it is the allocator's responsibility.
unsafe impl<const N: usize, A> Sync for StaticHeap<N, A> {}

impl<const N: usize, A> StaticHeap<N, A> {
    pub const fn new() -> Self {
        Self {
            _alignment: [],
            space: UnsafeCell::new([0; N]),
        }
    }

    pub const fn bounds(&self) -> StaticHeapBounds {
        StaticHeapBounds::new(self.space.get().cast(), N)
    }
}

impl<const N: usize, A> Default for StaticHeap<N, A> {
    fn default() -> Self {
        Self::new()
    }
}