// sel4_abstract_allocator/basic.rs

//
// Copyright 2023, Colias Group, LLC
//
// SPDX-License-Identifier: BSD-2-Clause
//

use alloc::collections::BTreeMap;
use core::alloc::Layout;
use core::ops::Range;

use crate::{AbstractAllocator, AbstractAllocatorAllocation};

// TODO
// This is basically just a free list. Should use a more efficient implementation.

// NOTE
// Once #![feature(allocator_api)] and #![feature(btreemap_alloc)] land, this should be
// parameterized with an allocator A, to enable this type to be used without a global allocator.

// NOTE
// #![feature(btree_cursors)] would make this stand-in implementation simpler and more efficient.
// See git history.

/// Default bookkeeping unit: allocation sizes and alignments are rounded up
/// to multiples of this. Must be a power of two (see `allocate`, which feeds
/// it into `Layout::from_size_align` as an alignment).
const DEFAULT_GRANULE_SIZE: usize = 512;

// Aliases to make the free-list map type below self-describing.
type Offset = usize;
type Size = usize;

29pub struct BasicAllocator {
30    granule_size: usize,
31    holes: BTreeMap<Offset, Size>,
32}
33
34impl BasicAllocator {
35    pub fn new(size: Size) -> Self {
36        Self::with_granule_size(size, DEFAULT_GRANULE_SIZE)
37    }
38
39    pub fn with_granule_size(size: usize, granule_size: usize) -> Self {
40        assert_eq!(size % granule_size, 0);
41        let mut holes = BTreeMap::new();
42        holes.insert(0, size);
43        Self {
44            granule_size,
45            holes,
46        }
47    }
48
49    fn granule_size(&self) -> usize {
50        self.granule_size
51    }
52}
53
54impl AbstractAllocator for BasicAllocator {
55    type AllocationError = InsufficientResources;
56
57    type Allocation = Allocation;
58
59    fn allocate(&mut self, orig_layout: Layout) -> Result<Self::Allocation, Self::AllocationError> {
60        let layout = Layout::from_size_align(
61            orig_layout.size().next_multiple_of(self.granule_size()),
62            orig_layout.align().max(self.granule_size()),
63        )
64        .unwrap();
65
66        let (buffer_offset, hole_offset, hole_size) = self
67            .holes
68            .iter()
69            .find_map(|(&hole_offset, &hole_size)| {
70                let buffer_offset = hole_offset.next_multiple_of(layout.align());
71                if buffer_offset + layout.size() <= hole_offset + hole_size {
72                    Some((buffer_offset, hole_offset, hole_size))
73                } else {
74                    None
75                }
76            })
77            .ok_or(InsufficientResources::new())?;
78
79        self.holes.remove(&hole_offset).unwrap();
80
81        if hole_offset < buffer_offset {
82            self.holes.insert(hole_offset, buffer_offset - hole_offset);
83        }
84
85        let hole_end_offset = hole_offset + hole_size;
86        let buffer_end_offset = buffer_offset + layout.size();
87
88        if buffer_end_offset < hole_end_offset {
89            self.holes
90                .insert(buffer_end_offset, hole_end_offset - buffer_end_offset);
91        }
92
93        Ok(Allocation::new(
94            buffer_offset..(buffer_offset + orig_layout.size()),
95        ))
96    }
97
98    fn deallocate(&mut self, allocation: Self::Allocation) {
99        let range = allocation.range();
100        let offset = range.start;
101        let size = range.len();
102
103        assert_eq!(offset % self.granule_size(), 0);
104        let size = size.next_multiple_of(self.granule_size());
105
106        let holes = self
107            .holes
108            .range(..&offset)
109            .next_back()
110            .map(copy_typle_fields)
111            .map(|prev_hole| {
112                (
113                    prev_hole,
114                    self.holes.range(&offset..).next().map(copy_typle_fields),
115                )
116            });
117
118        let mut island = true;
119
120        if let Some(((prev_hole_offset, prev_hole_size), next_hole)) = holes {
121            assert!(prev_hole_offset + prev_hole_size <= offset);
122            let adjacent_to_prev = prev_hole_offset + prev_hole_size == offset;
123            if adjacent_to_prev {
124                island = false;
125                *self.holes.get_mut(&prev_hole_offset).unwrap() += size;
126            }
127            if let Some((next_hole_offset, next_hole_size)) = next_hole {
128                assert!(offset + size <= next_hole_offset);
129                let adjacent_to_next = offset + size == next_hole_offset;
130                if adjacent_to_next {
131                    island = false;
132                    self.holes.remove(&next_hole_offset).unwrap();
133                    if adjacent_to_prev {
134                        *self.holes.get_mut(&prev_hole_offset).unwrap() += next_hole_size;
135                    } else {
136                        self.holes.insert(offset, size + next_hole_size);
137                    }
138                }
139            }
140        }
141
142        if island {
143            self.holes.insert(offset, size);
144        }
145    }
146}
147
/// A contiguous range of offsets handed out by [`BasicAllocator::allocate`].
///
/// The range covers the caller's originally requested size; the allocator
/// internally reserves the granule-rounded extent.
pub struct Allocation(Range<usize>);

impl Allocation {
    fn new(range: Range<usize>) -> Self {
        Self(range)
    }
}

156impl AbstractAllocatorAllocation for Allocation {
157    fn range(&self) -> Range<usize> {
158        self.0.clone()
159    }
160}
161
/// Error returned by `allocate` when no hole is large enough to satisfy the
/// requested layout.
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct InsufficientResources(());

impl InsufficientResources {
    fn new() -> Self {
        Self(())
    }
}

// Fully-qualified paths keep this impl self-contained (no new imports).
impl core::fmt::Display for InsufficientResources {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.write_str("insufficient resources")
    }
}

/// Copies a `(&T, &U)` pair — as yielded by `BTreeMap::iter`/`range` — into
/// an owned `(T, U)` pair.
///
/// NOTE(review): "typle" is a typo for "tuple"; the name is kept because
/// call sites in this file use it.
fn copy_typle_fields<T: Copy, U: Copy>((&t, &u): (&T, &U)) -> (T, U) {
    (t, u)
}