#![cfg_attr(feature = "const_fn", feature(const_mut_refs, const_fn_fn_ptr_basics))]
#![no_std]

#[cfg(test)]
#[macro_use]
extern crate std;

#[cfg(feature = "use_spin")]
extern crate spin;

extern crate alloc;

use core::alloc::{GlobalAlloc, Layout};
use core::cmp::{max, min};
use core::fmt;
use core::mem::size_of;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use core::ptr::NonNull;
#[cfg(feature = "use_spin")]
use spin::Mutex;

mod frame;
pub mod linked_list;
#[cfg(test)]
mod test;

pub use frame::*;
/// A heap that uses a buddy system with configurable order.
///
/// # Usage
///
/// Create a heap and add a memory region to it:
/// ```
/// use buddy_system_allocator::*;
/// # use core::mem::size_of;
/// let mut heap = Heap::<32>::empty();
/// # let space: [usize; 100] = [0; 100];
/// # let begin: usize = space.as_ptr() as usize;
/// # let end: usize = begin + 100 * size_of::<usize>();
/// # let size: usize = 100 * size_of::<usize>();
/// unsafe {
///     heap.init(begin, size);
///     // or
///     heap.add_to_heap(begin, end);
/// }
/// ```
pub struct Heap<const ORDER: usize> {
    // buddy system with max order of `ORDER`
    free_list: [linked_list::LinkedList; ORDER],

    // statistics
    user: usize,
    allocated: usize,
    total: usize,
}

impl<const ORDER: usize> Heap<ORDER> {
    /// Create an empty heap
    pub const fn new() -> Self {
        Heap {
            free_list: [linked_list::LinkedList::new(); ORDER],
            user: 0,
            allocated: 0,
            total: 0,
        }
    }

    /// Create an empty heap
    pub const fn empty() -> Self {
        Self::new()
    }

    /// Add a range of memory [start, end) to the heap
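    ///
    /// # Safety
    ///
    /// The range must denote valid, unused memory that stays available for
    /// as long as the heap is in use; the allocator stores its free-list
    /// nodes inside the freed memory itself.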
    pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
        // avoid unaligned access on some platforms
        start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
        end &= !size_of::<usize>() + 1;
        assert!(start <= end);

        let mut total = 0;
        let mut current_start = start;

        while current_start + size_of::<usize>() <= end {
            // The largest aligned block that can start at `current_start` is
            // bounded by its lowest set bit and by the remaining space.
            let lowbit = current_start & (!current_start + 1);
            let mut size = min(lowbit, prev_power_of_two(end - current_start));

            // Cap the block at the largest class the free lists can hold, so
            // a huge region cannot index past `free_list[ORDER - 1]`.
            let mut order = size.trailing_zeros() as usize;
            if order > ORDER - 1 {
                order = ORDER - 1;
                size = 1 << order;
            }
            total += size;

            self.free_list[order].push(current_start as *mut usize);
            current_start += size;
        }

        self.total += total;
    }

    /// Add a range of memory [start, start + size) to the heap
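    ///
    /// # Safety
    ///
    /// See [`Heap::add_to_heap`]; the same requirements apply to
    /// `[start, start + size)`.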
    pub unsafe fn init(&mut self, start: usize, size: usize) {
        self.add_to_heap(start, start + size);
    }

    /// Allocate a range of memory from the heap satisfying `layout` requirements
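    ///
    /// A minimal sketch of a round trip (assuming the heap has been given
    /// memory as in the struct-level example):
    /// ```
    /// # use buddy_system_allocator::Heap;
    /// # use core::alloc::Layout;
    /// # use core::mem::size_of;
    /// # let mut heap = Heap::<32>::empty();
    /// # let space: [usize; 100] = [0; 100];
    /// # unsafe { heap.init(space.as_ptr() as usize, 100 * size_of::<usize>()); }
    /// let layout = Layout::from_size_align(16, 8).unwrap();
    /// if let Ok(ptr) = heap.alloc(layout) {
    ///     // ... use the block, then hand it back with the same layout
    ///     heap.dealloc(ptr, layout);
    /// }
    /// ```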
    pub fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;
        for i in class..self.free_list.len() {
            // Find the first non-empty size class
            if !self.free_list[i].is_empty() {
                // Split buffers
                for j in (class + 1..i + 1).rev() {
                    if let Some(block) = self.free_list[j].pop() {
                        unsafe {
                            self.free_list[j - 1]
                                .push((block as usize + (1 << (j - 1))) as *mut usize);
                            self.free_list[j - 1].push(block);
                        }
                    } else {
                        return Err(());
                    }
                }

                let result = NonNull::new(
                    self.free_list[class]
                        .pop()
                        .expect("current block should have free space now")
                        as *mut u8,
                );
                if let Some(result) = result {
                    self.user += layout.size();
                    self.allocated += size;
                    return Ok(result);
                } else {
                    return Err(());
                }
            }
        }
        Err(())
    }

    /// Deallocate a range of memory from the heap
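    ///
    /// `ptr` must point to a block previously returned by [`Heap::alloc`]
    /// with the same `layout`, otherwise the free lists will be corrupted.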
    pub fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        unsafe {
            // Put back into free list
            self.free_list[class].push(ptr.as_ptr() as *mut usize);

            // Merge free buddy lists. Stop below the top class: a merged
            // block of class `current_class + 1` must still fit in the
            // free lists.
            let mut current_ptr = ptr.as_ptr() as usize;
            let mut current_class = class;
            while current_class < self.free_list.len() - 1 {
                // The buddy of a block differs from it only in the bit of
                // its own size class.
                let buddy = current_ptr ^ (1 << current_class);
                let mut flag = false;
                for block in self.free_list[current_class].iter_mut() {
                    if block.value() as usize == buddy {
                        block.pop();
                        flag = true;
                        break;
                    }
                }

                // Free buddy found
                if flag {
                    self.free_list[current_class].pop();
                    current_ptr = min(current_ptr, buddy);
                    current_class += 1;
                    self.free_list[current_class].push(current_ptr as *mut usize);
                } else {
                    break;
                }
            }
        }

        self.user -= layout.size();
        self.allocated -= size;
    }

    /// Return the number of bytes that the user requests
    pub fn stats_alloc_user(&self) -> usize {
        self.user
    }

    /// Return the number of bytes that are actually allocated
    pub fn stats_alloc_actual(&self) -> usize {
        self.allocated
    }

    /// Return the total number of bytes in the heap
    pub fn stats_total_bytes(&self) -> usize {
        self.total
    }
}

impl<const ORDER: usize> fmt::Debug for Heap<ORDER> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("Heap")
            .field("user", &self.user)
            .field("allocated", &self.allocated)
            .field("total", &self.total)
            .finish()
    }
}

/// A locked version of `Heap`
///
/// # Usage
///
/// Create a locked heap and add a memory region to it:
/// ```
/// use buddy_system_allocator::*;
/// # use core::mem::size_of;
/// let mut heap = LockedHeap::<32>::new();
/// # let space: [usize; 100] = [0; 100];
/// # let begin: usize = space.as_ptr() as usize;
/// # let end: usize = begin + 100 * size_of::<usize>();
/// # let size: usize = 100 * size_of::<usize>();
/// unsafe {
///     heap.lock().init(begin, size);
///     // or
///     heap.lock().add_to_heap(begin, end);
/// }
/// ```
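///
/// It can also be registered as the global allocator; a minimal sketch of a
/// typical `no_std` setup (not run here, since the heap starts empty):
/// ```no_run
/// use buddy_system_allocator::LockedHeap;
///
/// #[global_allocator]
/// static HEAP_ALLOCATOR: LockedHeap<32> = LockedHeap::<32>::new();
///
/// fn main() {}
/// ```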
#[cfg(feature = "use_spin")]
pub struct LockedHeap<const ORDER: usize>(Mutex<Heap<ORDER>>);

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeap<ORDER> {
    /// Creates an empty heap
    pub const fn new() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }

    /// Creates an empty heap
    pub const fn empty() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeap<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0
            .lock()
            .alloc(layout)
            .ok()
            .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.0.lock().dealloc(NonNull::new_unchecked(ptr), layout)
    }
}

/// A locked version of `Heap` with a rescue hook that runs before OOM
///
/// # Usage
///
/// Create a locked heap:
/// ```
/// use buddy_system_allocator::*;
/// let heap = LockedHeapWithRescue::new(|heap: &mut Heap<32>, layout: &core::alloc::Layout| {});
/// ```
///
/// Before returning an out-of-memory error, the allocator calls the rescue
/// function (which may add memory to the heap) and then retries the
/// allocation once.
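///
/// A rescue function will usually grow the heap. A minimal sketch (the
/// leaked buffer and its size are illustrative assumptions; a kernel would
/// typically map fresh frames instead):
/// ```
/// use buddy_system_allocator::{Heap, LockedHeapWithRescue};
/// use core::alloc::{GlobalAlloc, Layout};
/// use core::mem::size_of;
///
/// fn rescue(heap: &mut Heap<32>, _layout: &Layout) {
///     // Hand a freshly leaked buffer to the allocator.
///     let space = Box::leak(vec![0usize; 128].into_boxed_slice());
///     let start = space.as_ptr() as usize;
///     unsafe { heap.add_to_heap(start, start + 128 * size_of::<usize>()) };
/// }
///
/// let heap = LockedHeapWithRescue::new(rescue);
/// let ptr = unsafe { heap.alloc(Layout::new::<usize>()) };
/// assert!(!ptr.is_null());
/// ```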
#[cfg(feature = "use_spin")]
pub struct LockedHeapWithRescue<const ORDER: usize> {
    inner: Mutex<Heap<ORDER>>,
    rescue: fn(&mut Heap<ORDER>, &Layout),
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
    /// Creates an empty heap
    #[cfg(feature = "const_fn")]
    pub const fn new(rescue: fn(&mut Heap<ORDER>, &Layout)) -> Self {
        LockedHeapWithRescue {
            inner: Mutex::new(Heap::<ORDER>::new()),
            rescue,
        }
    }

    /// Creates an empty heap
    #[cfg(not(feature = "const_fn"))]
    pub fn new(rescue: fn(&mut Heap<ORDER>, &Layout)) -> Self {
        LockedHeapWithRescue {
            inner: Mutex::new(Heap::<ORDER>::new()),
            rescue,
        }
    }
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeapWithRescue<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.lock();
        match inner.alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(_) => {
                // Out of memory: let the rescue hook refill the heap, then
                // retry the allocation once.
                (self.rescue)(&mut inner, &layout);
                inner
                    .alloc(layout)
                    .ok()
                    .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
            }
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.inner
            .lock()
            .dealloc(NonNull::new_unchecked(ptr), layout)
    }
}

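/// Returns the largest power of two less than or equal to `num`.
///
/// For example, `prev_power_of_two(5)` is 4 and `prev_power_of_two(8)` is 8.
/// `num` must be non-zero, or the subtraction below underflows.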
pub(crate) fn prev_power_of_two(num: usize) -> usize {
    1 << (8 * (size_of::<usize>()) - num.leading_zeros() as usize - 1)
}