aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--StrixKernel/src/allocator/fixed_size_block.rs109
1 files changed, 109 insertions, 0 deletions
diff --git a/StrixKernel/src/allocator/fixed_size_block.rs b/StrixKernel/src/allocator/fixed_size_block.rs
new file mode 100644
index 0000000..790cc65
--- /dev/null
+++ b/StrixKernel/src/allocator/fixed_size_block.rs
@@ -0,0 +1,109 @@
+use super::Locked;
+use alloc::alloc::{GlobalAlloc, Layout};
+use core::{
+ mem,
+ ptr::{self, NonNull},
+};
+
/// The block sizes supported by the allocator.
///
/// Every entry must be a power of two because the block size is reused
/// as the block's alignment, and alignments are always powers of two.
const BLOCK_SIZES: &[usize] = &[8, 16, 32, 64, 128, 256, 512, 1024, 2048];

/// Pick the smallest block size that can satisfy `layout`.
///
/// Returns the index of the matching entry in [`BLOCK_SIZES`], or `None`
/// when the request is larger than every fixed block size.
fn list_index(layout: &Layout) -> Option<usize> {
    // A block must cover both the requested size and the requested
    // alignment (block size doubles as block alignment).
    let needed = layout.size().max(layout.align());
    BLOCK_SIZES
        .iter()
        .enumerate()
        .find_map(|(idx, &size)| (size >= needed).then_some(idx))
}
+
/// A node in the free list of one block size class.
///
/// The node is stored inside the freed block itself, so every block must
/// be at least as large and as aligned as `ListNode` (checked by the
/// asserts in `dealloc`).
struct ListNode {
    /// The next free block of the same size class, if any.
    next: Option<&'static mut ListNode>,
}
+
/// An allocator that serves small allocations from per-size free lists
/// and delegates everything else to a linked-list fallback allocator.
pub struct FixedSizeBlockAllocator {
    /// One free-list head per entry of `BLOCK_SIZES`.
    list_heads: [Option<&'static mut ListNode>; BLOCK_SIZES.len()],
    /// Used for requests larger than the biggest block size and for
    /// carving out fresh blocks when a free list is empty.
    fallback_allocator: linked_list_allocator::Heap,
}
+
+impl FixedSizeBlockAllocator {
+ /// Creates an empty FixedSizeBlockAllocator.
+ pub const fn new() -> Self {
+ const EMPTY: Option<&'static mut ListNode> = None;
+ FixedSizeBlockAllocator {
+ list_heads: [EMPTY; BLOCK_SIZES.len()],
+ fallback_allocator: linked_list_allocator::Heap::empty(),
+ }
+ }
+
+ /// Initialize the allocator with the given heap bounds.
+ ///
+ /// This function is unsafe because the caller must guarantee that the given
+ /// heap bounds are valid and that the heap is unused. This method must be
+ /// called only once.
+ pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
+ unsafe {
+ self.fallback_allocator.init(heap_start, heap_size);
+ }
+ }
+
+ /// Allocates using the fallback allocator.
+ fn fallback_alloc(&mut self, layout: Layout) -> *mut u8 {
+ match self.fallback_allocator.allocate_first_fit(layout) {
+ Ok(ptr) => ptr.as_ptr(),
+ Err(_) => ptr::null_mut(),
+ }
+ }
+}
+
unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
    /// Serves the allocation from the matching free list when one exists,
    /// otherwise carves a new block (or an oversized region) out of the
    /// fallback allocator. Returns null on exhaustion.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => {
                match allocator.list_heads[index].take() {
                    Some(node) => {
                        // Pop the free-list head and hand the block out.
                        allocator.list_heads[index] = node.next.take();
                        node as *mut ListNode as *mut u8
                    }
                    None => {
                        // no block exists in list => allocate new block
                        let block_size = BLOCK_SIZES[index];
                        // only works if all block sizes are a power of 2
                        let block_align = block_size;
                        let layout = Layout::from_size_align(block_size, block_align).unwrap();
                        allocator.fallback_alloc(layout)
                    }
                }
            }
            // Request is larger than every block size: use the fallback heap.
            None => allocator.fallback_alloc(layout),
        }
    }

    /// Pushes the freed block onto the free list of its size class, or
    /// returns it to the fallback allocator when no class matches.
    ///
    /// NOTE(review): blocks carved from the fallback heap by `alloc` are
    /// also pushed onto the free lists here, never given back to the
    /// fallback allocator — intentional in this design, but it means
    /// fallback memory claimed by small allocations is never reclaimed
    /// for large ones.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => {
                // New head links to the current head (taken out first so
                // the list is never observed in a broken state).
                let new_node = ListNode {
                    next: allocator.list_heads[index].take(),
                };
                // verify that block has size and alignment required for storing node
                assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
                assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
                let new_node_ptr = ptr as *mut ListNode;
                // SAFETY: the asserts above guarantee the block is large
                // and aligned enough to store a `ListNode`, and the block
                // is no longer in use by the caller.
                unsafe {
                    new_node_ptr.write(new_node);
                    allocator.list_heads[index] = Some(&mut *new_node_ptr);
                }
            }
            None => {
                // Oversized block: it was allocated by the fallback
                // allocator, so hand it back there.
                let ptr = NonNull::new(ptr).unwrap();
                unsafe {
                    allocator.fallback_allocator.deallocate(ptr, layout);
                }
            }
        }
    }
}