aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorNatasha Moongrave <natasha@256phi.eu>2026-04-01 20:04:09 +0200
committerNatasha Moongrave <natasha@256phi.eu>2026-04-01 20:04:09 +0200
commitbd87767255c784156deb19ea166826ab78a023fb (patch)
tree1a7c238c67d4250494685e036146c950ce506fe8
parent4a1014593cd628098e80f15782a4cb51ce267cd4 (diff)
Added all newly implemented allocators to allocator.rs
-rw-r--r--StrixKernel/src/allocator.rs32
1 file changed, 30 insertions(+), 2 deletions(-)
diff --git a/StrixKernel/src/allocator.rs b/StrixKernel/src/allocator.rs
index 7792666..4bae6bd 100644
--- a/StrixKernel/src/allocator.rs
+++ b/StrixKernel/src/allocator.rs
@@ -1,6 +1,6 @@
use alloc::alloc::{GlobalAlloc, Layout};
use core::ptr::null_mut;
-use linked_list_allocator::LockedHeap;
+use fixed_size_block::FixedSizeBlockAllocator;
use x86_64::{
VirtAddr,
structures::paging::{
@@ -8,11 +8,15 @@ use x86_64::{
},
};
+pub mod bump;
+pub mod fixed_size_block;
+pub mod linked_list;
+
pub const HEAP_START: usize = 0x_4444_4444_0000;
pub const HEAP_SIZE: usize = 100 * 1024; // 100 KiB
#[global_allocator]
-static ALLOCATOR: LockedHeap = LockedHeap::empty();
+static ALLOCATOR: Locked<FixedSizeBlockAllocator> = Locked::new(FixedSizeBlockAllocator::new());
pub fn init_heap(
mapper: &mut impl Mapper<Size4KiB>,
@@ -51,4 +55,28 @@ unsafe impl GlobalAlloc for Dummy {
unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
panic!("dealloc should be never called")
}
+}
+
+/// A wrapper around spin::Mutex to permit trait implementations.
+pub struct Locked<A> {
+ inner: spin::Mutex<A>,
+}
+
+impl<A> Locked<A> {
+ pub const fn new(inner: A) -> Self {
+ Locked {
+ inner: spin::Mutex::new(inner),
+ }
+ }
+
+ pub fn lock(&self) -> spin::MutexGuard<A> {
+ self.inner.lock()
+ }
+}
+
+/// Align the given address `addr` upwards to alignment `align`.
+///
+/// Requires that `align` is a power of two.
+fn align_up(addr: usize, align: usize) -> usize {
+ (addr + align - 1) & !(align - 1)
} \ No newline at end of file