kernel/src/kernel_heap.rs

use crate::{println, virtual_memory::KERNEL_SPACE};
use core::{
    alloc::{GlobalAlloc, Layout},
    ptr::{self, NonNull},
    sync::atomic::{AtomicUsize, Ordering},
};
use humansize::{SizeFormatter, BINARY};
use linked_list_allocator::hole::HoleList;
use spin::Mutex;
use x86_64::structures::paging::PageTableFlags;
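
/// A growable kernel heap: a first-fit free list plus counters for usage statistics.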
pub struct Heap {
    /// Free list from `linked_list_allocator` tracking available heap memory.
    hole_list: Mutex<HoleList>,
    /// Bytes currently handed out to callers.
    bytes_used: AtomicUsize,
    /// Bytes of physical memory mapped for the heap so far.
    pmem_size: AtomicUsize,
}
impl Heap {
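    /// Prints how much memory is currently allocated and how much physical
    /// memory backs the heap, in human-readable binary units.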
    pub fn print_stats(&self) {
        println!(
            "[HEAP] {} currently allocated",
            SizeFormatter::new(self.bytes_used.load(Ordering::Relaxed), BINARY)
        );
        println!(
            "[HEAP] {} physical memory used for heap",
            SizeFormatter::new(self.pmem_size.load(Ordering::Relaxed), BINARY)
        );
    }
}
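
// SAFETY: `HoleList` holds raw pointers and is therefore neither `Send` nor
// `Sync` on its own, but every access to it goes through the `Mutex`, and the
// remaining fields are atomics.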
unsafe impl Send for Heap {}
unsafe impl Sync for Heap {}
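
// First-fit allocation backed by `linked_list_allocator`'s `HoleList`. When the
// free list cannot satisfy a request, `alloc` grows the heap on demand by
// mapping fresh pages from `KERNEL_SPACE` and donating them to the list.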
unsafe impl GlobalAlloc for Heap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut locked_self = self.hole_list.lock();
        let (ptr, true_layout) = locked_self
            .allocate_first_fit(layout)
            .map_or_else(
                |()| {
                    // Out of heap space: release the lock, map fresh pages, donate
                    // them to the hole list, then retry the allocation.
                    drop(locked_self);
                    // Reserve twice the page-rounded request to amortize future
                    // growth. The mask keeps the byte count page-aligned even if
                    // the multiplication saturates at `usize::MAX`.
                    let bytes_added =
                        layout.size().next_multiple_of(4096).saturating_mul(2) & !0xFFF;
                    let num_pages = bytes_added / 4096;
                    let Ok(ptr) = KERNEL_SPACE.lock().map_free(num_pages, PageTableFlags::empty())
                    else {
                        return (ptr::null_mut(), layout);
                    };
                    // Only count the physical memory once the mapping has succeeded.
                    self.pmem_size.fetch_add(bytes_added, Ordering::Relaxed);
                    #[expect(clippy::unwrap_used, reason = "
                        from_size_align requires align to be a nonzero power of two, which it is.
                        Also, size must be less than isize when rounded up to a multiple of align.
                        Since size is already a multiple of align, no overflow will occur.
                    ")]
                    let region_layout = Layout::from_size_align(bytes_added, 4096).unwrap();
                    // SAFETY: `ptr` points to `bytes_added` bytes of freshly mapped,
                    // otherwise unused memory, so handing it to the hole list is sound.
                    unsafe {
                        self.hole_list
                            .lock()
                            .deallocate(NonNull::new_unchecked(ptr), region_layout)
                    };
                    // Retry with the caller's `layout`, not `region_layout`: allocating
                    // the whole region would hand out twice the requested memory.
                    #[expect(clippy::expect_used, reason = "
                        The deallocate call should have given enough space to complete the allocation.
                        Thus, if this allocation fails it is a bug in the allocator, which should be treated as fatal.
                    ")]
                    self.hole_list
                        .lock()
                        .allocate_first_fit(layout)
                        .map(|(allocation, true_layout)| (allocation.as_ptr(), true_layout))
                        .expect("Failed to allocate after adding memory to the heap")
                },
                |(allocation, true_layout)| (allocation.as_ptr(), true_layout),
            );
        if ptr.is_null() {
            return ptr::null_mut();
        }
        #[expect(
            clippy::arithmetic_side_effects,
            reason = "align cannot be 0, so the subtraction cannot underflow"
        )]
        {
            assert!((ptr.expose_provenance() & (layout.align() - 1)) == 0);
        }
        self.bytes_used.fetch_add(true_layout.size(), Ordering::Relaxed);
        ptr
    }
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // SAFETY: `ptr` was returned by `alloc` for this `layout`, so it is
        // non-null and belongs to the hole list.
        let true_layout =
            unsafe { self.hole_list.lock().deallocate(NonNull::new_unchecked(ptr), layout) };
        // `deallocate` returns the padded layout that was actually reserved,
        // matching the `true_layout` size that `alloc` added to `bytes_used`.
        self.bytes_used.fetch_sub(true_layout.size(), Ordering::Relaxed);
    }
}
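
// The heap starts with no memory at all; the very first allocation takes the
// grow path in `alloc` above.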
#[global_allocator]
pub static HEAP: Heap = Heap {
    hole_list: Mutex::new(HoleList::empty()),
    bytes_used: AtomicUsize::new(0),
    pmem_size: AtomicUsize::new(0),
};
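
// Called when an allocation fails unrecoverably. The `alloc_error_handler`
// attribute is nightly-only; the corresponding feature gate is presumably
// enabled at the crate root.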
#[alloc_error_handler]
#[expect(
    clippy::panic,
    reason = "A panic is our only real choice here as this function must diverge"
)]
fn alloc_error_handler(layout: Layout) -> ! {
    panic!("allocation error: {:?}", layout)
}