Skip to main content

proka_kernel/memory/
allocator.rs

//! Heap allocator module
//!
//! This module implements the heap allocator for the kernel.
//! It uses the `talc` crate to manage heap memory.
5
6use crate::config::KERNEL_DEFAULT_HEAP_SIZE;
7use talc::{ClaimOnOom, Span, Talc, Talck};
8use x86_64::{
9    structures::paging::{
10        mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
11    },
12    VirtAddr,
13};
14
/// Virtual address at which the kernel heap region begins.
pub const HEAP_START: usize = 0x4444_4444_0000;
17
/// The global kernel heap allocator.
///
/// `Talck` wraps the `Talc` allocator in a `spin::Mutex` so it is usable as
/// the `#[global_allocator]`. It starts with an *empty* arena: all usable
/// memory is claimed later by [`init_heap`] once the heap region has been
/// mapped. `ClaimOnOom` would claim its span on the first out-of-memory
/// condition, but since the span here is empty, any allocation attempted
/// before `init_heap` runs will fail rather than touch unmapped memory.
#[global_allocator]
pub static ALLOCATOR: Talck<spin::Mutex<()>, ClaimOnOom> = Talc::new(unsafe {
    // SAFETY: an empty span is trivially valid to hand to ClaimOnOom — no
    // memory is claimed until `init_heap` provides the mapped heap region.
    ClaimOnOom::new(Span::empty())
})
.lock();
25
26/// Initialize the heap
27///
28/// This function maps the heap memory region and initializes the global allocator.
29///
30/// # Arguments
31/// * `mapper` - The page table mapper
32/// * `frame_allocator` - The frame allocator
33///
34/// # Returns
35/// * `Ok(())` on success
36/// * `Err(MapToError)` on failure
37pub fn init_heap(
38    mapper: &mut impl Mapper<Size4KiB>,
39    frame_allocator: &mut impl FrameAllocator<Size4KiB>,
40) -> Result<(), MapToError<Size4KiB>> {
41    let heap_start = VirtAddr::new(HEAP_START as u64);
42    let heap_end = heap_start + KERNEL_DEFAULT_HEAP_SIZE;
43
44    let page_range = {
45        let heap_start_page = Page::containing_address(heap_start);
46        let heap_end_page = Page::containing_address(heap_end - 1u64);
47        Page::range_inclusive(heap_start_page, heap_end_page)
48    };
49
50    for page in page_range {
51        let frame = frame_allocator
52            .allocate_frame()
53            .ok_or(MapToError::FrameAllocationFailed)?;
54        let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
55        unsafe {
56            mapper.map_to(page, frame, flags, frame_allocator)?.flush();
57        }
58    }
59
60    unsafe {
61        ALLOCATOR
62            .lock()
63            .claim(Span::new(
64                heap_start.as_mut_ptr::<u8>(),
65                heap_end.as_mut_ptr::<u8>(),
66            ))
67            .expect("Failed to claim heap region");
68    }
69
70    Ok(())
71}