use alloc::alloc::{GlobalAlloc, Layout};
use core::ptr::null_mut;
use fixed_size_block::FixedSizeBlockAllocator;
use x86_64::{
    structures::paging::{
        mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
    },
    VirtAddr,
};

pub mod bump;
pub mod fixed_size_block;
pub mod linked_list;

/// Virtual start address of the kernel heap (a region of the address space
/// that is otherwise unused).
pub const HEAP_START: usize = 0x_4444_4444_0000;
/// Size of the kernel heap.
pub const HEAP_SIZE: usize = 4 * 1024 * 1024; // 4 MiB

/// The global allocator used by the `alloc` crate: a fixed-size block
/// allocator protected by a spinlock.
#[global_allocator]
static ALLOCATOR: Locked<FixedSizeBlockAllocator> = Locked::new(FixedSizeBlockAllocator::new());

/// Maps the heap pages to physical frames and initializes the global
/// allocator with the resulting memory region.
///
/// Must be called once before the first heap allocation.
pub fn init_heap(
    mapper: &mut impl Mapper<Size4KiB>,
    frame_allocator: &mut impl FrameAllocator<Size4KiB>,
) -> Result<(), MapToError<Size4KiB>> {
    // Compute the inclusive page range that covers the heap region.
    let page_range = {
        let heap_start = VirtAddr::new(HEAP_START as u64);
        let heap_end = heap_start + HEAP_SIZE - 1u64; // inclusive end address
        let heap_start_page = Page::containing_address(heap_start);
        let heap_end_page = Page::containing_address(heap_end);
        Page::range_inclusive(heap_start_page, heap_end_page)
    };

    // Map each heap page to a freshly allocated physical frame.
    for page in page_range {
        let frame = frame_allocator
            .allocate_frame()
            .ok_or(MapToError::FrameAllocationFailed)?;
        let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
        mapper.map_to(page, frame, flags, frame_allocator)?.flush();
    }

    // Safety: the just-mapped region is unused, so the allocator may take
    // exclusive ownership of it.
    unsafe {
        ALLOCATOR.lock().init(HEAP_START, HEAP_SIZE);
    }

    Ok(())
}
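
// Example call site (a sketch, not part of this module): in the kernel's
// entry point, after a `Mapper` and a `FrameAllocator` have been set up.
// `memory::init` and `BootInfoFrameAllocator` are assumed helpers from the
// surrounding project, shown only for illustration:
//
// let phys_mem_offset = VirtAddr::new(boot_info.physical_memory_offset);
// let mut mapper = unsafe { memory::init(phys_mem_offset) };
// let mut frame_allocator =
//     unsafe { BootInfoFrameAllocator::init(&boot_info.memory_map) };
// allocator::init_heap(&mut mapper, &mut frame_allocator)
//     .expect("heap initialization failed");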

/// A dummy allocator that always reports out-of-memory. Useful as a
/// placeholder before a real heap exists.
pub struct Dummy;

unsafe impl GlobalAlloc for Dummy {
    unsafe fn alloc(&self, _layout: Layout) -> *mut u8 {
        // Signal allocation failure for every request.
        null_mut()
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // `alloc` never returns a valid pointer, so there is nothing to free.
        panic!("dealloc should never be called")
    }
}
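
// If `Dummy` were registered as the global allocator instead (sketch):
//
// #[global_allocator]
// static ALLOCATOR: Dummy = Dummy;
//
// then every heap allocation (e.g. `Box::new(42)`) would receive a null
// pointer and end up in the `#[alloc_error_handler]`.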

/// A wrapper around `spin::Mutex` to permit trait implementations.
///
/// Required because trait impls such as `GlobalAlloc` cannot be written
/// directly for the foreign type `spin::Mutex<A>`.
pub struct Locked<A> {
    inner: spin::Mutex<A>,
}

impl<A> Locked<A> {
    pub const fn new(inner: A) -> Self {
        Locked {
            inner: spin::Mutex::new(inner),
        }
    }

    pub fn lock(&self) -> spin::MutexGuard<A> {
        self.inner.lock()
    }
}

/// Align the given address `addr` upwards to alignment `align`.
///
/// Requires that `align` is a power of two.
fn align_up(addr: usize, align: usize) -> usize {
    // For power-of-two `align`, adding `align - 1` and clearing the low bits
    // rounds `addr` up to the next multiple of `align`.
    (addr + align - 1) & !(align - 1)
}
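
// Worked example: align_up(13, 8)
//   13 + 7 = 20           (0b10100)
//   !(8 - 1) = !0b00111   (…11111000)
//   20 & …11111000 = 16   → the next multiple of 8 at or above 13
//
// An equivalent, more readable (but slower) formulation for comparison:
//
// fn align_up(addr: usize, align: usize) -> usize {
//     let remainder = addr % align;
//     if remainder == 0 {
//         addr // addr already aligned
//     } else {
//         addr - remainder + align
//     }
// }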