Implement rescue for heap allocator

Jianfeng Jiang
2023-07-18 18:54:35 +08:00
committed by Tate, Hongliang Tian
parent 9137ef434f
commit c452fc35c6
4 changed files with 93 additions and 16 deletions

View File

@@ -4,7 +4,7 @@ use log::Level;
 pub const USER_STACK_SIZE: usize = PAGE_SIZE * 4;
 pub const KERNEL_STACK_SIZE: usize = PAGE_SIZE * 64;
-pub const KERNEL_HEAP_SIZE: usize = 0x5_000_000;
+pub const KERNEL_HEAP_SIZE: usize = PAGE_SIZE * 256;
 pub const KERNEL_OFFSET: usize = 0xffffffff80000000;
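
The static heap reservation shrinks here, presumably because the rescue mechanism added below lets the heap grow on demand from the frame allocator. A quick sanity check of the two sizes (assuming `PAGE_SIZE` is 4 KiB, which this hunk does not show):

```rust
// Rough size comparison; PAGE_SIZE = 4096 is an assumption, not part of this diff.
const PAGE_SIZE: usize = 4096;
const OLD_KERNEL_HEAP_SIZE: usize = 0x5_000_000;     // 80 MiB, reserved up front
const NEW_KERNEL_HEAP_SIZE: usize = PAGE_SIZE * 256; // 1 MiB, grown on demand

fn main() {
    assert_eq!(OLD_KERNEL_HEAP_SIZE, 80 * 1024 * 1024);
    assert_eq!(NEW_KERNEL_HEAP_SIZE, 1024 * 1024);
}
```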

View File

@@ -14,6 +14,7 @@
 #![feature(const_ops)]
 #![feature(generators)]
 #![feature(iter_from_generator)]
+#![feature(const_mut_refs)]
 extern crate alloc;

View File

@@ -1,13 +1,13 @@
 use alloc::vec::Vec;
 use buddy_system_allocator::FrameAllocator;
 use log::info;
-use spin::{Mutex, Once};
+use spin::Once;
-use crate::config::PAGE_SIZE;
+use crate::{config::PAGE_SIZE, sync::SpinLock};
 use super::{frame::VmFrameFlags, MemoryRegions, MemoryRegionsType, VmFrame};
-static FRAME_ALLOCATOR: Once<Mutex<FrameAllocator>> = Once::new();
+pub(super) static FRAME_ALLOCATOR: Once<SpinLock<FrameAllocator>> = Once::new();
 pub(crate) fn alloc(flags: VmFrameFlags) -> Option<VmFrame> {
     FRAME_ALLOCATOR
@@ -71,5 +71,5 @@ pub(crate) fn init(regions: &Vec<MemoryRegions>) {
             );
         }
     }
-    FRAME_ALLOCATOR.call_once(|| Mutex::new(allocator));
+    FRAME_ALLOCATOR.call_once(|| SpinLock::new(allocator));
 }
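
The frame allocator keeps its lazy-initialization shape; the diff only swaps `spin::Mutex` for the crate's own `SpinLock` and widens visibility to `pub(super)` so the heap allocator's rescue path can reach it. For context, a minimal sketch of the `Once<SpinLock<T>>` pattern, using the `spin` crate's `Mutex` as a stand-in for the crate-local `SpinLock` (which additionally offers IRQ-disabled locking):

```rust
// Sketch of lazy one-time initialization plus interior mutability,
// written against the `spin` crate; names here are illustrative.
use spin::{Mutex, Once};

static COUNTER: Once<Mutex<u32>> = Once::new();

fn init() {
    // Runs the closure at most once; later callers see the same value.
    COUNTER.call_once(|| Mutex::new(0));
}

fn bump() -> u32 {
    // `get()` returns None if `init` has not run yet.
    let mut guard = COUNTER.get().unwrap().lock();
    *guard += 1;
    *guard
}

fn main() {
    init();
    assert_eq!(bump(), 1);
    assert_eq!(bump(), 2);
}
```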

View File

@@ -1,12 +1,21 @@
-use crate::{config::KERNEL_HEAP_SIZE, sync::SpinLock};
+use crate::config::{KERNEL_HEAP_SIZE, PAGE_SIZE};
+use crate::prelude::*;
+use crate::sync::SpinLock;
+use crate::trap::disable_local;
+use crate::vm::frame_allocator::FRAME_ALLOCATOR;
+use crate::Error;
+use align_ext::AlignExt;
 use buddy_system_allocator::Heap;
 use core::{
     alloc::{GlobalAlloc, Layout},
     ptr::NonNull,
 };
+use log::debug;
+use super::paddr_to_vaddr;
 #[global_allocator]
-static HEAP_ALLOCATOR: LockedHeap<32> = LockedHeap::new();
+static HEAP_ALLOCATOR: LockedHeapWithRescue<32> = LockedHeapWithRescue::new(rescue);
 #[alloc_error_handler]
 pub fn handle_alloc_error(layout: core::alloc::Layout) -> ! {
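
Before reading the second hunk, it may help to see the try/rescue/retry shape in isolation. A self-contained sketch with standard-library types standing in for the kernel's `SpinLock` and buddy `Heap` (all names here are illustrative, not from the commit):

```rust
// The pattern: try the fast path under the lock, and on failure call a
// rescue callback with the lock released, then retry once.
use std::sync::Mutex;

struct RescuablePool {
    free: Mutex<Vec<u64>>,
    // Called with the lock released; refills the pool from a backing store.
    rescue: fn(&RescuablePool) -> Result<(), ()>,
}

impl RescuablePool {
    fn alloc(&self) -> Option<u64> {
        if let Some(v) = self.free.lock().unwrap().pop() {
            return Some(v); // fast path
        }
        // Slow path: the lock is not held while rescuing, so the rescue
        // callback is free to take it (or other locks) itself.
        (self.rescue)(self).ok()?;
        self.free.lock().unwrap().pop()
    }
}

fn refill(pool: &RescuablePool) -> Result<(), ()> {
    pool.free.lock().unwrap().extend(0..4);
    Ok(())
}

fn main() {
    let pool = RescuablePool { free: Mutex::new(Vec::new()), rescue: refill };
    assert_eq!(pool.alloc(), Some(3)); // the first alloc triggers the rescue
}
```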
@@ -22,32 +31,99 @@ pub fn init() {
     }
 }
-struct LockedHeap<const ORDER: usize>(SpinLock<Heap<ORDER>>);
+struct LockedHeapWithRescue<const ORDER: usize> {
+    heap: SpinLock<Heap<ORDER>>,
+    rescue: fn(&Self, &Layout) -> Result<()>,
+}
-impl<const ORDER: usize> LockedHeap<ORDER> {
+impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
     /// Creates a new heap
-    pub const fn new() -> Self {
-        LockedHeap(SpinLock::new(Heap::<ORDER>::new()))
+    pub const fn new(rescue: fn(&Self, &Layout) -> Result<()>) -> Self {
+        Self {
+            heap: SpinLock::new(Heap::<ORDER>::new()),
+            rescue,
+        }
     }
     /// Safety: The range [start, start + size) must be a valid memory region.
     pub unsafe fn init(&self, start: *const u8, size: usize) {
-        self.0.lock_irq_disabled().init(start as usize, size);
+        self.heap.lock_irq_disabled().init(start as usize, size);
     }
+
+    /// Safety: The range [start, start + size) must be a valid memory region.
+    unsafe fn add_to_heap(&self, start: usize, size: usize) {
+        self.heap
+            .lock_irq_disabled()
+            .add_to_heap(start, start + size)
+    }
 }
-unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
+unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        self.0
-            .lock_irq_disabled()
+        let _guard = disable_local();
+
+        if let Ok(allocation) = self.heap.lock().alloc(layout) {
+            return allocation.as_ptr();
+        }
+
+        // Avoid locking self.heap when calling rescue.
+        if (self.rescue)(&self, &layout).is_err() {
+            return 0 as *mut u8;
+        }
+
+        self.heap
+            .lock()
             .alloc(layout)
             .map_or(0 as *mut u8, |allocation| allocation.as_ptr())
     }
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
         debug_assert!(ptr as usize != 0);
-        self.0
+        self.heap
             .lock_irq_disabled()
             .dealloc(NonNull::new_unchecked(ptr), layout)
     }
 }
+
+fn rescue<const ORDER: usize>(heap: &LockedHeapWithRescue<ORDER>, layout: &Layout) -> Result<()> {
+    const MIN_NUM_FRAMES: usize = 0x4000000 / PAGE_SIZE; // 64MB
+
+    debug!("enlarge heap, layout = {:?}", layout);
+    let mut num_frames = {
+        let align = PAGE_SIZE.max(layout.align());
+        debug_assert!(align % PAGE_SIZE == 0);
+        let size = layout.size().align_up(align);
+        size / PAGE_SIZE
+    };
+
+    let allocation_start = {
+        let mut frame_allocator = FRAME_ALLOCATOR.get().unwrap().lock();
+        if num_frames >= MIN_NUM_FRAMES {
+            frame_allocator.alloc(num_frames).ok_or(Error::NoMemory)?
+        } else {
+            match frame_allocator.alloc(MIN_NUM_FRAMES) {
+                None => frame_allocator.alloc(num_frames).ok_or(Error::NoMemory)?,
+                Some(start) => {
+                    num_frames = MIN_NUM_FRAMES;
+                    start
+                }
+            }
+        }
+    };
+    // FIXME: the alloc function internally allocates heap memory (inside FrameAllocator),
+    // so if the heap is nearly exhausted, allocating frames may fail too.
+
+    let vaddr = paddr_to_vaddr(allocation_start * PAGE_SIZE);
+
+    // Safety: the frames are allocated from the FrameAllocator and never deallocated,
+    // so the address range stays valid.
+    unsafe {
+        debug!(
+            "add frames to heap: addr = 0x{:x}, size = 0x{:x}",
+            vaddr,
+            PAGE_SIZE * num_frames
+        );
+        heap.add_to_heap(vaddr, PAGE_SIZE * num_frames);
+    }
+
+    Ok(())
+}
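
Finally, the sizing logic in `rescue` is worth a worked example: the requested layout is rounded up to whole pages, and small requests are then bumped up to `MIN_NUM_FRAMES` so each rescue grows the heap by at least 64 MiB when the frame allocator can supply it. A plain-integer re-implementation of that computation (the commit itself uses `align_ext::AlignExt::align_up`; 4 KiB pages are assumed):

```rust
const PAGE_SIZE: usize = 4096;                       // assumed, as in the diff above
const MIN_NUM_FRAMES: usize = 0x4000000 / PAGE_SIZE; // 64 MiB worth of frames

fn frames_for(size: usize, align: usize) -> usize {
    // Mirrors the `num_frames` block in `rescue`.
    let align = PAGE_SIZE.max(align);
    debug_assert!(align % PAGE_SIZE == 0);
    let size = (size + align - 1) / align * align; // align_up(size, align)
    size / PAGE_SIZE
}

fn main() {
    // A 100-byte request still needs at least one whole frame ...
    assert_eq!(frames_for(100, 8), 1);
    // ... and rescue then rounds that up to MIN_NUM_FRAMES (16384 frames),
    // so small rescues still grow the heap by a full 64 MiB when possible.
    assert_eq!(MIN_NUM_FRAMES, 16384);
    assert_eq!(frames_for(100, 8).max(MIN_NUM_FRAMES), 16384);
}
```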