Avoid multiple locking in the heap allocator

Yuke Peng
2024-09-10 14:44:47 +08:00
committed by Tate, Hongliang Tian
parent e995ed3ebc
commit f26c7a01cd

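What changed: the old slow path took the heap's spin lock for the first allocation attempt, released it, let `rescue` re-lock the heap inside `add_to_heap`, and then locked it a third time to retry the allocation. The new code acquires the IRQ-disabling spin lock once, up front, and threads the resulting `SpinLockGuard` through `rescue`, so a single acquisition covers the whole attempt-rescue-retry sequence and no other thread can slip in between the rescue and the retry. Below is a minimal sketch of that pattern, with `std::sync::Mutex` standing in for the kernel's `SpinLock` and a toy `Heap` whose `allocate` and `add_memory` are stand-ins, not the real implementation:

use std::sync::{Mutex, MutexGuard};

// Toy heap: a counter of free bytes stands in for a real free-list.
struct Heap {
    free: usize,
}

impl Heap {
    fn allocate(&mut self, size: usize) -> Result<usize, ()> {
        if size <= self.free {
            self.free -= size;
            Ok(size) // a real heap would return an address
        } else {
            Err(())
        }
    }

    fn add_memory(&mut self, size: usize) {
        self.free += size;
    }
}

// The rescue path receives the caller's guard instead of re-locking,
// so the lock is acquired exactly once per allocation attempt.
fn rescue(heap: &mut MutexGuard<'_, Heap>, request: usize) -> Result<(), ()> {
    heap.add_memory(request.max(4096));
    Ok(())
}

fn alloc(lock: &Mutex<Heap>, size: usize) -> Option<usize> {
    let mut heap = lock.lock().unwrap(); // the only lock acquisition
    if let Ok(allocation) = heap.allocate(size) {
        return Some(allocation);
    }
    // Still holding `heap`: the rescue and the retry below are atomic.
    if rescue(&mut heap, size).is_err() {
        return None;
    }
    heap.allocate(size).ok()
}

fn main() {
    let lock = Mutex::new(Heap { free: 16 });
    assert_eq!(alloc(&lock, 8), Some(8));
    assert!(alloc(&lock, 64).is_some()); // exercises the rescue path
}

The commit's diff: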

@@ -11,13 +11,12 @@ use super::paddr_to_vaddr;
 use crate::{
     mm::{page::allocator::PAGE_ALLOCATOR, PAGE_SIZE},
     prelude::*,
-    sync::SpinLock,
-    trap::disable_local,
+    sync::{LocalIrqDisabled, SpinLock, SpinLockGuard},
     Error,
 };
 
 #[global_allocator]
-static HEAP_ALLOCATOR: LockedHeapWithRescue = LockedHeapWithRescue::new(rescue);
+static HEAP_ALLOCATOR: LockedHeapWithRescue = LockedHeapWithRescue::new();
 
 #[alloc_error_handler]
 pub fn handle_alloc_error(layout: core::alloc::Layout) -> ! {
@@ -40,16 +39,12 @@ pub fn init() {
 
 struct LockedHeapWithRescue {
     heap: Once<SpinLock<Heap>>,
-    rescue: fn(&Self, &Layout) -> Result<()>,
 }
 
 impl LockedHeapWithRescue {
     /// Creates an new heap
-    pub const fn new(rescue: fn(&Self, &Layout) -> Result<()>) -> Self {
-        Self {
-            heap: Once::new(),
-            rescue,
-        }
+    pub const fn new() -> Self {
+        Self { heap: Once::new() }
     }
 
     /// SAFETY: The range [start, start + size) must be a valid memory region.
@@ -57,41 +52,25 @@ impl LockedHeapWithRescue {
         self.heap
             .call_once(|| SpinLock::new(Heap::new(start as usize, size)));
     }
-
-    /// SAFETY: The range [start, start + size) must be a valid memory region.
-    unsafe fn add_to_heap(&self, start: usize, size: usize) {
-        self.heap
-            .get()
-            .unwrap()
-            .disable_irq()
-            .lock()
-            .add_memory(start, size);
-    }
 }
 
 unsafe impl GlobalAlloc for LockedHeapWithRescue {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        let _guard = disable_local();
+        let mut heap = self.heap.get().unwrap().disable_irq().lock();
 
-        if let Ok(allocation) = self.heap.get().unwrap().lock().allocate(layout) {
+        if let Ok(allocation) = heap.allocate(layout) {
             return allocation as *mut u8;
         }
 
         // Avoid locking self.heap when calling rescue.
-        if (self.rescue)(self, &layout).is_err() {
+        if rescue(&mut heap, &layout).is_err() {
             return core::ptr::null_mut::<u8>();
         }
 
-        let res = self
-            .heap
-            .get()
-            .unwrap()
-            .lock()
-            .allocate(layout)
+        heap.allocate(layout)
             .map_or(core::ptr::null_mut::<u8>(), |allocation| {
                 allocation as *mut u8
-            });
-        res
+            })
     }
 
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
@@ -105,7 +84,7 @@ unsafe impl GlobalAlloc for LockedHeapWithRescue {
     }
 }
 
-fn rescue(heap: &LockedHeapWithRescue, layout: &Layout) -> Result<()> {
+fn rescue(heap: &mut SpinLockGuard<Heap, LocalIrqDisabled>, layout: &Layout) -> Result<()> {
     const MIN_NUM_FRAMES: usize = 0x4000000 / PAGE_SIZE; // 64MB
 
     debug!("enlarge heap, layout = {:?}", layout);
@@ -142,7 +121,7 @@ fn rescue(heap: &LockedHeapWithRescue, layout: &Layout) -> Result<()> {
             vaddr,
             PAGE_SIZE * num_frames
         );
-        heap.add_to_heap(vaddr, PAGE_SIZE * num_frames);
+        heap.add_memory(vaddr, PAGE_SIZE * num_frames);
     }
 
     Ok(())
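
A note on the new `rescue` signature: taking `&mut SpinLockGuard<Heap, LocalIrqDisabled>` turns "the heap lock is held and local IRQs are disabled" from a runtime convention (the old standalone `disable_local()` call) into a compile-time precondition, since only code that already holds such a guard can call `rescue` at all. The sketch below illustrates the idea with simplified, hypothetical stand-ins for the real guard and marker types:

use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};

struct Heap {
    free: usize,
}

impl Heap {
    fn add_memory(&mut self, _start: usize, size: usize) {
        self.free += size;
    }
}

// Marker type: local IRQs stay disabled for as long as the guard lives.
struct LocalIrqDisabled;

// Toy guard whose second type parameter records how the lock was taken.
struct SpinLockGuard<'a, T, Mode> {
    data: &'a mut T,
    _mode: PhantomData<Mode>,
}

impl<T, Mode> Deref for SpinLockGuard<'_, T, Mode> {
    type Target = T;
    fn deref(&self) -> &T {
        self.data
    }
}

impl<T, Mode> DerefMut for SpinLockGuard<'_, T, Mode> {
    fn deref_mut(&mut self) -> &mut T {
        self.data
    }
}

// Naming `LocalIrqDisabled` in the parameter type means the compiler
// rejects any caller that has not taken the IRQ-disabling lock first.
fn rescue(heap: &mut SpinLockGuard<'_, Heap, LocalIrqDisabled>, size: usize) {
    heap.add_memory(0, size);
}

fn main() {
    let mut inner = Heap { free: 0 };
    let mut guard = SpinLockGuard {
        data: &mut inner,
        _mode: PhantomData::<LocalIrqDisabled>,
    };
    rescue(&mut guard, 4096);
    assert_eq!(guard.free, 4096);
}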