From 04f16dd1ce4e5bffe58d60b6c56b7c70228f69f0 Mon Sep 17 00:00:00 2001
From: Sainnhe Park
Date: Thu, 13 Jan 2022 16:31:49 +0800
Subject: [PATCH] Allocator Designs

---
 src/allocator.rs                  |  28 +++++++-
 src/allocator/bump.rs             |  56 +++++++++++++++
 src/allocator/fixed_size_block.rs |  86 +++++++++++++++++++++++
 src/allocator/linked_list.rs      | 113 ++++++++++++++++++++++++++++++
 src/lib.rs                        |   5 +-
 tests/heap_allocation.rs          |  10 +++
 6 files changed, 294 insertions(+), 4 deletions(-)
 create mode 100644 src/allocator/bump.rs
 create mode 100644 src/allocator/fixed_size_block.rs
 create mode 100644 src/allocator/linked_list.rs

diff --git a/src/allocator.rs b/src/allocator.rs
index 8067411..03d9c1b 100644
--- a/src/allocator.rs
+++ b/src/allocator.rs
@@ -1,6 +1,6 @@
 use alloc::alloc::{GlobalAlloc, Layout};
 use core::ptr::null_mut;
-use linked_list_allocator::LockedHeap;
+use fixed_size_block::FixedSizeBlockAllocator;
 use x86_64::{
     structures::paging::{
         mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
@@ -8,11 +8,15 @@ use x86_64::{
     VirtAddr,
 };
 
+pub mod bump;
+pub mod fixed_size_block;
+pub mod linked_list;
+
 pub const HEAP_START: usize = 0x_4444_4444_0000;
 pub const HEAP_SIZE: usize = 100 * 1024;
 
 #[global_allocator]
-static ALLOCATOR: LockedHeap = LockedHeap::empty();
+static ALLOCATOR: Locked<FixedSizeBlockAllocator> = Locked::new(FixedSizeBlockAllocator::new());
 
 pub fn init_heap(
     mapper: &mut impl Mapper<Size4KiB>,
@@ -52,3 +56,23 @@ unsafe impl GlobalAlloc for Dummy {
         panic!("dealloc should be never called")
     }
 }
+
+pub struct Locked<A> {
+    inner: spin::Mutex<A>,
+}
+
+impl<A> Locked<A> {
+    pub const fn new(inner: A) -> Self {
+        Locked {
+            inner: spin::Mutex::new(inner),
+        }
+    }
+
+    pub fn lock(&self) -> spin::MutexGuard<A> {
+        self.inner.lock()
+    }
+}
+
+fn align_up(addr: usize, align: usize) -> usize {
+    (addr + align - 1) & !(align - 1)
+}
diff --git a/src/allocator/bump.rs b/src/allocator/bump.rs
new file mode 100644
index 0000000..e7836fd
--- /dev/null
+++ b/src/allocator/bump.rs
@@ -0,0 +1,56 @@
+use super::{align_up, Locked};
+use alloc::alloc::{GlobalAlloc, Layout};
+use core::ptr;
+
+pub struct BumpAllocator {
+    heap_start: usize,
+    heap_end: usize,
+    next: usize,
+    allocations: usize,
+}
+
+impl BumpAllocator {
+    pub const fn new() -> Self {
+        BumpAllocator {
+            heap_start: 0,
+            heap_end: 0,
+            next: 0,
+            allocations: 0,
+        }
+    }
+
+    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
+        self.heap_start = heap_start;
+        self.heap_end = heap_start.saturating_add(heap_size);
+        self.next = heap_start;
+    }
+}
+
+unsafe impl GlobalAlloc for Locked<BumpAllocator> {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        let mut bump = self.lock();
+
+        let alloc_start = align_up(bump.next, layout.align());
+        let alloc_end = match alloc_start.checked_add(layout.size()) {
+            Some(end) => end,
+            None => return ptr::null_mut(),
+        };
+
+        if alloc_end > bump.heap_end {
+            ptr::null_mut()
+        } else {
+            bump.next = alloc_end;
+            bump.allocations += 1;
+            alloc_start as *mut u8
+        }
+    }
+
+    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
+        let mut bump = self.lock();
+
+        bump.allocations -= 1;
+        if bump.allocations == 0 {
+            bump.next = bump.heap_start;
+        }
+    }
+}
diff --git a/src/allocator/fixed_size_block.rs b/src/allocator/fixed_size_block.rs
new file mode 100644
index 0000000..8293719
--- /dev/null
+++ b/src/allocator/fixed_size_block.rs
@@ -0,0 +1,86 @@
+use super::Locked;
+use alloc::alloc::{GlobalAlloc, Layout};
+use core::{
+    mem,
+    ptr::{self, NonNull},
+};
+
+const BLOCK_SIZES: &[usize] = &[8, 16, 32, 64, 128, 256, 512, 1024, 2048];
+
+fn list_index(layout: &Layout) -> Option<usize> {
+    let required_block_size = layout.size().max(layout.align());
+    BLOCK_SIZES.iter().position(|&s| s >= required_block_size)
+}
+
+struct ListNode {
+    next: Option<&'static mut ListNode>,
+}
+
+pub struct FixedSizeBlockAllocator {
+    list_heads: [Option<&'static mut ListNode>; BLOCK_SIZES.len()],
+    fallback_allocator: linked_list_allocator::Heap,
+}
+
+impl FixedSizeBlockAllocator {
+    pub const fn new() -> Self {
+        const EMPTY: Option<&'static mut ListNode> = None;
+        FixedSizeBlockAllocator {
+            list_heads: [EMPTY; BLOCK_SIZES.len()],
+            fallback_allocator: linked_list_allocator::Heap::empty(),
+        }
+    }
+
+    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
+        self.fallback_allocator.init(heap_start, heap_size);
+    }
+
+    fn fallback_alloc(&mut self, layout: Layout) -> *mut u8 {
+        match self.fallback_allocator.allocate_first_fit(layout) {
+            Ok(ptr) => ptr.as_ptr(),
+            Err(_) => ptr::null_mut(),
+        }
+    }
+}
+
+unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        let mut allocator = self.lock();
+        match list_index(&layout) {
+            Some(index) => {
+                match allocator.list_heads[index].take() {
+                    Some(node) => {
+                        allocator.list_heads[index] = node.next.take();
+                        node as *mut ListNode as *mut u8
+                    }
+                    None => {
+                        let block_size = BLOCK_SIZES[index];
+                        let block_align = block_size;
+                        let layout = Layout::from_size_align(block_size, block_align).unwrap();
+                        allocator.fallback_alloc(layout)
+                    }
+                }
+            }
+            None => allocator.fallback_alloc(layout),
+        }
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        let mut allocator = self.lock();
+        match list_index(&layout) {
+            Some(index) => {
+                let new_node = ListNode {
+                    next: allocator.list_heads[index].take(),
+                };
+                assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
+                assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
+                let new_node_ptr = ptr as *mut ListNode;
+                new_node_ptr.write(new_node);
+                allocator.list_heads[index] = Some(&mut *new_node_ptr);
+            }
+            None => {
+                let ptr = NonNull::new(ptr).unwrap();
+                allocator.fallback_allocator.deallocate(ptr, layout);
+            }
+        }
+    }
+}
diff --git a/src/allocator/linked_list.rs b/src/allocator/linked_list.rs
new file mode 100644
index 0000000..91c7142
--- /dev/null
+++ b/src/allocator/linked_list.rs
@@ -0,0 +1,113 @@
+use super::{align_up, Locked};
+use alloc::alloc::{GlobalAlloc, Layout};
+use core::{mem, ptr};
+
+struct ListNode {
+    size: usize,
+    next: Option<&'static mut ListNode>,
+}
+
+impl ListNode {
+    const fn new(size: usize) -> Self {
+        ListNode { size, next: None }
+    }
+
+    fn start_addr(&self) -> usize {
+        self as *const Self as usize
+    }
+
+    fn end_addr(&self) -> usize {
+        self.start_addr() + self.size
+    }
+}
+
+pub struct LinkedListAllocator {
+    head: ListNode,
+}
+
+impl LinkedListAllocator {
+    pub const fn new() -> Self {
+        Self {
+            head: ListNode::new(0),
+        }
+    }
+
+    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
+        self.add_free_region(heap_start, heap_size);
+    }
+
+    unsafe fn add_free_region(&mut self, addr: usize, size: usize) {
+        assert_eq!(align_up(addr, mem::align_of::<ListNode>()), addr);
+        assert!(size >= mem::size_of::<ListNode>());
+
+        let mut node = ListNode::new(size);
+        node.next = self.head.next.take();
+        let node_ptr = addr as *mut ListNode;
+        node_ptr.write(node);
+        self.head.next = Some(&mut *node_ptr)
+    }
+
+    fn find_region(&mut self, size: usize, align: usize) -> Option<(&'static mut ListNode, usize)> {
+        let mut current = &mut self.head;
+        while let Some(ref mut region) = current.next {
+            if let Ok(alloc_start) = Self::alloc_from_region(&region, size, align) {
+                let next = region.next.take();
+                let ret = Some((current.next.take().unwrap(), alloc_start));
+                current.next = next;
+                return ret;
+            } else {
+                current = current.next.as_mut().unwrap();
+            }
+        }
+
+        None
+    }
+
+    fn alloc_from_region(region: &ListNode, size: usize, align: usize) -> Result<usize, ()> {
+        let alloc_start = align_up(region.start_addr(), align);
+        let alloc_end = alloc_start.checked_add(size).ok_or(())?;
+
+        if alloc_end > region.end_addr() {
+            return Err(());
+        }
+
+        let excess_size = region.end_addr() - alloc_end;
+        if excess_size > 0 && excess_size < mem::size_of::<ListNode>() {
+            return Err(());
+        }
+
+        Ok(alloc_start)
+    }
+
+    fn size_align(layout: Layout) -> (usize, usize) {
+        let layout = layout
+            .align_to(mem::align_of::<ListNode>())
+            .expect("adjusting alignment failed")
+            .pad_to_align();
+        let size = layout.size().max(mem::size_of::<ListNode>());
+        (size, layout.align())
+    }
+}
+
+unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        let (size, align) = LinkedListAllocator::size_align(layout);
+        let mut allocator = self.lock();
+
+        if let Some((region, alloc_start)) = allocator.find_region(size, align) {
+            let alloc_end = alloc_start.checked_add(size).expect("overflow");
+            let excess_size = region.end_addr() - alloc_end;
+            if excess_size > 0 {
+                allocator.add_free_region(alloc_end, excess_size);
+            }
+            alloc_start as *mut u8
+        } else {
+            ptr::null_mut()
+        }
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        let (size, _) = LinkedListAllocator::size_align(layout);
+
+        self.lock().add_free_region(ptr as usize, size)
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index f64f77f..d602150 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,10 +1,11 @@
 #![no_std]
 #![cfg_attr(test, no_main)]
 #![feature(custom_test_frameworks)]
-#![test_runner(crate::test_runner)]
-#![reexport_test_harness_main = "test_main"]
 #![feature(abi_x86_interrupt)]
 #![feature(alloc_error_handler)]
+#![feature(const_mut_refs)]
+#![test_runner(crate::test_runner)]
+#![reexport_test_harness_main = "test_main"]
 
 extern crate alloc;
 
diff --git a/tests/heap_allocation.rs b/tests/heap_allocation.rs
index 558511b..9150cbe 100644
--- a/tests/heap_allocation.rs
+++ b/tests/heap_allocation.rs
@@ -54,6 +54,16 @@ fn many_boxes() {
     }
 }
 
+#[test_case]
+fn many_boxes_long_lived() {
+    let long_lived = Box::new(1);
+    for i in 0..HEAP_SIZE {
+        let x = Box::new(i);
+        assert_eq!(*x, i);
+    }
+    assert_eq!(*long_lived, 1);
+}
+
 #[panic_handler]
 fn panic(info: &PanicInfo) -> ! {
     anos::test_panic_handler(info)
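
Note on selecting a design: the patch wires FixedSizeBlockAllocator in as the #[global_allocator], while the bump and linked-list allocators are compiled but left unused. Since all three types expose a const new() and an init(heap_start, heap_size), switching designs only touches the ALLOCATOR static. A minimal sketch, not part of the patch, assuming init_heap() already calls ALLOCATOR.lock().init(HEAP_START, HEAP_SIZE) as in the pre-existing heap setup:

    // in src/allocator.rs -- hypothetical variant selecting the linked-list design
    use linked_list::LinkedListAllocator;

    #[global_allocator]
    static ALLOCATOR: Locked<LinkedListAllocator> =
        Locked::new(LinkedListAllocator::new());

The same swap works for bump::BumpAllocator, whose dealloc only reclaims memory once its allocation counter drops back to zero (exercised by the many_boxes_long_lived test above).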