Implement heap tests

This commit is contained in:
2025-12-20 16:32:33 +01:00
parent abdef70198
commit e22dc65588
6 changed files with 310 additions and 47 deletions

0
heap/.cargo/config.toml Normal file
View File

View File

@@ -5,3 +5,6 @@ edition = "2024"
[dependencies]
NovaError = {path = "../NovaError"}
[dev-dependencies]
rand = "0.9.2"

View File

@@ -3,35 +3,28 @@
use core::{
alloc::GlobalAlloc,
default::Default,
mem::size_of,
prelude::v1::*,
ptr::{self, null_mut, read_volatile},
ptr::{self, null_mut},
result::Result,
};
use NovaError::NovaError;
#[cfg(not(target_os = "none"))]
extern crate std;
extern crate alloc;
#[repr(C, align(16))]
#[derive(Clone, Copy)]
pub struct HeapHeader {
pub next: *mut HeapHeader,
before: *mut HeapHeader,
pub size: usize,
next: Option<*mut HeapHeader>,
before: Option<*mut HeapHeader>,
size: usize,
free: bool,
}
const HEAP_HEADER_SIZE: usize = size_of::<HeapHeader>();
const MIN_BLOCK_SIZE: usize = 16;
// TODO: This implementation has to be reevaluated when implementing multiprocessing
// Spinlock could be a solution but has its issues:
// https://matklad.github.io/2020/01/02/spinlocks-considered-harmful.html
pub struct Heap {
pub start_address: *mut HeapHeader,
pub end_address: *mut HeapHeader,
@@ -50,14 +43,14 @@ impl Heap {
self.start_address = heap_start as *mut HeapHeader;
self.end_address = heap_end as *mut HeapHeader;
self.raw_size = heap_end - heap_start;
self.raw_size = heap_end - heap_start + 1;
unsafe {
ptr::write(
self.start_address,
HeapHeader {
next: null_mut(),
before: null_mut(),
next: None,
before: None,
size: self.raw_size - HEAP_HEADER_SIZE,
free: true,
},
@@ -68,10 +61,11 @@ impl Heap {
unsafe fn find_first_fit(&self, size: usize) -> Result<*mut HeapHeader, NovaError> {
let mut current = self.start_address;
while !fits(size, current) {
if (*self.start_address).next.is_null() {
if let Some(next) = (*self.start_address).next {
current = next;
} else {
return Err(NovaError::HeapFull);
}
current = (*current).next;
}
Ok(current)
}
@@ -113,8 +107,8 @@ impl Heap {
// Handle case where fragmenting center free space
let next = (*current).next;
if !(*current).next.is_null() {
(*next).before = new_address;
if let Some(next) = next {
(*next).before = Some(new_address);
}
unsafe {
@@ -122,35 +116,38 @@ impl Heap {
new_address as *mut HeapHeader,
HeapHeader {
next,
before: current,
size: (*current).size - size - HEAP_HEADER_SIZE,
before: Some(current),
size: (*current).size - byte_offset,
free: true,
},
)
};
(*current).next = new_address;
(*current).next = Some(new_address);
(*current).free = false;
(*current).size = size;
}
pub fn free(&self, pointer: *mut u8) -> Result<(), NovaError> {
let mut segment = unsafe { pointer.sub(HEAP_HEADER_SIZE) as *mut HeapHeader };
let mut segment = Self::get_header_ref_from_data_pointer(pointer);
unsafe {
// IF prev is free:
// Delete header, add size to previous and fix pointers.
// Move Head left
if !(*segment).before.is_null() && (*(*segment).before).free {
let before_head = (*segment).before;
(*before_head).size += (*segment).size + HEAP_HEADER_SIZE;
delete_header(segment);
segment = before_head;
if let Some(before_head) = (*segment).before {
if (*before_head).free {
(*before_head).size += (*segment).size + HEAP_HEADER_SIZE;
delete_header(segment);
segment = before_head;
}
}
// IF next is free:
// Delete next header and merge size, fix pointers
if !(*segment).next.is_null() && (*(*segment).next).free {
let next_head = (*segment).next;
(*segment).size += (*next_head).size + HEAP_HEADER_SIZE;
delete_header(next_head);
if let Some(next_head) = (*segment).next {
if (*next_head).free {
(*segment).size += (*next_head).size + HEAP_HEADER_SIZE;
delete_header(next_head);
}
}
// Neither: Set free
@@ -159,6 +156,10 @@ impl Heap {
Ok(())
}
/// Recover the `HeapHeader` that sits immediately before an allocation's
/// payload — the inverse of handing out `header + HEAP_HEADER_SIZE` from
/// `malloc` (matches the `pointer.sub(HEAP_HEADER_SIZE)` used by `free`).
///
/// NOTE(review): this is a safe `const fn` wrapping unsafe pointer
/// arithmetic. `pointer` must be a value previously returned by `malloc`
/// on this heap, otherwise the computed header pointer is bogus —
/// consider making this an `unsafe fn` with a documented `# Safety`
/// contract instead.
const fn get_header_ref_from_data_pointer(pointer: *mut u8) -> *mut HeapHeader {
unsafe { pointer.sub(HEAP_HEADER_SIZE) as *mut HeapHeader }
}
}
unsafe impl GlobalAlloc for Heap {
@@ -178,17 +179,17 @@ unsafe fn fits(size: usize, header: *mut HeapHeader) -> bool {
}
unsafe fn delete_header(header: *mut HeapHeader) {
let before = (*header).before;
let next = (*header).next;
let before_opt = (*header).before;
let next_opt = (*header).next;
if !before.is_null() {
(*before).next = next;
if let Some(before) = before_opt {
(*before).next = next_opt;
}
if !next.is_null() {
(*next).before = before;
if let Some(next) = next_opt {
(*next).before = before_opt;
}
}
#[cfg(test)]
mod tests {}
mod tests;

125
heap/src/tests.rs Normal file
View File

@@ -0,0 +1,125 @@
use super::*;
use rand::{self, random_range};
extern crate std;
static HEAP_SIZE: usize = 1024;
/// A first allocation must be carved out of the root block, rounded up to
/// the block granularity, and followed by a correctly linked free remainder.
#[test]
fn test_heap_allocation() {
    // Owned, stable backing buffer for the lifetime of the test.
    let heap_vector = Box::new([0u8; HEAP_SIZE]);
    let mut heap = Heap::empty();
    heap.init(
        &heap_vector[0] as *const u8 as usize,
        &heap_vector[HEAP_SIZE - 1] as *const u8 as usize,
    );
    let root_header = heap.start_address;
    // Leave room for BOTH the allocation's header and a trailing free-block
    // header; otherwise `(*malloc_header).next` is legitimately `None` (as
    // exercised by `test_full_heap`) and the `unwrap` below panics flakily.
    // Sizes are rounded up to MIN_BLOCK_SIZE (asserted below) and
    // HEAP_HEADER_SIZE is a multiple of 16 thanks to `#[repr(align(16))]`,
    // so any request <= HEAP_SIZE - 2 * HEAP_HEADER_SIZE still fits after
    // rounding. Start at 1: zero-sized allocation semantics are untested here.
    let malloc_size = random_range(1..=(HEAP_SIZE - 2 * HEAP_HEADER_SIZE));
    let malloc = heap.malloc(malloc_size).unwrap();
    let malloc_header = Heap::get_header_ref_from_data_pointer(malloc);
    // The first allocation must be served from the root block.
    assert_eq!(root_header, malloc_header);
    unsafe {
        let actual_alloc_size = (*malloc_header).size;
        let actual_raw_size = actual_alloc_size + HEAP_HEADER_SIZE;
        // Verify sizing: at least what was requested, block-aligned.
        assert!(actual_alloc_size >= malloc_size);
        assert_eq!(actual_alloc_size % MIN_BLOCK_SIZE, 0);
        // Verify section is occupied
        assert!(!(*malloc_header).free);
        // Verify the remainder was split off into a new free header placed
        // directly after the allocated bytes, linked in both directions.
        let next = (*malloc_header).next.unwrap();
        assert_eq!(malloc_header.byte_add(actual_raw_size), next);
        assert!((*next).free);
        assert_eq!((*next).before.unwrap(), malloc_header);
        assert_eq!((*next).size, HEAP_SIZE - actual_raw_size - HEAP_HEADER_SIZE)
    }
}
/// Allocating every usable byte must succeed, leave no trailing free block,
/// and cause any further allocation to be rejected.
#[test]
fn test_full_heap() {
    let backing = Box::new([0u8; HEAP_SIZE]);
    let mut heap = Heap::empty();
    let start = &backing[0] as *const u8 as usize;
    let end = &backing[HEAP_SIZE - 1] as *const u8 as usize;
    heap.init(start, end);
    // Request everything except the space consumed by the block header itself.
    let whole_heap = HEAP_SIZE - HEAP_HEADER_SIZE;
    let allocation = heap.malloc(whole_heap).unwrap();
    let header = Heap::get_header_ref_from_data_pointer(allocation);
    unsafe {
        // The single block is marked occupied ...
        assert!(!(*header).free);
        // ... and nothing was left over to split a successor block from.
        assert!((*header).next.is_none());
    }
    // With the heap exhausted, even the smallest request must fail.
    let second = heap.malloc(MIN_BLOCK_SIZE);
    assert!(second.is_err());
}
/// Freeing the only allocation must merge it back into the trailing free
/// block, restoring the pristine root block.
#[test]
fn test_freeing_root() {
    let heap_vector = Box::new([0u8; HEAP_SIZE]);
    let mut heap = Heap::empty();
    heap.init(
        &heap_vector[0] as *const u8 as usize,
        &heap_vector[HEAP_SIZE - 1] as *const u8 as usize,
    );
    let root_header = heap.start_address;
    let root_header_start_size = unsafe { (*root_header).size };
    // Half-heap upper bound guarantees a trailing free block exists, so the
    // free below exercises the "merge with next" path. Start at 1 to avoid
    // a zero-sized allocation (semantics unspecified — TODO confirm).
    let malloc_size = random_range(1..((HEAP_SIZE - HEAP_HEADER_SIZE) / 2));
    let malloc = heap.malloc(malloc_size).unwrap();
    let malloc_header = Heap::get_header_ref_from_data_pointer(malloc);
    unsafe {
        assert!(!(*malloc_header).free);
        assert!((*malloc_header).size >= malloc_size);
        assert!((*root_header).next.is_some());
        // Freeing the sole allocation must coalesce everything back into
        // one free root block of the original size.
        assert!(heap.free(malloc).is_ok());
        assert_eq!((*root_header).size, root_header_start_size);
        assert!((*root_header).next.is_none());
    }
}
/// Freeing two adjacent blocks must coalesce them (payload plus the freed
/// header) into a single free block.
///
/// BUG FIX(review): this function was previously nested inside
/// `test_freeing_root` without a `#[test]` attribute, so it compiled but
/// never ran. It is now a top-level test.
#[test]
fn test_merging_free_sections() {
    let heap_vector = Box::new([0u8; HEAP_SIZE]);
    let mut heap = Heap::empty();
    heap.init(
        &heap_vector[0] as *const u8 as usize,
        &heap_vector[HEAP_SIZE - 1] as *const u8 as usize,
    );
    // Three back-to-back allocations; the third pins the tail so freeing the
    // first two can only merge with each other, not with the trailing block.
    let malloc1 = heap.malloc(MIN_BLOCK_SIZE).unwrap();
    let malloc_header_before = unsafe { *Heap::get_header_ref_from_data_pointer(malloc1) };
    let malloc2 = heap.malloc(MIN_BLOCK_SIZE).unwrap();
    let _malloc3 = heap.malloc(MIN_BLOCK_SIZE).unwrap();
    unsafe {
        // Freeing block 1 flips its free flag but cannot change its size:
        // both neighbours are still occupied.
        assert!(heap.free(malloc1).is_ok());
        let malloc_header_free = *Heap::get_header_ref_from_data_pointer(malloc1);
        assert_ne!(malloc_header_before.free, malloc_header_free.free);
        assert_eq!(malloc_header_before.size, malloc_header_free.size);
        // Freeing block 2 must coalesce it (payload + header) into block 1.
        assert!(heap.free(malloc2).is_ok());
        let malloc_header_merge = *Heap::get_header_ref_from_data_pointer(malloc1);
        assert!(malloc_header_merge.free);
        assert_eq!(
            malloc_header_merge.size,
            malloc_header_free.size + MIN_BLOCK_SIZE + HEAP_HEADER_SIZE
        );
    }
}