Commit: wip
Signed-off-by: Graham MacDonald <[email protected]>
gmacd committed Sep 25, 2024
1 parent d895415 commit 4a11bbb
Showing 7 changed files with 182 additions and 93 deletions.
17 changes: 8 additions & 9 deletions aarch64/src/init.rs
@@ -11,7 +11,6 @@ use crate::vm::kernel_root;
use crate::vm::PageTable;
use crate::vmalloc;
use alloc::boxed::Box;
use core::alloc::Layout;
use core::ptr;
use port::bumpalloc::Bump;
use port::fdt::DeviceTree;
@@ -130,14 +129,14 @@ pub fn init(dtb_va: usize) {

println!("looping now");

{
let test = vmalloc::alloc(unsafe { Layout::from_size_align_unchecked(1024, 16) });
println!("test alloc: {:p}", test);
let test2 = vmalloc::alloc(unsafe { Layout::from_size_align_unchecked(1024, 16) });
println!("test alloc: {:p}", test2);
let test3 = vmalloc::alloc(unsafe { Layout::from_size_align_unchecked(1024, 4096) });
println!("test alloc: {:p}", test3);
}
// {
// let test = vmalloc::alloc(unsafe { Layout::from_size_align_unchecked(1024, 16) });
// println!("test alloc: {:p}", test);
// let test2 = vmalloc::alloc(unsafe { Layout::from_size_align_unchecked(1024, 16) });
// println!("test alloc: {:p}", test2);
// let test3 = vmalloc::alloc(unsafe { Layout::from_size_align_unchecked(1024, 4096) });
// println!("test alloc: {:p}", test3);
// }

#[allow(clippy::empty_loop)]
loop {}
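
The smoke test above is commented out in this commit, but with vmalloc::Allocator now registered as the global allocator (see runtime.rs below), an equivalent check could go through Box instead of calling vmalloc::alloc directly. A minimal sketch, assuming vmalloc::init has already run; since dealloc is still a panicking stub, the boxes are leaked rather than dropped:

{
    // Each Box::new routes through vmalloc::Allocator::alloc.
    let test = Box::new([0u8; 1024]);
    println!("test alloc: {:p}", test.as_ptr());
    let test2 = Box::new([0u8; 1024]);
    println!("test alloc: {:p}", test2.as_ptr());
    // dealloc currently panics ("fake dealloc"), so leak instead of dropping.
    core::mem::forget(test);
    core::mem::forget(test2);
}
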
20 changes: 4 additions & 16 deletions aarch64/src/runtime.rs
@@ -6,12 +6,15 @@ use crate::kmem::physaddr_as_virt;
use crate::registers::rpi_mmio;
use crate::uartmini::MiniUart;
use crate::vmalloc;
use alloc::alloc::{GlobalAlloc, Layout};
use alloc::alloc::Layout;
use core::fmt::Write;
use core::panic::PanicInfo;
use port::devcons::PanicConsole;
use port::mem::VirtRange;

#[global_allocator]
static ALLOCATOR: vmalloc::Allocator = vmalloc::Allocator {};

// TODO
// - Add qemu integration test
// - Use Console via println!() macro once available
@@ -40,18 +43,3 @@ pub fn panic(info: &PanicInfo) -> ! {
fn oom(_layout: Layout) -> ! {
panic!("oom");
}

struct Allocator {}

unsafe impl GlobalAlloc for Allocator {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
vmalloc::alloc(layout)
}

unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
panic!("fake dealloc");
}
}

#[global_allocator]
static ALLOCATOR: Allocator = Allocator {};
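
Moving the #[global_allocator] registration up here (and the GlobalAlloc impl into vmalloc, below) means every alloc-crate type in the kernel is now served by the vmem allocator. A hypothetical sketch of what that enables once vmalloc::init has run; demo code, not part of the commit:

// Hypothetical usage elsewhere in the kernel, after vmalloc::init:
use alloc::{boxed::Box, vec::Vec};

fn heap_demo() {
    let boxed = Box::new(42u64); // served by vmalloc::Allocator::alloc
    let mut bytes: Vec<u8> = Vec::with_capacity(256);
    bytes.push(1);
    // Deallocation is still a panicking stub, so leak both for now.
    core::mem::forget(boxed);
    core::mem::forget(bytes);
}
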
88 changes: 30 additions & 58 deletions aarch64/src/vmalloc.rs
@@ -1,76 +1,48 @@
use alloc::sync::Arc;
use core::{alloc::Layout, mem::MaybeUninit};
use alloc::alloc::{GlobalAlloc, Layout};
use core::mem::MaybeUninit;
use port::{
mcslock::{Lock, LockNode},
mem::{VirtRange, PAGE_SIZE_4K},
vmem::{Allocator, Arena, Boundary},
mem::VirtRange,
vmemalloc::VmemAlloc,
};

#[cfg(not(test))]
use port::println;

// TODO replace with some sort of OnceLock? We need this to be dynamically created,
// but we're assuming VmAlloc is Sync.
static VMALLOC: Lock<Option<&'static mut VmAlloc>> = Lock::new("vmalloc", None);

// The core arenas are statically allocated. They cannot be created in const
// functions, so we declare them as MaybeUninit before initialising and
// referencing them from VmAlloc, from where they can be used in the global allocator.
//static mut MAYBE_HEAP_ARENA: MaybeUninit<Arena> = MaybeUninit::uninit();

/// VmAlloc is an attempt to write a Bonwick vmem-style allocator. It currently
/// expects another allocator to exist beforehand.
/// TODO Use the allocator api trait.
struct VmAlloc {
heap_arena: Arc<Lock<Arena>, &'static dyn core::alloc::Allocator>,
_va_arena: Arc<Lock<Arena>, &'static dyn core::alloc::Allocator>,
}

impl VmAlloc {
fn new(early_allocator: &'static dyn core::alloc::Allocator, heap_range: VirtRange) -> Self {
let heap_arena = Arc::new_in(
Lock::new(
"heap_arena",
Arena::new_with_allocator(
"heap",
Some(Boundary::from(heap_range)),
PAGE_SIZE_4K,
early_allocator,
),
),
early_allocator,
);

// va_arena imports from heap_arena, so can use allocations from that heap to
// allocate blocks of tags.
let va_arena = Arc::new_in(
Lock::new(
"kmem_va_arena",
Arena::new("kmem_va_arena", None, PAGE_SIZE_4K, Some(heap_arena.clone())),
),
early_allocator,
);

Self { heap_arena, _va_arena: va_arena }
}
}
static VMEM_ALLOC: Lock<Option<&'static mut VmemAlloc>> = Lock::new("vmemalloc", None);

pub fn init(early_allocator: &'static dyn core::alloc::Allocator, heap_range: VirtRange) {
let node = LockNode::new();
let mut vmalloc = VMALLOC.lock(&node);
let mut vmalloc = VMEM_ALLOC.lock(&node);
*vmalloc = Some({
static mut MAYBE_VMALLOC: MaybeUninit<VmAlloc> = MaybeUninit::uninit();
static mut MAYBE_VMALLOC: MaybeUninit<VmemAlloc> = MaybeUninit::uninit();
unsafe {
MAYBE_VMALLOC.write(VmAlloc::new(early_allocator, heap_range));
MAYBE_VMALLOC.write({
let vmemalloc = VmemAlloc::new(early_allocator, heap_range);
vmemalloc.init();
vmemalloc
});
MAYBE_VMALLOC.assume_init_mut()
}
});
}

pub fn alloc(layout: Layout) -> *mut u8 {
let node = LockNode::new();
let mut lock = VMALLOC.lock(&node);
let vmalloc = lock.as_deref_mut().unwrap();
pub struct Allocator {}

let node = LockNode::new();
let mut guard = vmalloc.heap_arena.lock(&node);
// TODO use layout properly
guard.alloc(layout.size())
unsafe impl GlobalAlloc for Allocator {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
println!("vmalloc::alloc");

// Get the main allocator
let node = LockNode::new();
let mut lock = VMEM_ALLOC.lock(&node);
let vmemalloc = lock.as_deref_mut().unwrap();
vmemalloc.alloc(layout)
}

unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
panic!("fake dealloc");
}
}
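
The TODO above asks for a OnceLock-style replacement for the Lock<Option<&'static mut ...>> pattern. core provides no OnceLock in no_std, but a minimal write-once cell can be built from a pair of atomic flags. A sketch under that assumption, not part of this commit:

use core::cell::UnsafeCell;
use core::sync::atomic::{AtomicBool, Ordering};

/// Minimal write-once cell: set() claims the slot, then publishes the value.
struct OnceCell<T> {
    busy: AtomicBool,
    ready: AtomicBool,
    value: UnsafeCell<Option<T>>,
}

unsafe impl<T: Send + Sync> Sync for OnceCell<T> {}

impl<T> OnceCell<T> {
    const fn new() -> Self {
        Self {
            busy: AtomicBool::new(false),
            ready: AtomicBool::new(false),
            value: UnsafeCell::new(None),
        }
    }

    /// Returns false if the cell was already claimed.
    fn set(&self, value: T) -> bool {
        if self.busy.swap(true, Ordering::Acquire) {
            return false; // someone else claimed the slot
        }
        unsafe { *self.value.get() = Some(value) };
        self.ready.store(true, Ordering::Release); // publish
        true
    }

    fn get(&self) -> Option<&T> {
        if self.ready.load(Ordering::Acquire) {
            unsafe { (*self.value.get()).as_ref() }
        } else {
            None
        }
    }
}
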
1 change: 1 addition & 0 deletions port/src/lib.rs
@@ -14,5 +14,6 @@ pub mod fdt;
pub mod mcslock;
pub mod mem;
pub mod vmem;
pub mod vmemalloc;

extern crate alloc;
16 changes: 7 additions & 9 deletions port/src/vmem.rs
@@ -3,9 +3,6 @@ use crate::{mcslock::Lock, mem::PAGE_SIZE_4K};
use alloc::sync::Arc;
use core::{alloc::Layout, ops::Range, ptr::null_mut, slice};

#[cfg(not(test))]
use crate::println;

// TODO reserve recursive area in vmem(?)
// TODO Add hashtable for allocated tags - makes it faster when freeing, given only an address.
// TODO Add support for quantum caches once we have slab allocators implemented.
@@ -311,10 +308,11 @@ impl Arena {
quantum: usize,
_parent: Option<Arc<Lock<Arena>, &dyn core::alloc::Allocator>>,
) -> Self {
println!("Arena::new name:{} initial_span:{:?} quantum:{:x}", name, initial_span, quantum);
// println!("Arena::new name:{} initial_span:{:?} quantum:{:x}", name, initial_span, quantum);

let mut arena =
Self { name, quantum, segment_list: TagList::new(), tag_pool: TagPool::new() };
//arena.add_tags_to_pool(tags);

if let Some(span) = initial_span {
arena.add_initial_span(span);
@@ -354,10 +352,10 @@ impl Arena {
quantum: usize,
tags: &mut [TagItem],
) -> Self {
println!(
"Arena::new_with_tags name:{} initial_span:{:?} quantum:{:x}",
name, initial_span, quantum
);
// println!(
// "Arena::new_with_tags name:{} initial_span:{:?} quantum:{:x}",
// name, initial_span, quantum
// );

let mut arena =
Self { name, quantum, segment_list: TagList::new(), tag_pool: TagPool::new() };
@@ -400,7 +398,7 @@ impl Arena {

/// Allocate a segment, returned as a boundary
fn alloc_segment(&mut self, size: usize) -> Result<Boundary, AllocError> {
println!("alloc_segment size: {}", size);
// println!("alloc_segment size: {}", size);

// Round size up to a multiple of quantum
let size = {
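
The rounding block above is truncated in this view. The usual computation rounds the request up to the next multiple of the arena's quantum, which is not required to be a power of two; a sketch of what the elided code likely does:

// Sketch: round `size` up to the next multiple of `self.quantum`.
let size = {
    let rem = size % self.quantum;
    if rem == 0 { size } else { size + (self.quantum - rem) }
};
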
131 changes: 131 additions & 0 deletions port/src/vmemalloc.rs
@@ -0,0 +1,131 @@
use crate::{
mcslock::{Lock, LockNode},
mem::{VirtRange, PAGE_SIZE_4K},
vmem::{Allocator, Arena, Boundary},
};
use alloc::sync::Arc;
use core::alloc::{AllocError, Layout};
use core::ptr::NonNull;

/// VmemAlloc is an attempt to write a Bonwick vmem-style allocator. It currently
/// expects another allocator to exist beforehand.
/// TODO Use the allocator api trait.
pub struct VmemAlloc {
heap_arena: Arc<Lock<Arena>, &'static dyn core::alloc::Allocator>,
va_arena: Option<Arc<Lock<Arena>, &'static dyn core::alloc::Allocator>>,
kmem_default_arena: Option<Arc<Lock<Arena>, &'static dyn core::alloc::Allocator>>,
}

impl VmemAlloc {
// TODO Specify quantum caching
pub fn new(
early_allocator: &'static dyn core::alloc::Allocator,
heap_range: VirtRange,
) -> Self {
let heap_arena = Arc::new_in(
Lock::new(
"heap_arena",
Arena::new_with_allocator(
"heap",
Some(Boundary::from(heap_range)),
PAGE_SIZE_4K,
early_allocator,
),
),
early_allocator,
);

// va_arena imports from heap_arena, so can use allocations from that heap to
// allocate blocks of tags.
let va_arena = Arc::new_in(
Lock::new(
"kmem_va",
Arena::new("kmem_va_arena", None, PAGE_SIZE_4K, Some(heap_arena.clone())),
),
early_allocator,
);

// kmem_default_arena - backing store for most object caches
let kmem_default_arena = Arc::new_in(
Lock::new(
"kmem_default_arena",
Arena::new("kmem_default", None, PAGE_SIZE_4K, Some(va_arena.clone())),
),
early_allocator,
);

Self { heap_arena, va_arena: Some(va_arena), kmem_default_arena: Some(kmem_default_arena) }
}

/// Create the remaining early arenas. To be called immediately after new()
/// as it uses self as the allocator.
pub fn init(&self) {
// va_arena imports from heap_arena, so can use allocations from that heap to
// allocate blocks of tags.
let va_arena = Arc::new_in(
Lock::new(
"kmem_va",
Arena::new("kmem_va_arena", None, PAGE_SIZE_4K, Some(self.heap_arena.clone())),
),
self,
);

// kmem_default_arena - backing store for most object caches
// let kmem_default_arena = Arc::new_in(
// Lock::new(
// "kmem_default_arena",
// Arena::new("kmem_default", None, PAGE_SIZE_4K, Some(va_arena.clone())),
// ),
// self,
// );
//self.va_arena = Some(va_arena as Allocator);
}

pub fn alloc(&self, layout: Layout) -> *mut u8 {
let node = LockNode::new();
let mut guard = self
.kmem_default_arena
.as_deref()
.expect("kmem_default_arena not yet created")
.lock(&node);
// TODO use layout properly
guard.alloc(layout.size())
}
}

unsafe impl core::alloc::Allocator for VmemAlloc {
fn allocate(
&self,
layout: Layout,
) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
let bytes = self.alloc(layout);
if bytes.is_null() {
Err(AllocError {})
} else {
let nonnull_bytes_ptr = NonNull::new(bytes).unwrap();
Ok(NonNull::slice_from_raw_parts(nonnull_bytes_ptr, layout.size()))
}
}

unsafe fn deallocate(&self, _ptr: core::ptr::NonNull<u8>, _layout: Layout) {
todo!()
}
}

#[cfg(test)]
mod tests {

use crate::bumpalloc::Bump;

use super::*;

#[test]
fn alloc_with_importing() {
static BUMP_ALLOC: Bump<{ 4 * PAGE_SIZE_4K }, PAGE_SIZE_4K> = Bump::new(0);
let vmalloc =
VmemAlloc::new(&BUMP_ALLOC, VirtRange::with_len(0xffff800000800000, 0x1000000));
vmalloc.init();
let b = vmalloc.alloc(unsafe { Layout::from_size_align_unchecked(1024, 1) });
assert_ne!(b, 0 as *mut u8);
}
}
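
Since VmemAlloc also implements core::alloc::Allocator, a companion test could exercise that path as well. A hypothetical addition to the test module above, not part of this commit (vmem::Allocator is also in scope in this file, hence the fully qualified call):

#[test]
fn alloc_via_allocator_trait() {
    static BUMP_ALLOC: Bump<{ 4 * PAGE_SIZE_4K }, PAGE_SIZE_4K> = Bump::new(0);
    let vmalloc =
        VmemAlloc::new(&BUMP_ALLOC, VirtRange::with_len(0xffff800000800000, 0x1000000));
    vmalloc.init();

    let layout = Layout::from_size_align(1024, 16).unwrap();
    let ptr = core::alloc::Allocator::allocate(&vmalloc, layout).expect("allocation failed");
    assert_eq!(ptr.len(), 1024); // NonNull<[u8]> carries the slice length
    // deallocate() is still todo!(), so the allocation is deliberately leaked.
}
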
2 changes: 1 addition & 1 deletion rust-toolchain.toml
@@ -1,5 +1,5 @@
[toolchain]
channel = "nightly-2024-08-27"
channel = "nightly-2024-09-08"
components = ["rustfmt", "rust-src", "clippy", "llvm-tools"]
targets = [
"aarch64-unknown-none",
