Add partially working keyboard input checking
This commit is contained in:
@@ -1,46 +1,46 @@
|
||||
use core::{alloc::{AllocError, GlobalAlloc, Layout}, sync::atomic::{AtomicUsize, Ordering}};
|
||||
//use core::{alloc::{AllocError, GlobalAlloc, Layout}, sync::atomic::{AtomicUsize, Ordering}};
|
||||
|
||||
use x86_64::align_up;
|
||||
//use x86_64::align_up;
|
||||
|
||||
/// Bottom of the kernel heap. The octal literal equals `0x4000_0000`;
/// together with `HEAP_SIZE` the heap spans `0x40000000..0x40019000`.
pub const HEAP_START: usize = 0o_000_001_000_000_0000;

/// Total size of the kernel heap: 100 KiB.
pub const HEAP_SIZE: usize = 100 * 1024;
||||
/// A simple allocator that allocates memory linearly and ignores freed memory.
///
/// Allocation atomically bumps `next` forward through the region
/// `heap_start..heap_end`; deallocation is a no-op, so freed blocks are
/// never reused (memory is leaked by design).
#[derive(Debug)]
pub struct BumpAllocator {
    // First address of the managed heap region; `next` starts here.
    heap_start: usize,
    // Exclusive upper bound of the heap (allocations may end exactly here).
    heap_end: usize,
    // Next free address; advanced atomically on every allocation.
    next: AtomicUsize,
}
||||
impl BumpAllocator {
    /// Creates a bump allocator managing the region `heap_start..heap_end`.
    ///
    /// The allocation cursor starts at `heap_start`; no memory is touched
    /// until the first call to `alloc`. `const` so it can initialize a
    /// `static` global allocator.
    pub const fn new(heap_start: usize, heap_end: usize) -> Self {
        Self { heap_start, heap_end, next: AtomicUsize::new(heap_start) }
    }
}
||||
unsafe impl GlobalAlloc for BumpAllocator {
|
||||
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
|
||||
loop {
|
||||
// load current state of the `next` field
|
||||
let current_next = self.next.load(Ordering::Relaxed);
|
||||
let alloc_start = align_up(current_next as u64, layout.align() as u64);
|
||||
let alloc_end = alloc_start.saturating_add(layout.size() as u64);
|
||||
//unsafe impl GlobalAlloc for BumpAllocator {
|
||||
// unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
|
||||
// loop {
|
||||
// // load current state of the `next` field
|
||||
// let current_next = self.next.load(Ordering::Relaxed);
|
||||
// let alloc_start = align_up(current_next as u64, layout.align() as u64);
|
||||
// let alloc_end = alloc_start.saturating_add(layout.size() as u64);
|
||||
//
|
||||
// if alloc_end <= self.heap_end as u64 {
|
||||
// // update the `next` pointer if it still has the value `current_next`
|
||||
// let next_now = self.next.compare_exchange(current_next, alloc_end as usize, Ordering::Relaxed, Ordering::Relaxed).unwrap();
|
||||
// if next_now == current_next {
|
||||
// // next address was successfully updated, allocation succeeded
|
||||
// return alloc_start as *mut u8;
|
||||
// }
|
||||
// } else {
|
||||
// panic!("OUT OF MEMORY");
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
if alloc_end <= self.heap_end as u64 {
|
||||
// update the `next` pointer if it still has the value `current_next`
|
||||
let next_now = self.next.compare_exchange(current_next, alloc_end as usize, Ordering::Relaxed, Ordering::Relaxed).unwrap();
|
||||
if next_now == current_next {
|
||||
// next address was successfully updated, allocation succeeded
|
||||
return alloc_start as *mut u8;
|
||||
}
|
||||
} else {
|
||||
panic!("OUT OF MEMORY");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
|
||||
// do nothing, leak memory
|
||||
}
|
||||
}
|
||||
// unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
|
||||
// // do nothing, leak memory
|
||||
// }
|
||||
//}
|
||||
|
||||
@@ -161,7 +161,7 @@ impl MemInfo<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(mb_ptr: usize) {
|
||||
pub fn init(mb_ptr: usize) -> paging::ActivePageTable {
|
||||
once::assert_has_not_been_called!("mem::init must be called only once");
|
||||
let mem_info = MemInfo::load(mb_ptr);
|
||||
let mut frame_alloc = area_frame_alloc::AreaFrameAllocator::new(&mem_info);
|
||||
@@ -179,4 +179,5 @@ pub fn init(mb_ptr: usize) {
|
||||
unsafe {
|
||||
GLOBAL_ALLOCATOR.lock().claim(Span::from_base_size(HEAP_START as *mut u8, HEAP_SIZE)).unwrap();
|
||||
}
|
||||
active_table
|
||||
}
|
||||
|
||||
@@ -189,7 +189,9 @@ pub fn remap_the_kernel<A>(allocator: &mut A, mem_info: &MemInfo) -> ActivePageT
|
||||
continue;
|
||||
}
|
||||
|
||||
log::debug!("mapping section '{}' at addr: {:#x}, size: {:#x}", section.name().unwrap_or("NONE"), section.start_address(), section.size());
|
||||
//log::debug!("mapping section '{}' at addr: {:#x}, size: {:#x}",
|
||||
// section.name().unwrap_or("NONE"), section.start_address(), section.size());
|
||||
|
||||
assert!(section.start_address() as usize % PAGE_SIZE == 0,
|
||||
"sections need to be page aligned");
|
||||
|
||||
|
||||
Reference in New Issue
Block a user