Added partially working key input checking

This commit is contained in:
2024-09-01 23:42:39 +03:00
parent 1d61f97abb
commit 2dfd253f34
24 changed files with 566 additions and 90 deletions

View File

@@ -1,46 +1,46 @@
use core::{alloc::{AllocError, GlobalAlloc, Layout}, sync::atomic::{AtomicUsize, Ordering}};
//use core::{alloc::{AllocError, GlobalAlloc, Layout}, sync::atomic::{AtomicUsize, Ordering}};
use x86_64::align_up;
//use x86_64::align_up;
/// Bottom of the kernel heap. The octal literal equals 0x4000_0000;
/// with HEAP_SIZE the heap spans 0x40000000 - 0x40019000.
pub const HEAP_START: usize = 0o_000_001_000_000_0000; // 0x40000000 - 0x40019000
/// Total heap size in bytes.
pub const HEAP_SIZE: usize = 100 * 1024; // 100 KiB
/// A simple allocator that allocates memory linearly and ignores freed memory.
#[derive(Debug)]
pub struct BumpAllocator {
    heap_start: usize, // first address of the managed heap (inclusive)
    heap_end: usize,   // one past the last usable address (allocations end at or before this)
    next: AtomicUsize, // next candidate allocation address; only ever moves upward
}
// A simple allocator that allocates memory linearly and ignores freed memory.
//#[derive(Debug)]
//pub struct BumpAllocator {
// heap_start: usize,
// heap_end: usize,
// next: AtomicUsize,
//}
impl BumpAllocator {
    /// Creates a bump allocator serving allocations from `heap_start` up to
    /// (but not past) `heap_end`. The first allocation starts at `heap_start`.
    pub const fn new(heap_start: usize, heap_end: usize) -> Self {
        let next = AtomicUsize::new(heap_start);
        Self { heap_start, heap_end, next }
    }
}
//impl BumpAllocator {
// pub const fn new(heap_start: usize, heap_end: usize) -> Self {
// Self { heap_start, heap_end, next: AtomicUsize::new(heap_start) }
// }
//}
/// Linear ("bump") allocation: hand out addresses from `next` upward and
/// never reclaim them. `dealloc` is a deliberate no-op, so freed memory leaks.
unsafe impl GlobalAlloc for BumpAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // CAS loop: retry whenever another thread bumps `next` between our
        // load and our compare_exchange.
        loop {
            // Load the current state of the `next` field.
            let current_next = self.next.load(Ordering::Relaxed);
            // Honor the requested alignment; saturating_add prevents the end
            // address from wrapping around the top of the address space.
            let alloc_start = align_up(current_next as u64, layout.align() as u64);
            let alloc_end = alloc_start.saturating_add(layout.size() as u64);

            if alloc_end > self.heap_end as u64 {
                // GlobalAlloc contract: report failure by returning null so the
                // caller can route it through `handle_alloc_error`. Panicking
                // (let alone unwinding) out of a global allocator is forbidden.
                return core::ptr::null_mut();
            }

            // Publish the new `next` only if nobody raced us. BUGFIX: the old
            // code called `.unwrap()` on compare_exchange, which panics on the
            // Err (lost-race) path instead of retrying; branch on the Result.
            if self
                .next
                .compare_exchange(current_next, alloc_end as usize, Ordering::Relaxed, Ordering::Relaxed)
                .is_ok()
            {
                // `next` was successfully updated; the allocation succeeded.
                return alloc_start as *mut u8;
            }
        }
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // Intentionally empty: a bump allocator never reuses freed memory.
    }
}
// unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
// // do nothing, leak memory
// }
//}