sparreal_kernel/mem/
mod.rs
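//! Kernel memory management: the global heap allocator, virtual-address
//! offset bookkeeping, and MMIO mapping helpers.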

#![allow(unused)]

use core::{
    alloc::GlobalAlloc,
    ptr::{NonNull, null_mut, slice_from_raw_parts_mut},
};

use buddy_system_allocator::Heap;
use log::debug;
use spin::Mutex;

use crate::{globals::global_val, platform::kstack_size, println};

mod addr;
mod cache;
#[cfg(feature = "mmu")]
pub mod mmu;
pub mod region;

pub use addr::*;

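/// The kernel's global allocator. It starts empty and is given memory by
/// `init_heap` and `init_page_and_memory`.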
#[global_allocator]
static ALLOCATOR: KAllocator = KAllocator {
    inner: Mutex::new(Heap::empty()),
};

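/// Kernel heap allocator: a buddy-system heap behind a spinlock.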
pub struct KAllocator {
    pub(crate) inner: Mutex<Heap<32>>,
}

impl KAllocator {
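    /// Discard the current heap and reinitialize it to manage only `memory`.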
    pub fn reset(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();

        let mut h = Heap::empty();

        unsafe { h.init(memory.as_mut_ptr() as usize, memory.len()) };

        *g = h;
    }

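    /// Add `memory` to the heap as an additional region, keeping existing ones.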
    pub fn add_to_heap(&self, memory: &mut [u8]) {
        let mut g = self.inner.lock();
        let range = memory.as_mut_ptr_range();

        unsafe { g.add_to_heap(range.start as usize, range.end as usize) };
    }
}

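// Route Rust's global allocations through the kernel buddy heap.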
unsafe impl GlobalAlloc for KAllocator {
    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
        if let Ok(p) = self.inner.lock().alloc(layout) {
            p.as_ptr()
        } else {
            null_mut()
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
        self.inner
            .lock()
            .dealloc(unsafe { NonNull::new_unchecked(ptr) }, layout);
    }
}

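// Kernel virtual-address offset recorded at boot (`VA_OFFSET`) and the offset
// currently in effect (`VA_OFFSET_NOW`).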
static mut VA_OFFSET: usize = 0;
static mut VA_OFFSET_NOW: usize = 0;

pub(crate) fn set_va_offset(offset: usize) {
    unsafe { VA_OFFSET = offset };
}

pub fn va_offset() -> usize {
    unsafe { VA_OFFSET }
}

pub(crate) unsafe fn set_va_offset_now(va: usize) {
    unsafe { VA_OFFSET_NOW = va };
}

fn va_offset_now() -> usize {
    unsafe { VA_OFFSET_NOW }
}

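/// Initialize the kernel heap from the main memory region, excluding the kernel
/// image (everything up to the end of `.bss`) and the boot stack when they fall
/// inside that region.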
pub(crate) fn init_heap() {
    let main = global_val().main_memory.clone();
    let mut start = VirtAddr::from(main.start);
    let mut end = VirtAddr::from(main.end);

    let bss_end = crate::mem::region::bss().as_ptr_range().end.into();

    if (start..end).contains(&bss_end) {
        start = bss_end;
    }

    let stack_top = VirtAddr::from(global_val().kstack_top);
    let stack_bottom = stack_top - kstack_size();

    if (start..end).contains(&stack_bottom) {
        end = stack_bottom;
    }

    println!("heap add memory [{}, {})", start, end);
    ALLOCATOR
        .add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.as_mut_ptr(), end - start) });

    println!("heap initialized");
}

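/// Set up the page tables (with the `mmu` feature) and add every other platform
/// memory region, apart from the main one already used by `init_heap`, to the heap.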
pub(crate) fn init_page_and_memory() {
    #[cfg(feature = "mmu")]
    mmu::init_table();

    let main = global_val().main_memory.clone();

    for memory in global_val().platform_info.memorys() {
        if memory.contains(&main.start) {
            continue;
        }
        let start = VirtAddr::from(memory.start);
        let end = VirtAddr::from(memory.end);
        let len = memory.end - memory.start;

        debug!("Heap add memory [{}, {})", start, end);
        ALLOCATOR.add_to_heap(unsafe { &mut *slice_from_raw_parts_mut(start.as_mut_ptr(), len) });
    }
}

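/// A C-compatible `[start, end)` memory range.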
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct CMemRange {
    pub start: usize,
    pub end: usize,
}

impl CMemRange {
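    /// View the range as a byte slice; the range must describe valid memory.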
    pub fn as_slice(&self) -> &'static [u8] {
        unsafe { core::slice::from_raw_parts(self.start as *const u8, self.end - self.start) }
    }
}

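/// Memory ranges of the kernel image sections.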
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct KernelRegions {
    pub text: CMemRange,
    pub rodata: CMemRange,
    pub data: CMemRange,
    pub bss: CMemRange,
}

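/// Map a physical memory range for device access and return a pointer to it.
/// Without the `mmu` feature the physical address is returned directly.
///
/// A minimal usage sketch (the device address is hypothetical and this assumes
/// `PhysAddr` can be built from a `usize`):
///
/// ```ignore
/// let base = iomap(PhysAddr::from(0x0900_0000usize), 0x1000);
/// let value = unsafe { base.as_ptr().cast::<u32>().read_volatile() };
/// ```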
pub fn iomap(paddr: PhysAddr, _size: usize) -> NonNull<u8> {
    #[cfg(feature = "mmu")]
    {
        mmu::iomap(paddr, _size)
    }

    #[cfg(not(feature = "mmu"))]
    unsafe {
        NonNull::new_unchecked(paddr.as_usize() as *mut u8)
    }
}