~kameliya/syso

e1e59ca17a1649d2102744770310c62b671e3ee3 — Yuki Izumi 4 years ago c121ae4
Add placement allocator until we get a heap
3 files changed, 76 insertions(+), 44 deletions(-)

M libs/hole_list_allocator/src/lib.rs
M src/lib.rs
M src/mem.rs
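
Context for the diffs below: until now the allocator crate exposed a single static linked-list Heap; this commit introduces a SysoAllocator trait with two implementations, the existing Heap and a new placement (bump) allocator that hands out addresses from a moving cursor and never frees. A minimal, self-contained sketch of that bump step (illustration only, not repository code; the standalone main and the example addresses are assumptions):

struct PlacementAllocator {
    addr: usize,
}

impl PlacementAllocator {
    fn alloc(&mut self, size: usize, align: usize) -> *mut u8 {
        // Round the cursor up to the requested alignment...
        if self.addr % align != 0 {
            self.addr += align - (self.addr % align);
        }
        // ...hand out the current address, then bump past the allocation.
        let p = self.addr;
        self.addr += size;
        p as *mut u8
    }
}

fn main() {
    let mut a = PlacementAllocator { addr: 0x1000 };
    let first = a.alloc(24, 8);  // 0x1000; cursor moves to 0x1018
    let second = a.alloc(4, 16); // 0x1020, after aligning 0x1018 up to 16
    println!("{:p} {:p}", first, second);
}

Freeing is a no-op, which is fine for the boot-time allocations this is meant to cover until the real heap takes over.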
M libs/hole_list_allocator/src/lib.rs => libs/hole_list_allocator/src/lib.rs +54 -8
@@ -7,18 +7,64 @@ extern crate linked_list_allocator;

use linked_list_allocator::Heap;

-static mut HEAP: Heap = Heap::empty();
+trait SysoAllocator {
+    fn alloc(&mut self, size: usize, align: usize) -> *mut u8;
+    fn free(&mut self, ptr: *mut u8, size: usize, align: usize);
+}

+impl SysoAllocator for Heap {
+    fn alloc(&mut self, size: usize, align: usize) -> *mut u8 {
+        match self.allocate_first_fit(size, align) {
+            Some(ptr) => ptr,
+            None => panic!("SysoAllocator for Heap::alloc"),
+        }
+    }

+    fn free(&mut self, ptr: *mut u8, size: usize, align: usize) {
+        unsafe { self.deallocate(ptr, size, align) }
+    }
+}

+struct PlacementAllocator {
+    addr: usize,
+}

+impl SysoAllocator for PlacementAllocator {
+    fn alloc(&mut self, size: usize, align: usize) -> *mut u8 {
+        if self.addr % align != 0 {
+            self.addr += align - (self.addr % align);
+        }
+        let p = self.addr;
+        self.addr += size;
+        p as *mut u8
+    }

-pub unsafe fn init(base: usize, size: usize) {
-    HEAP = Heap::new(base, size);
+    fn free(&mut self, _ptr: *mut u8, _size: usize, _align: usize) {}
}

+static mut USING_HEAP: bool = false;
+static mut PLACEMENT_ALLOCATOR: PlacementAllocator = PlacementAllocator { addr: 0 };
+static mut HEAP_ALLOCATOR: Heap = Heap::empty();

+pub unsafe fn init_placement(base: usize) {
+    PLACEMENT_ALLOCATOR = PlacementAllocator { addr: base };
+}

+pub unsafe fn init_heap(base: usize, size: usize) {
+    HEAP_ALLOCATOR = Heap::new(base, size);
+}

+unsafe fn allocator() -> &'static mut SysoAllocator {
+    if USING_HEAP {
+        &mut HEAP_ALLOCATOR
+    } else {
+        &mut PLACEMENT_ALLOCATOR
+    }
+}

#[no_mangle]
pub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
-    match unsafe { HEAP.allocate_first_fit(size, align) } {
-        Some(ptr) => ptr,
-        None => panic!("__rust_allocate"),
-    }
+    unsafe { allocator().alloc(size, align) }
}

#[no_mangle]


@@ -28,7 +74,7 @@ pub extern fn __rust_usable_size(size: usize, _align: usize) -> usize {

#[no_mangle]
pub extern fn __rust_deallocate(ptr: *mut u8, size: usize, align: usize) {
-    unsafe { HEAP.deallocate(ptr, size, align) }
+    unsafe { allocator().free(ptr, size, align) }
}

#[no_mangle]
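
With this change __rust_allocate and __rust_deallocate (the low-level hooks that nightly Rust's alloc/collections machinery called into at the time) no longer touch the heap directly: they call allocator(), which returns a trait object for whichever allocator the USING_HEAP flag currently selects. A self-contained sketch of that dispatch pattern, written without mutable statics so it runs as an ordinary program (the Alloc, Placement and FakeHeap names are illustrative stand-ins, not repository code):

trait Alloc {
    fn alloc(&mut self, size: usize) -> usize;
}

struct Placement { addr: usize }

impl Alloc for Placement {
    fn alloc(&mut self, size: usize) -> usize {
        // Bump allocation: return the cursor, then advance it.
        let p = self.addr;
        self.addr += size;
        p
    }
}

struct FakeHeap;

impl Alloc for FakeHeap {
    fn alloc(&mut self, _size: usize) -> usize {
        0xdead_0000 // stand-in for a real first-fit search
    }
}

// Mirrors allocator(): the flag decides which allocator the caller gets.
fn allocator<'a>(using_heap: bool, placement: &'a mut Placement, heap: &'a mut FakeHeap) -> &'a mut dyn Alloc {
    if using_heap { heap } else { placement }
}

fn main() {
    let mut placement = Placement { addr: 0x10_0000 };
    let mut heap = FakeHeap;
    // Before the heap exists, requests are bump-allocated...
    println!("{:#x}", allocator(false, &mut placement, &mut heap).alloc(64));
    // ...and once the flag flips, the same call site goes to the heap.
    println!("{:#x}", allocator(true, &mut placement, &mut heap).alloc(64));
}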

M src/lib.rs => src/lib.rs +2 -1
@@ -1,4 +1,5 @@
#![feature(alloc)]
+#![feature(bit_set)]
#![feature(collections)]
#![feature(lang_items)]
#![feature(asm)]


@@ -6,6 +7,7 @@
#![no_std]

extern crate alloc;
+#[macro_use]
extern crate collections;
extern crate rlibc;
extern crate cpuio;


@@ -34,4 +36,3 @@ pub extern fn rust_entry(multiboot_addr: usize) {

    unsafe { asm!("hlt") }
}


M src/mem.rs => src/mem.rs +20 -35
@@ -1,5 +1,5 @@
use core::fmt::Write;
-use collections::BTreeSet;
+use collections::{BTreeSet, Vec};
use multiboot2::BootInformation;
use hole_list_allocator;



@@ -7,52 +7,37 @@ use debug::DEBUG;

pub fn init(multiboot: &BootInformation) {
    let memory_map_tag = multiboot.memory_map_tag().expect("Memory map tag required");
    unsafe { write!(DEBUG, "memory map:\n").unwrap() }
    for area in memory_map_tag.memory_areas() {
        unsafe { write!(DEBUG, "\tstart: 0x{:x}, length: 0x{:x}\n", area.base_addr, area.length).unwrap() }
    }

    let elf_sections_tag = multiboot.elf_sections_tag().expect("ELF sections tag required");
    unsafe { write!(DEBUG, "kernel sections:\n").unwrap() }
    for section in elf_sections_tag.sections() {
        unsafe { write!(DEBUG, "\taddr: 0x{:x}, size: 0x{:x}, flags: 0x{:x}\n", section.addr, section.size, section.flags).unwrap() }
    }

    let kernel_start = elf_sections_tag.sections().map(|s| s.addr)
        .min().unwrap() as usize;
    let kernel_end = elf_sections_tag.sections().map(|s| s.addr + s.size)
        .max().unwrap() as usize;

    let multiboot_start = multiboot.start_address();
    let multiboot_end = multiboot.end_address();

    unsafe { write!(DEBUG, "kernel_start: 0x{:x}, kernel_end: 0x{:x}\n", kernel_start, kernel_end).unwrap() }
    unsafe { write!(DEBUG, "multiboot_start: 0x{:x}, multiboot_end: 0x{:x}\n", multiboot_start, multiboot_end).unwrap() }
+    let placement_start = if kernel_end > multiboot_end { kernel_end } else { multiboot_end };
+    unsafe { hole_list_allocator::init_placement(placement_start) }

    let heap_start = if kernel_end > multiboot_end { kernel_end } else { multiboot_end };
+    /*
    let upper_memory = memory_map_tag.memory_areas().map(|a| a.base_addr + a.length)
        .max().unwrap() as usize;

-    unsafe { hole_list_allocator::init(0x20000, 0x20000) }
-    unsafe { write!(DEBUG, "HEAP created at 0x{:x}\n", heap_start).unwrap() }
+    let frame_count = upper_memory / 0x1000;
+    let mut frames: Vec<u8> = vec![0; frame_count / 8 + 1];

-    let mut usable_memory: BTreeSet<Extent> = BTreeSet::new();

-    usable_memory.insert(Extent::new(6, 9));
-    usable_memory.insert(Extent::new(0, 3));
-    usable_memory.insert(Extent::new(9, 13));
-    unsafe { write!(DEBUG, "Usable memory: {:?}\n", usable_memory).unwrap() }
-}
+    unsafe { hole_list_allocator::init_heap(heap_start, 0x10000) }
+    unsafe { write!(DEBUG, "HEAP created at 0x{:x}\n", heap_start).unwrap() }

#[derive(PartialEq, Eq, Ord, PartialOrd, Debug)]
struct Extent {
    base: usize,
    length: usize,
}
+    {
+        let mut usable_memory: BTreeSet<Extent> = BTreeSet::new();

impl Extent {
    fn new(base: usize, length: usize) -> Extent {
        Extent {
            base: base,
            length: length,
+        for area in memory_map_tag.memory_areas() {
+            usable_memory.insert(Extent::new(area.base_addr as usize, area.length as usize));
        }

+        //remove_extents(&usable_memory, kernel_start, kernel_end);
+        //remove_extents(&usable_memory, multiboot_start, multiboot_end);
+        unsafe { write!(DEBUG, "Upper memory: 0x{:x}\n", upper_memory).unwrap() }
+        unsafe { write!(DEBUG, "Usable memory: {:?}\n", usable_memory).unwrap() }
    }
+    */
}
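
The commented-out block in src/mem.rs shows where this is heading: sizing a frame bitmap from the top of usable memory, which reads as one bit per 4 KiB frame and is exactly the kind of early Vec allocation the placement allocator exists to serve before a real heap is up. A small sketch of that sizing arithmetic (the upper_memory value here is made up; in the kernel it would come from the multiboot memory map):

fn main() {
    // Illustrative value only; the kernel derives this from the memory map.
    let upper_memory: usize = 0x0800_0000; // 128 MiB

    // One 4 KiB frame per 0x1000 bytes, one bit per frame, eight bits per byte.
    let frame_count = upper_memory / 0x1000;
    let frames: Vec<u8> = vec![0; frame_count / 8 + 1];

    assert_eq!(frame_count, 32768);
    assert_eq!(frames.len(), 4097);
    println!("{} frames tracked in a {}-byte bitmap", frame_count, frames.len());
}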