Fetching buffer arguments from user space.

This commit is contained in:
Yifan Wu 2020-12-06 13:56:13 +08:00
parent d38b24a9cb
commit 9366099b28
20 changed files with 405 additions and 82 deletions

View file

@ -71,6 +71,7 @@ impl VirtAddr {
/// Page number of the page containing this address (round down).
pub fn floor(&self) -> VirtPageNum { VirtPageNum(self.0 / PAGE_SIZE) }
/// Page number rounded up: addresses inside a page map to the next page boundary.
pub fn ceil(&self) -> VirtPageNum { VirtPageNum((self.0 + PAGE_SIZE - 1) / PAGE_SIZE) }
/// Byte offset within the page — low bits masked by PAGE_SIZE - 1 (assumes PAGE_SIZE is a power of two).
pub fn page_offset(&self) -> usize { self.0 & (PAGE_SIZE - 1) }
/// True iff this address lies exactly on a page boundary.
pub fn aligned(&self) -> bool { self.page_offset() == 0 }
}
impl From<VirtAddr> for VirtPageNum {
fn from(v: VirtAddr) -> Self {
@ -85,6 +86,7 @@ impl PhysAddr {
/// Physical page number of the page containing this address (round down).
pub fn floor(&self) -> PhysPageNum { PhysPageNum(self.0 / PAGE_SIZE) }
/// Physical page number rounded up to the next page boundary.
pub fn ceil(&self) -> PhysPageNum { PhysPageNum((self.0 + PAGE_SIZE - 1) / PAGE_SIZE) }
/// Byte offset within the page — low bits masked by PAGE_SIZE - 1 (assumes PAGE_SIZE is a power of two).
pub fn page_offset(&self) -> usize { self.0 & (PAGE_SIZE - 1) }
/// True iff this address lies exactly on a page boundary.
pub fn aligned(&self) -> bool { self.page_offset() == 0 }
}
impl From<PhysAddr> for PhysPageNum {
fn from(v: PhysAddr) -> Self {
@ -150,6 +152,8 @@ impl<T> SimpleRange<T> where
assert!(start <= end, "start {:?} > end {:?}!", start, end);
Self { l: start, r: end }
}
/// Lower bound `l` of the range.
pub fn get_start(&self) -> T { self.l }
/// Upper bound `r` of the range (used as an exclusive end by callers such as MapArea — confirm with the IntoIterator impl).
pub fn get_end(&self) -> T { self.r }
}
impl<T> IntoIterator for SimpleRange<T> where
T: StepByOne + Copy + PartialEq + PartialOrd + Debug, {

View file

@ -1,14 +1,7 @@
use super::{
PageTable,
PTEFlags,
VirtAddr,
VirtPageNum,
PhysAddr,
PhysPageNum,
FrameTracker,
VPNRange,
frame_alloc,
};
use super::{PageTable, PageTableEntry, PTEFlags};
use super::{VirtPageNum, VirtAddr, PhysPageNum, PhysAddr};
use super::{FrameTracker, frame_alloc};
use super::{VPNRange, StepByOne};
use core::ops::Range;
use alloc::collections::BTreeMap;
use alloc::vec::Vec;
@ -16,7 +9,14 @@ use riscv::register::satp;
use alloc::sync::Arc;
use lazy_static::*;
use spin::Mutex;
use crate::config::MEMORY_END;
use crate::config::{
MEMORY_END,
PAGE_SIZE,
TRAMPOLINE,
TRAP_CONTEXT,
USER_STACK_SIZE
};
use xmas_elf::ElfFile;
extern "C" {
fn stext();
@ -28,6 +28,7 @@ extern "C" {
fn sbss_with_stack();
fn ebss();
fn ekernel();
fn strampoline();
}
lazy_static! {
@ -48,12 +49,39 @@ impl MemorySet {
areas: Vec::new(),
}
}
fn push(&mut self, mut map_area: MapArea) {
/// Token of the underlying page table (the value written to `satp` in `activate`).
pub fn token(&self) -> usize {
    self.page_table.token()
}
/// Insert a framed area covering `[start_va, end_va)` with `permission`.
/// Assumes the new area does not conflict with any existing mapping.
pub fn insert_framed_area(&mut self, start_va: VirtAddr, end_va: VirtAddr, permission: MapPermission) {
    let area = MapArea::new(start_va, end_va, MapType::Framed, permission);
    self.push(area, None);
}
/// Map `map_area` into this address space, optionally copy `data` into
/// its freshly mapped frames, then take ownership of the area.
fn push(&mut self, mut map_area: MapArea, data: Option<&[u8]>) {
    map_area.map(&mut self.page_table);
    match data {
        Some(bytes) => map_area.copy_data(&mut self.page_table, bytes),
        None => {}
    }
    self.areas.push(map_area);
}
/// Map the single trampoline page (R+X). Deliberately not tracked in
/// `areas`: it is a fixed mapping shared by every address space.
fn map_trampoline(&mut self) {
    let vpn = VirtAddr::from(TRAMPOLINE).into();
    let ppn = PhysAddr::from(strampoline as usize).into();
    self.page_table.map(vpn, ppn, PTEFlags::X | PTEFlags::R);
}
/// Without kernel stacks.
pub fn new_kernel() -> Self {
let mut memory_set = Self::new_bare();
// map trampoline
memory_set.map_trampoline();
// map kernel sections
println!(".text [{:#x}, {:#x})", stext as usize, etext as usize);
println!(".rodata [{:#x}, {:#x})", srodata as usize, erodata as usize);
println!(".data [{:#x}, {:#x})", sdata as usize, edata as usize);
@ -64,37 +92,105 @@ impl MemorySet {
(etext as usize).into(),
MapType::Identical,
MapPermission::R | MapPermission::X,
));
), None);
println!("mapping .rodata section");
memory_set.push(MapArea::new(
(srodata as usize).into(),
(erodata as usize).into(),
MapType::Identical,
MapPermission::R,
));
), None);
println!("mapping .data section");
memory_set.push(MapArea::new(
(sdata as usize).into(),
(edata as usize).into(),
MapType::Identical,
MapPermission::R | MapPermission::W,
));
), None);
println!("mapping .bss section");
memory_set.push(MapArea::new(
(sbss_with_stack as usize).into(),
(ebss as usize).into(),
MapType::Identical,
MapPermission::R | MapPermission::W,
));
), None);
println!("mapping physical memory");
memory_set.push(MapArea::new(
(ekernel as usize).into(),
MEMORY_END.into(),
MapType::Identical,
MapPermission::R | MapPermission::W,
));
), None);
memory_set
}
/// Build a user address space from an ELF image.
///
/// Maps the trampoline, every PT_LOAD segment of the ELF (with the U
/// flag), the user stack above the image (separated by one unmapped
/// guard page), and the TrapContext page between TRAP_CONTEXT and
/// TRAMPOLINE (R+W, no U flag).
///
/// Returns `(memory_set, user_stack_top, entry_point)`.
///
/// # Panics
/// Panics if `elf_data` is not a well-formed ELF image.
pub fn from_elf(elf_data: &[u8]) -> (Self, usize, usize) {
    let mut memory_set = Self::new_bare();
    // map trampoline
    memory_set.map_trampoline();
    // map program headers of elf, with U flag
    let elf = xmas_elf::ElfFile::new(elf_data).unwrap();
    let elf_header = elf.header;
    let magic = elf_header.pt1.magic;
    assert_eq!(magic, [0x7f, 0x45, 0x4c, 0x46], "invalid elf!");
    let ph_count = elf_header.pt2.ph_count();
    // Track the highest page used by any loaded segment so the user
    // stack can be placed above the whole image.
    let mut max_end_vpn = VirtPageNum(0);
    for i in 0..ph_count {
        let ph = elf.program_header(i).unwrap();
        if ph.get_type().unwrap() == xmas_elf::program::Type::Load {
            let start_va: VirtAddr = (ph.virtual_addr() as usize).into();
            let end_va: VirtAddr = ((ph.virtual_addr() + ph.mem_size()) as usize).into();
            // Segment permissions come from the ELF flags; U marks the
            // pages accessible from user mode.
            let mut map_perm = MapPermission::U;
            let ph_flags = ph.flags();
            if ph_flags.is_read() { map_perm |= MapPermission::R; }
            if ph_flags.is_write() { map_perm |= MapPermission::W; }
            if ph_flags.is_execute() { map_perm |= MapPermission::X; }
            let map_area = MapArea::new(
                start_va,
                end_va,
                MapType::Framed,
                map_perm,
            );
            max_end_vpn = map_area.vpn_range.get_end();
            // Copy only file_size bytes; the remainder of mem_size
            // (e.g. .bss) is left to the freshly allocated frames.
            memory_set.push(
                map_area,
                Some(&elf.input[ph.offset() as usize..(ph.offset() + ph.file_size()) as usize])
            );
        }
    }
    // map user stack with U flags
    let max_end_va: VirtAddr = max_end_vpn.into();
    let mut user_stack_bottom: usize = max_end_va.into();
    // leave one unmapped guard page below the stack to catch overflow
    user_stack_bottom += PAGE_SIZE;
    let user_stack_top = user_stack_bottom + USER_STACK_SIZE;
    memory_set.push(MapArea::new(
        user_stack_bottom.into(),
        user_stack_top.into(),
        MapType::Framed,
        MapPermission::R | MapPermission::W | MapPermission::U,
    ), None);
    // map the TrapContext page (R+W, no U flag)
    memory_set.push(MapArea::new(
        TRAP_CONTEXT.into(),
        TRAMPOLINE.into(),
        MapType::Framed,
        MapPermission::R | MapPermission::W,
    ), None);
    (memory_set, user_stack_top, elf.header.pt2.entry_point() as usize)
}
pub fn activate(&self) {
let satp = self.page_table.token();
unsafe {
@ -102,6 +198,9 @@ impl MemorySet {
llvm_asm!("sfence.vma" :::: "volatile");
}
}
/// Look up `vpn` in this address space's page table; `None` if the
/// page is unmapped.
pub fn translate(&self, vpn: VirtPageNum) -> Option<PageTableEntry> {
    self.page_table.translate(vpn)
}
}
pub struct MapArea {
@ -118,9 +217,8 @@ impl MapArea {
map_type: MapType,
map_perm: MapPermission
) -> Self {
// alignment assertion, limit to kernel remapping
let start_vpn: VirtPageNum = start_va.into();
let end_vpn: VirtPageNum = end_va.into();
let start_vpn: VirtPageNum = start_va.floor();
let end_vpn: VirtPageNum = end_va.ceil();
Self {
vpn_range: VPNRange::new(start_vpn, end_vpn),
data_frames: BTreeMap::new(),
@ -129,7 +227,7 @@ impl MapArea {
}
}
pub fn map_one(&mut self, page_table: &mut PageTable, vpn: VirtPageNum) {
let pte_flags = PTEFlags::from_bits(self.map_perm.bits).unwrap();
let mut pte_flags = PTEFlags::from_bits(self.map_perm.bits).unwrap();
let mut ppn = PhysPageNum(0);
match self.map_type {
MapType::Identical => {
@ -162,8 +260,31 @@ impl MapArea {
self.unmap_one(page_table, vpn);
}
}
/// Copy `data` page by page into the physical frames backing this area.
/// `data` starts at the area's first page and may have a shorter length
/// than the area; trailing bytes/pages are left untouched (assumed to
/// have been cleared when the frames were allocated).
/// Only valid for `MapType::Framed` areas (asserted).
pub fn copy_data(&mut self, page_table: &mut PageTable, data: &[u8]) {
    assert_eq!(self.map_type, MapType::Framed);
    let mut start: usize = 0;
    let mut current_vpn = self.vpn_range.get_start();
    let len = data.len();
    loop {
        // At most one page of source bytes per iteration.
        let src = &data[start..len.min(start + PAGE_SIZE)];
        // Destination: the frame currently mapped for this vpn,
        // truncated to the source length.
        let dst = &mut page_table
            .translate(current_vpn)
            .unwrap()
            .ppn()
            .get_bytes_array()[..src.len()];
        dst.copy_from_slice(src);
        start += PAGE_SIZE;
        if start >= len {
            break;
        }
        current_vpn.step();
    }
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MapType {
Identical,
Framed,
@ -174,6 +295,7 @@ bitflags! {
const R = 1 << 1;
const W = 1 << 2;
const X = 1 << 3;
const U = 1 << 4;
}
}

View file

@ -5,11 +5,11 @@ mod page_table;
mod memory_set;
use page_table::{PageTable, PTEFlags};
use address::VPNRange;
use address::{VPNRange, StepByOne};
pub use address::{PhysAddr, VirtAddr, PhysPageNum, VirtPageNum};
pub use frame_allocator::{FrameTracker, frame_alloc};
pub use page_table::{PageTableEntry};
pub use memory_set::{MemorySet, KERNEL_SPACE};
pub use memory_set::{MemorySet, KERNEL_SPACE, MapPermission};
pub use memory_set::remap_test;
pub fn init() {

View file

@ -68,7 +68,14 @@ impl PageTable {
frames: vec![frame],
}
}
fn find_pte(&mut self, vpn: VirtPageNum, create: bool) -> Option<&mut PageTableEntry> {
/// Temporarily used to get arguments from user space.
pub fn from_root_ppn(root_ppn: PhysPageNum) -> Self {
Self {
root_ppn,
frames: Vec::new(),
}
}
fn find_pte_create(&mut self, vpn: VirtPageNum) -> Option<&mut PageTableEntry> {
let idxs = vpn.indexes();
let mut ppn = self.root_ppn;
let mut result: Option<&mut PageTableEntry> = None;
@ -79,9 +86,6 @@ impl PageTable {
break;
}
if !pte.is_valid() {
if !create {
return None;
}
let frame = frame_alloc().unwrap();
*pte = PageTableEntry::new(frame.ppn, PTEFlags::V);
self.frames.push(frame);
@ -90,19 +94,36 @@ impl PageTable {
}
result
}
/// Walk the three-level page table for `vpn` without allocating any
/// intermediate nodes. Returns the leaf PTE, or `None` as soon as an
/// intermediate entry is invalid.
fn find_pte(&self, vpn: VirtPageNum) -> Option<&PageTableEntry> {
    let idxs = vpn.indexes();
    let mut ppn = self.root_ppn;
    for (level, idx) in idxs.iter().enumerate() {
        let pte = &ppn.get_pte_array()[*idx];
        if level == 2 {
            // Leaf level reached: hand back the entry as-is.
            return Some(pte);
        }
        if !pte.is_valid() {
            return None;
        }
        ppn = pte.ppn();
    }
    // Unreachable: the loop always returns at level 2.
    None
}
pub fn map(&mut self, vpn: VirtPageNum, ppn: PhysPageNum, flags: PTEFlags) {
//println!("mapping {:?} {:?}", vpn, ppn);
let pte = self.find_pte(vpn, true).unwrap();
let pte = self.find_pte_create(vpn).unwrap();
assert!(!pte.is_valid(), "vpn {:?} is mapped before mapping", vpn);
*pte = PageTableEntry::new(ppn, flags | PTEFlags::V);
}
pub fn unmap(&mut self, vpn: VirtPageNum) {
let pte = self.find_pte(vpn, false).unwrap();
let pte = self.find_pte_create(vpn).unwrap();
assert!(pte.is_valid(), "vpn {:?} is invalid before unmapping", vpn);
*pte = PageTableEntry::empty();
}
pub fn translate(&mut self, vpn: VirtPageNum) -> Option<PageTableEntry> {
self.find_pte(vpn, false)
/// Look up the leaf PTE for `vpn` and return a copy of it; `None` if
/// the page is unmapped.
pub fn translate(&self, vpn: VirtPageNum) -> Option<PageTableEntry> {
    self.find_pte(vpn).cloned()
}
pub fn token(&self) -> usize {