td-payload: add new shared memory init function with private shadow
The original API behavior is kept unchanged. The extended API initializes the
shared memory allocator with a private shadow start address. If the private
shadow is not available, the method `copy_to_private_shadow` returns `None`.

As `shadow_start` may be lower or higher than the start of the shared memory,
the private shadow is now located by taking the offset of the allocated shared
address from the start of the shared allocator and adding it to `shadow_start`.

Signed-off-by: Jiaqi Gao <jiaqi.gao@intel.com>
gaojiaqi7 authored and jyao1 committed Sep 4, 2024
1 parent 9dea7d2 commit 66f0aaf
Showing 4 changed files with 36 additions and 21 deletions.
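
A minimal usage sketch of the extended API described in the commit message. The
`td_payload::mm::shared` import path, the address parameters, and the `example`
wrapper are illustrative assumptions, not part of this commit:

    use td_payload::mm::shared::{init_shared_memory_with_shadow, SharedMemory};

    fn example(shared_base: u64, shadow_base: u64, size: usize) {
        // Shared memory and private shadow are two separate, equally sized regions;
        // `size` must be 4 KiB aligned or init_shared_memory() panics.
        init_shared_memory_with_shadow(shared_base, size, shadow_base);

        // Allocate one shared page and snapshot it into the private shadow before
        // parsing it, so the host cannot change it underneath the payload.
        let mut buf = SharedMemory::new(1).expect("failed to allocate a shared page");
        let len = buf.as_bytes().len();
        // Returns `None` if the allocator was initialized without a shadow.
        if let Some(snapshot) = buf.copy_to_private_shadow() {
            assert_eq!(snapshot.len(), len);
        }
    }
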
td-payload/src/arch/x86_64/init.rs (14 additions, 4 deletions)
@@ -7,8 +7,12 @@ use crate::{
     arch::{gdt, idt},
     hob::{self, get_hob},
     mm::{
-        get_usable, heap::init_heap, init_ram, layout::RuntimeLayout,
-        page_table::init_pt_frame_allocator, shared::init_shared_memory,
+        get_usable,
+        heap::init_heap,
+        init_ram,
+        layout::RuntimeLayout,
+        page_table::init_pt_frame_allocator,
+        shared::{init_shared_memory, init_shared_memory_with_shadow},
     },
 };
 
@@ -22,7 +26,7 @@ use super::{
     idt::{PAGE_FAULT_EXCEPTION, PAGE_FAULT_IST},
 };
 
-pub fn pre_init(hob: u64, layout: &RuntimeLayout) {
+pub fn pre_init(hob: u64, layout: &RuntimeLayout, use_shared_shadow: bool) {
     let hob = hob::init(hob).expect("Invalid payload HOB");
     let memory_map = init_ram(hob).expect("Failed to parse E820 table from payload HOB");
 
@@ -35,7 +39,13 @@ pub fn pre_init(hob: u64, layout: &RuntimeLayout) {
     init_heap(heap, layout.heap_size);
 
     let shared = get_usable(layout.shared_memory_size).expect("Failed to allocate shared memory");
-    init_shared_memory(shared, layout.shared_memory_size);
+    if use_shared_shadow {
+        let shadow =
+            get_usable(layout.shared_memory_size).expect("Failed to allocate shared shadow");
+        init_shared_memory_with_shadow(shared, layout.shared_memory_size, shadow);
+    } else {
+        init_shared_memory(shared, layout.shared_memory_size);
+    }
 
     // Init Global Descriptor Table and Task State Segment
     gdt::init_gdt();
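
A payload that wants the private shadow opts in by passing `true`; the in-tree
callers changed below keep the old behavior with `false`. A hypothetical call
site, mirroring the `_start` entry points touched by this commit:

    // Hypothetical: enable the private shadow. pre_init() then reserves a second,
    // equally sized region via get_usable() and passes it as the shadow start.
    let layout = RuntimeLayout::default();
    arch::init::pre_init(hob, &layout, true);
    arch::init::init(&layout, main);
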
td-payload/src/lib.rs (1 addition, 1 deletion)
@@ -33,7 +33,7 @@ pub extern "C" fn _start(hob: u64, _payload: u64) -> ! {
 
     let layout = RuntimeLayout::default();
 
-    arch::init::pre_init(hob, &layout);
+    arch::init::pre_init(hob, &layout, false);
     arch::init::init(&layout, main);
 }
 
td-payload/src/mm/shared.rs (20 additions, 15 deletions)
@@ -10,35 +10,38 @@ use super::SIZE_4K;
 use crate::arch::shared::decrypt;
 
 static SHARED_MEMORY_ALLOCATOR: LockedHeap = LockedHeap::empty();
-static SHADOW_OFFSET: Once<usize> = Once::new();
+static SHARED_START: Once<usize> = Once::new();
+static SHADOW_START: Once<usize> = Once::new();
 
 pub fn init_shared_memory(start: u64, size: usize) {
     if size % SIZE_4K != 0 {
         panic!("Failed to initialize shared memory: size needs to be aligned with 0x1000");
     }
-    let shared_size = size / 2;
 
     // Set the shared memory region to be shared
-    decrypt(start, shared_size);
+    decrypt(start, size);
     // Initialize the shared memory allocator
     unsafe {
-        SHARED_MEMORY_ALLOCATOR
-            .lock()
-            .init(start as *mut u8, shared_size);
+        SHARED_MEMORY_ALLOCATOR.lock().init(start as *mut u8, size);
     }
-    SHADOW_OFFSET.call_once(|| shared_size);
 }
 
+pub fn init_shared_memory_with_shadow(start: u64, size: usize, shadow_start: u64) {
+    init_shared_memory(start, size);
+    SHARED_START.call_once(|| start as usize);
+    SHADOW_START.call_once(|| shadow_start as usize);
+}
+
 pub struct SharedMemory {
     addr: usize,
-    shadow_addr: usize,
+    shadow_addr: Option<usize>,
     size: usize,
 }
 
 impl SharedMemory {
     pub fn new(num_page: usize) -> Option<Self> {
         let addr = unsafe { alloc_shared_pages(num_page)? };
-        let shadow_addr = alloc_private_shadow_pages(addr)?;
+        let shadow_addr = alloc_private_shadow_pages(addr);
 
         Some(Self {
             addr,
@@ -47,12 +50,13 @@ impl SharedMemory {
         })
     }
 
-    pub fn copy_to_private_shadow(&mut self) -> &[u8] {
-        let shadow =
-            unsafe { core::slice::from_raw_parts_mut(self.shadow_addr as *mut u8, self.size) };
-        shadow.copy_from_slice(self.as_bytes());
+    pub fn copy_to_private_shadow(&mut self) -> Option<&[u8]> {
+        self.shadow_addr.map(|addr| {
+            let shadow = unsafe { core::slice::from_raw_parts_mut(addr as *mut u8, self.size) };
+            shadow.copy_from_slice(self.as_bytes());
 
-        shadow
+            &shadow[..]
+        })
     }
 
     pub fn as_bytes(&self) -> &[u8] {
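
Since `copy_to_private_shadow` now returns an `Option`, callers that require the
shadow have to handle the `None` case explicitly. A hypothetical call site that
treats a missing shadow as fatal (`shared` is a `SharedMemory`):

    // Fail loudly if the allocator was initialized without a private shadow.
    let snapshot = shared
        .copy_to_private_shadow()
        .expect("shared memory allocator was initialized without a private shadow");
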
@@ -110,5 +114,6 @@ pub unsafe fn free_shared_page(addr: usize) {
 }
 
 fn alloc_private_shadow_pages(shared_addr: usize) -> Option<usize> {
-    Some(shared_addr + SHADOW_OFFSET.get()?)
+    let offset = shared_addr.checked_sub(*SHARED_START.get()?)?;
+    Some(SHADOW_START.get()? + offset)
 }
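
The shared-to-shadow mapping is now purely offset based, so it works whether the
shadow region sits below or above the shared region. A standalone sketch of the
same arithmetic, with made-up addresses that are not taken from the commit:

    // Mirrors alloc_private_shadow_pages: translate a shared allocation into the
    // shadow region by its offset from the start of the shared allocator.
    fn shadow_for(shared_addr: usize, shared_start: usize, shadow_start: usize) -> Option<usize> {
        let offset = shared_addr.checked_sub(shared_start)?;
        Some(shadow_start + offset)
    }

    fn main() {
        // Shadow below the shared region: offset 0x3000 maps 0x20_3000 -> 0x10_3000.
        assert_eq!(shadow_for(0x20_3000, 0x20_0000, 0x10_0000), Some(0x10_3000));
        // Shadow above the shared region: offset 0x1000 maps 0x30_1000 -> 0x40_1000.
        assert_eq!(shadow_for(0x30_1000, 0x30_0000, 0x40_0000), Some(0x40_1000));
        // Addresses below the shared start have no shadow.
        assert_eq!(shadow_for(0x1f_f000, 0x20_0000, 0x10_0000), None);
    }
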
tests/test-td-payload/src/main.rs (1 addition, 1 deletion)
@@ -135,7 +135,7 @@ pub extern "C" fn _start(hob: u64, _payload: u64) -> ! {
         shadow_stack_size: layout::DEFAULT_SHADOW_STACK_SIZE,
     };
 
-    arch::init::pre_init(hob, &layout);
+    arch::init::pre_init(hob, &layout, false);
     arch::init::init(&layout, main);
 }
 
