From 8569f7e4182e77a9957e5b0787b3aad2e7fd8a30 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexander=20Mei=C3=9Fner?=
Date: Tue, 20 Jun 2023 16:39:48 +0200
Subject: [PATCH] Fix - Gapped memory `vm_addr_end` (#472)

* Adds test_gapped_map.

* Fixes vm_addr_end to be double the length if region is gapped.

* Adds missing break statement in slow path while loop.
---
 src/memory_region.rs | 49 +++++++++++++++++++++++++++++++++++++-------
 1 file changed, 42 insertions(+), 7 deletions(-)

diff --git a/src/memory_region.rs b/src/memory_region.rs
index e152ccc1..2d3c116a 100644
--- a/src/memory_region.rs
+++ b/src/memory_region.rs
@@ -67,17 +67,19 @@ pub struct MemoryRegion {
 
 impl MemoryRegion {
     fn new(slice: &[u8], vm_addr: u64, vm_gap_size: u64, state: MemoryState) -> Self {
+        let mut vm_addr_end = vm_addr.saturating_add(slice.len() as u64);
         let mut vm_gap_shift = (std::mem::size_of::<u64>() as u8)
             .saturating_mul(8)
             .saturating_sub(1);
         if vm_gap_size > 0 {
+            vm_addr_end = vm_addr_end.saturating_add(slice.len() as u64);
             vm_gap_shift = vm_gap_shift.saturating_sub(vm_gap_size.leading_zeros() as u8);
             debug_assert_eq!(Some(vm_gap_size), 1_u64.checked_shl(vm_gap_shift as u32));
         };
         MemoryRegion {
             host_addr: Cell::new(slice.as_ptr() as u64),
             vm_addr,
-            vm_addr_end: vm_addr.saturating_add(slice.len() as u64),
+            vm_addr_end,
             len: slice.len() as u64,
             vm_gap_shift,
             state: Cell::new(state),
@@ -151,7 +153,7 @@ impl fmt::Debug for MemoryRegion {
             self.host_addr,
             self.host_addr.get().saturating_add(self.len),
             self.vm_addr,
-            self.vm_addr.saturating_add(self.len),
+            self.vm_addr_end,
             self.len
         )
     }
@@ -242,7 +244,7 @@ impl<'a> UnalignedMemoryMapping<'a> {
         for index in 1..regions.len() {
             let first = &regions[index.saturating_sub(1)];
             let second = &regions[index];
-            if first.vm_addr.saturating_add(first.len) > second.vm_addr {
+            if first.vm_addr_end > second.vm_addr {
                 return Err(EbpfError::InvalidMemoryRegion(index));
             }
         }
@@ -302,10 +304,7 @@ impl<'a> UnalignedMemoryMapping<'a> {
             // we check for index==0 above, and by construction if we get here index
             // must be contained in region
             let region = unsafe { self.regions.get_unchecked(index - 1) };
-            cache.insert(
-                region.vm_addr..region.vm_addr.saturating_add(region.len),
-                index,
-            );
+            cache.insert(region.vm_addr..region.vm_addr_end, index);
             Some(region)
         }
     }
@@ -390,6 +389,8 @@ impl<'a> UnalignedMemoryMapping<'a> {
                     Some(region) => region,
                     None => break,
                 };
+            } else {
+                break;
             }
         }
 
@@ -461,6 +462,8 @@ impl<'a> UnalignedMemoryMapping<'a> {
                     Some(region) => region,
                     None => break,
                 };
+            } else {
+                break;
             }
         }
 
@@ -987,6 +990,38 @@ mod test {
         );
     }
 
+    #[test]
+    fn test_gapped_map() {
+        for aligned_memory_mapping in [false, true] {
+            let config = Config {
+                aligned_memory_mapping,
+                ..Config::default()
+            };
+            let mut mem1 = vec![0xff; 8];
+            let m = MemoryMapping::new(
+                vec![
+                    MemoryRegion::new_readonly(&[0; 8], ebpf::MM_PROGRAM_START),
+                    MemoryRegion::new_writable_gapped(&mut mem1, ebpf::MM_STACK_START, 2),
+                ],
+                &config,
+            )
+            .unwrap();
+            for frame in 0..4 {
+                let address = ebpf::MM_STACK_START + frame * 4;
+                assert!(m.region(AccessType::Load, address).is_ok());
+                assert!(m.map(AccessType::Load, address, 2, 0).is_ok());
+                assert_error!(
+                    m.map(AccessType::Load, address + 2, 2, 0),
+                    "AccessViolation"
+                );
+                assert_eq!(m.load::<u16>(address, 0).unwrap(), 0xFFFF);
+                assert_error!(m.load::<u16>(address + 2, 0), "AccessViolation");
+                assert!(m.store::<u16>(0xFFFF, address, 0).is_ok());
+                assert_error!(m.store::<u16>(0xFFFF, address + 2, 0), "AccessViolation");
+            }
+        }
+    }
+
     #[test]
     fn test_unaligned_map_overlap() {
         let config = Config::default();