Skip to content

Commit

Permalink
Marking cast_ptr_to_slice and cast_ptr_to_slice_mut unsafe
Browse files Browse the repository at this point in the history
  • Loading branch information
mohanson committed Nov 27, 2024
1 parent f1ef1cc commit 7648b49
Show file tree
Hide file tree
Showing 2 changed files with 55 additions and 40 deletions.
31 changes: 19 additions & 12 deletions definitions/src/asm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -166,19 +166,26 @@ impl AsmCoreMachine {
}

impl AsmCoreMachine {
pub fn cast_ptr_to_slice(&self, ptr: u64, offset: usize, size: usize) -> &[u8] {
unsafe {
let ptr = ptr as *mut u8;
let ptr = ptr.add(offset);
std::slice::from_raw_parts(ptr, size)
}
/// Casts a raw pointer with an offset and size to a byte slice.
///
/// # Safety
///
/// - The `ptr` must be a valid memory address for reading `size` bytes.
/// - The `ptr + offset` must not overflow.
/// - The memory region specified by `ptr + offset` and `size` must remain
/// valid for the lifetime of the returned slice.
///
/// Failing to satisfy these conditions results in undefined behavior.
pub unsafe fn cast_ptr_to_slice(&self, ptr: u64, offset: usize, size: usize) -> &[u8] {
let ptr = ptr as *const u8;
let ptr = ptr.add(offset);
std::slice::from_raw_parts(ptr, size)
}

pub fn cast_ptr_to_slice_mut(&self, ptr: u64, offset: usize, size: usize) -> &mut [u8] {
unsafe {
let ptr = ptr as *mut u8;
let ptr = ptr.add(offset);
std::slice::from_raw_parts_mut(ptr, size)
}
/// Provides similar functionality to `cast_ptr_to_slice` but returns mut slice.
pub unsafe fn cast_ptr_to_slice_mut(&self, ptr: u64, offset: usize, size: usize) -> &mut [u8] {
let ptr = ptr as *mut u8;
let ptr = ptr.add(offset);
std::slice::from_raw_parts_mut(ptr, size)
}
}
64 changes: 36 additions & 28 deletions src/machine/asm/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,9 @@ impl CoreMachine for Box<AsmCoreMachine> {
#[no_mangle]
pub extern "C" fn inited_memory(frame_index: u64, machine: &mut AsmCoreMachine) {
let addr_from = (frame_index << MEMORY_FRAME_SHIFTS) as usize;
let slice =
machine.cast_ptr_to_slice_mut(machine.memory_ptr, addr_from, 1 << MEMORY_FRAME_SHIFTS);
let slice = unsafe {
machine.cast_ptr_to_slice_mut(machine.memory_ptr, addr_from, 1 << MEMORY_FRAME_SHIFTS)
};
if machine.chaos_mode != 0 {
let mut gen = rand::rngs::StdRng::seed_from_u64(machine.chaos_seed.into());
gen.fill_bytes(slice);
Expand Down Expand Up @@ -236,9 +237,10 @@ impl<'a> FastMemory<'a> {
let page_indices = get_page_indices(addr, size);
for page in page_indices.0..=page_indices.1 {
let frame_index = page >> MEMORY_FRAME_PAGE_SHIFTS;
let slice = self
.0
.cast_ptr_to_slice_mut(self.0.frames_ptr, frame_index as usize, 1);
let slice = unsafe {
self.0
.cast_ptr_to_slice_mut(self.0.frames_ptr, frame_index as usize, 1)
};
slice[0] = 1;
self.0.set_flag(page, FLAG_DIRTY)?;
}
Expand All @@ -254,9 +256,10 @@ impl<'a> Memory for FastMemory<'a> {
return Ok(());
}
self.prepare_memory(addr, value.len() as u64)?;
let slice = self
.0
.cast_ptr_to_slice_mut(self.0.memory_ptr, addr as usize, value.len());
let slice = unsafe {
self.0
.cast_ptr_to_slice_mut(self.0.memory_ptr, addr as usize, value.len())
};
slice.copy_from_slice(value);
Ok(())
}
Expand All @@ -266,9 +269,10 @@ impl<'a> Memory for FastMemory<'a> {
return Ok(());
}
self.prepare_memory(addr, size)?;
let slice = self
.0
.cast_ptr_to_slice_mut(self.0.memory_ptr, addr as usize, size as usize);
let slice = unsafe {
self.0
.cast_ptr_to_slice_mut(self.0.memory_ptr, addr as usize, size as usize)
};
memset(slice, value);
Ok(())
}
Expand Down Expand Up @@ -361,9 +365,11 @@ impl Memory for Box<AsmCoreMachine> {
type REG = u64;

fn reset_memory(&mut self) -> Result<(), Error> {
let slice = self.cast_ptr_to_slice_mut(self.flags_ptr, 0, self.flags_size as usize);
let slice =
unsafe { self.cast_ptr_to_slice_mut(self.flags_ptr, 0, self.flags_size as usize) };
memset(slice, 0);
let slice = self.cast_ptr_to_slice_mut(self.frames_ptr, 0, self.frames_size as usize);
let slice =
unsafe { self.cast_ptr_to_slice_mut(self.frames_ptr, 0, self.frames_size as usize) };
memset(slice, 0);
self.load_reservation_address = u64::MAX;
self.last_read_frame = u64::MAX;
Expand Down Expand Up @@ -428,7 +434,7 @@ impl Memory for Box<AsmCoreMachine> {

fn fetch_flag(&mut self, page: u64) -> Result<u8, Error> {
if page < self.memory_pages() as u64 {
let slice = self.cast_ptr_to_slice(self.flags_ptr, page as usize, 1);
let slice = unsafe { self.cast_ptr_to_slice(self.flags_ptr, page as usize, 1) };
Ok(slice[0])
} else {
Err(Error::MemOutOfBound(
Expand All @@ -440,7 +446,7 @@ impl Memory for Box<AsmCoreMachine> {

fn set_flag(&mut self, page: u64, flag: u8) -> Result<(), Error> {
if page < self.memory_pages() as u64 {
let slice = self.cast_ptr_to_slice_mut(self.flags_ptr, page as usize, 1);
let slice = unsafe { self.cast_ptr_to_slice_mut(self.flags_ptr, page as usize, 1) };
slice[0] |= flag;
// Clear last write page cache
self.last_write_page = u64::MAX;
Expand All @@ -455,7 +461,7 @@ impl Memory for Box<AsmCoreMachine> {

fn clear_flag(&mut self, page: u64, flag: u8) -> Result<(), Error> {
if page < self.memory_pages() as u64 {
let slice = self.cast_ptr_to_slice_mut(self.flags_ptr, page as usize, 1);
let slice = unsafe { self.cast_ptr_to_slice_mut(self.flags_ptr, page as usize, 1) };
slice[0] &= !flag;
// Clear last write page cache
self.last_write_page = u64::MAX;
Expand Down Expand Up @@ -483,7 +489,8 @@ impl Memory for Box<AsmCoreMachine> {
check_memory(self, page);
self.set_flag(page, FLAG_DIRTY)?;
}
let slice = self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, value.len());
let slice =
unsafe { self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, value.len()) };
slice.copy_from_slice(value);
Ok(())
}
Expand All @@ -499,7 +506,8 @@ impl Memory for Box<AsmCoreMachine> {
check_memory(self, page);
self.set_flag(page, FLAG_DIRTY)?;
}
let slice = self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, size as usize);
let slice =
unsafe { self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, size as usize) };
memset(slice, value);
Ok(())
}
Expand All @@ -523,72 +531,72 @@ impl Memory for Box<AsmCoreMachine> {

/// Fetches a 16-bit little-endian value at `addr` from executable memory.
fn execute_load16(&mut self, addr: u64) -> Result<u16, Error> {
    check_memory_executable(self, addr, 2)?;
    // SAFETY: relies on check_memory_executable above having validated that
    // `addr..addr + 2` lies within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice(self.memory_ptr, addr as usize, 2) };
    Ok(LittleEndian::read_u16(slice))
}

/// Fetches a 32-bit little-endian value at `addr` from executable memory.
fn execute_load32(&mut self, addr: u64) -> Result<u32, Error> {
    check_memory_executable(self, addr, 4)?;
    // SAFETY: relies on check_memory_executable above having validated that
    // `addr..addr + 4` lies within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice(self.memory_ptr, addr as usize, 4) };
    Ok(LittleEndian::read_u32(slice))
}

/// Loads one byte at `addr`, zero-extended to u64.
fn load8(&mut self, addr: &u64) -> Result<u64, Error> {
    let addr = *addr;
    check_memory_inited(self, addr, 1)?;
    // SAFETY: relies on check_memory_inited above having validated that
    // `addr` maps to one readable byte within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice(self.memory_ptr, addr as usize, 1) };
    Ok(u64::from(slice[0]))
}

/// Loads a 16-bit little-endian value at `addr`, zero-extended to u64.
fn load16(&mut self, addr: &u64) -> Result<u64, Error> {
    let addr = *addr;
    check_memory_inited(self, addr, 2)?;
    // SAFETY: relies on check_memory_inited above having validated that
    // `addr..addr + 2` lies within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice(self.memory_ptr, addr as usize, 2) };
    Ok(u64::from(LittleEndian::read_u16(slice)))
}

/// Loads a 32-bit little-endian value at `addr`, zero-extended to u64.
fn load32(&mut self, addr: &u64) -> Result<u64, Error> {
    let addr = *addr;
    check_memory_inited(self, addr, 4)?;
    // SAFETY: relies on check_memory_inited above having validated that
    // `addr..addr + 4` lies within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice(self.memory_ptr, addr as usize, 4) };
    Ok(u64::from(LittleEndian::read_u32(slice)))
}

/// Loads a 64-bit little-endian value at `addr`.
fn load64(&mut self, addr: &u64) -> Result<u64, Error> {
    let addr = *addr;
    check_memory_inited(self, addr, 8)?;
    // SAFETY: relies on check_memory_inited above having validated that
    // `addr..addr + 8` lies within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice(self.memory_ptr, addr as usize, 8) };
    Ok(LittleEndian::read_u64(slice))
}

/// Stores the low byte of `value` at `addr`.
fn store8(&mut self, addr: &u64, value: &u64) -> Result<(), Error> {
    let addr = *addr;
    check_memory_writable(self, addr, 1)?;
    // SAFETY: relies on check_memory_writable above having validated that
    // `addr` maps to one writable byte within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, 1) };
    slice[0] = *value as u8;
    Ok(())
}

/// Stores the low 16 bits of `value` at `addr` in little-endian order.
fn store16(&mut self, addr: &u64, value: &u64) -> Result<(), Error> {
    let addr = *addr;
    check_memory_writable(self, addr, 2)?;
    // SAFETY: relies on check_memory_writable above having validated that
    // `addr..addr + 2` is writable within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, 2) };
    LittleEndian::write_u16(slice, *value as u16);
    Ok(())
}

/// Stores the low 32 bits of `value` at `addr` in little-endian order.
fn store32(&mut self, addr: &u64, value: &u64) -> Result<(), Error> {
    let addr = *addr;
    check_memory_writable(self, addr, 4)?;
    // SAFETY: relies on check_memory_writable above having validated that
    // `addr..addr + 4` is writable within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, 4) };
    LittleEndian::write_u32(slice, *value as u32);
    Ok(())
}

/// Stores `value` at `addr` in little-endian order.
fn store64(&mut self, addr: &u64, value: &u64) -> Result<(), Error> {
    let addr = *addr;
    check_memory_writable(self, addr, 8)?;
    // SAFETY: relies on check_memory_writable above having validated that
    // `addr..addr + 8` is writable within the machine's memory region.
    let slice = unsafe { self.cast_ptr_to_slice_mut(self.memory_ptr, addr as usize, 8) };
    // `*value` is already u64; the original `as u64` cast was a no-op.
    LittleEndian::write_u64(slice, *value);
    Ok(())
}
Expand Down

0 comments on commit 7648b49

Please sign in to comment.