rust/src/memory.rs

use byteorder::{ByteOrder, NativeEndian, ReadBytesExt, WriteBytesExt};
use std::collections::{btree_map, BTreeMap, HashMap};
use std::collections::Bound::{Included, Excluded};
use std::mem;
use std::ptr;

use error::{EvalError, EvalResult};
use primval::PrimVal;

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct AllocId(u64);

#[derive(Debug)]
pub struct Allocation {
    pub bytes: Box<[u8]>,
    pub relocations: BTreeMap<usize, AllocId>,
    // TODO(tsion): undef mask
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Pointer {
    pub alloc_id: AllocId,
    pub offset: usize,
}

impl Pointer {
    pub fn offset(self, i: isize) -> Self {
        Pointer { offset: (self.offset as isize + i) as usize, ..self }
    }
}
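
// A minimal sketch of `Pointer::offset` in use (an illustrative test with an arbitrary
// `AllocId`): offsets may be positive or negative and leave the allocation id untouched.
#[cfg(test)]
#[test]
fn pointer_offset_example() {
    let ptr = Pointer { alloc_id: AllocId(0), offset: 4 };
    assert_eq!(ptr.offset(3).offset, 7);
    assert_eq!(ptr.offset(-4).offset, 0);
    assert_eq!(ptr.offset(3).alloc_id, AllocId(0));
}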

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct FieldRepr {
    pub offset: usize,
    pub size: usize,
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Repr {
    /// Representation for a non-aggregate type such as a boolean, integer, character or pointer.
    Primitive {
        size: usize
    },

    /// The representation for aggregate types including structs, enums, and tuples.
    Aggregate {
        /// The size of the discriminant (an integer). Should be between 0 and 8. Always 0 for
        /// structs and tuples.
        discr_size: usize,

        /// The size of the entire aggregate, including the discriminant.
        size: usize,

        /// The representations of the contents of each variant.
        variants: Vec<Vec<FieldRepr>>,
    },

    Array {
        elem_size: usize,

        /// Number of elements.
        length: usize,
    },
}

impl Repr {
    pub fn size(&self) -> usize {
        match *self {
            Repr::Primitive { size } => size,
            Repr::Aggregate { size, .. } => size,
            Repr::Array { elem_size, length } => elem_size * length,
        }
    }
}
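
// A small sketch of how `Repr::size` composes for each variant. The layouts below are
// illustrative assumptions, not layouts computed by the compiler.
#[cfg(test)]
#[test]
fn repr_size_example() {
    // A 4-byte primitive, e.g. a 32-bit integer.
    assert_eq!((Repr::Primitive { size: 4 }).size(), 4);

    // Something resembling a two-variant enum: a 1-byte discriminant followed by a single
    // 4-byte field in the first variant, with the whole aggregate occupying 8 bytes.
    let repr = Repr::Aggregate {
        discr_size: 1,
        size: 8,
        variants: vec![
            vec![FieldRepr { offset: 1, size: 4 }],
            vec![],
        ],
    };
    assert_eq!(repr.size(), 8);

    // An array's size is its element size times its length.
    assert_eq!((Repr::Array { elem_size: 4, length: 8 }).size(), 32);
}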

pub struct Memory {
    alloc_map: HashMap<u64, Allocation>,
    next_id: u64,
    pub pointer_size: usize,
}

impl Memory {
    pub fn new() -> Self {
        Memory {
            alloc_map: HashMap::new(),
            next_id: 0,

            // TODO(tsion): Should this be host's or target's usize?
            pointer_size: mem::size_of::<usize>(),
        }
    }

    pub fn allocate(&mut self, size: usize) -> Pointer {
        let id = AllocId(self.next_id);
        let alloc = Allocation {
            bytes: vec![0; size].into_boxed_slice(),
            relocations: BTreeMap::new(),
        };
        self.alloc_map.insert(self.next_id, alloc);
        self.next_id += 1;
        Pointer {
            alloc_id: id,
            offset: 0,
        }
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Allocation accessors
    ////////////////////////////////////////////////////////////////////////////////

    pub fn get(&self, id: AllocId) -> EvalResult<&Allocation> {
        self.alloc_map.get(&id.0).ok_or(EvalError::DanglingPointerDeref)
    }

    pub fn get_mut(&mut self, id: AllocId) -> EvalResult<&mut Allocation> {
        self.alloc_map.get_mut(&id.0).ok_or(EvalError::DanglingPointerDeref)
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Byte accessors
    ////////////////////////////////////////////////////////////////////////////////

    fn get_bytes_unchecked(&self, ptr: Pointer, size: usize) -> EvalResult<&[u8]> {
        let alloc = try!(self.get(ptr.alloc_id));
        if ptr.offset + size > alloc.bytes.len() {
            return Err(EvalError::PointerOutOfBounds);
        }
        Ok(&alloc.bytes[ptr.offset..ptr.offset + size])
    }

    fn get_bytes_unchecked_mut(&mut self, ptr: Pointer, size: usize) -> EvalResult<&mut [u8]> {
        let alloc = try!(self.get_mut(ptr.alloc_id));
        if ptr.offset + size > alloc.bytes.len() {
            return Err(EvalError::PointerOutOfBounds);
        }
        Ok(&mut alloc.bytes[ptr.offset..ptr.offset + size])
    }

    fn get_bytes(&self, ptr: Pointer, size: usize) -> EvalResult<&[u8]> {
        if try!(self.relocations(ptr, size)).count() != 0 {
            return Err(EvalError::ReadPointerAsBytes);
        }
        // TODO(tsion): Track and check for undef bytes.
        self.get_bytes_unchecked(ptr, size)
    }

    fn get_bytes_mut(&mut self, ptr: Pointer, size: usize) -> EvalResult<&mut [u8]> {
        try!(self.clear_relocations(ptr, size));
        self.get_bytes_unchecked_mut(ptr, size)
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Reading and writing
    ////////////////////////////////////////////////////////////////////////////////

    pub fn copy(&mut self, src: Pointer, dest: Pointer, size: usize) -> EvalResult<()> {
        // TODO(tsion): Track and check for undef bytes.
        try!(self.check_relocation_edges(src, size));

        let src_bytes = try!(self.get_bytes_unchecked_mut(src, size)).as_mut_ptr();
        let dest_bytes = try!(self.get_bytes_mut(dest, size)).as_mut_ptr();

        // SAFE: The accessors above return an error unless there are at least `size` bytes
        // behind both `src` and `dest`, so the raw pointers are valid for `size` bytes. We use
        // the overlap-safe `ptr::copy` whenever `src` and `dest` could possibly overlap, i.e.
        // when they point into the same allocation.
        unsafe {
            if src.alloc_id == dest.alloc_id {
                ptr::copy(src_bytes, dest_bytes, size);
            } else {
                ptr::copy_nonoverlapping(src_bytes, dest_bytes, size);
            }
        }

        self.copy_relocations(src, dest, size)
    }

    pub fn write_bytes(&mut self, ptr: Pointer, src: &[u8]) -> EvalResult<()> {
        self.get_bytes_mut(ptr, src.len()).map(|dest| dest.clone_from_slice(src))
    }

    pub fn read_ptr(&self, ptr: Pointer) -> EvalResult<Pointer> {
        let size = self.pointer_size;
        let offset = try!(self.get_bytes_unchecked(ptr, size))
            .read_uint::<NativeEndian>(size).unwrap() as usize;
        let alloc = try!(self.get(ptr.alloc_id));
        match alloc.relocations.get(&ptr.offset) {
            Some(&alloc_id) => Ok(Pointer { alloc_id: alloc_id, offset: offset }),
            None => Err(EvalError::ReadBytesAsPointer),
        }
    }

    pub fn write_ptr(&mut self, dest: Pointer, ptr: Pointer) -> EvalResult<()> {
        {
            let size = self.pointer_size;
            let mut bytes = try!(self.get_bytes_mut(dest, size));
            bytes.write_uint::<NativeEndian>(ptr.offset as u64, size).unwrap();
        }
        try!(self.get_mut(dest.alloc_id)).relocations.insert(dest.offset, ptr.alloc_id);
        Ok(())
    }

    pub fn write_primval(&mut self, ptr: Pointer, val: PrimVal) -> EvalResult<()> {
        let pointer_size = self.pointer_size;
        match val {
            PrimVal::Bool(b) => self.write_bool(ptr, b),
            PrimVal::I8(n)  => self.write_int(ptr, n as i64, 1),
            PrimVal::I16(n) => self.write_int(ptr, n as i64, 2),
            PrimVal::I32(n) => self.write_int(ptr, n as i64, 4),
            PrimVal::I64(n) => self.write_int(ptr, n as i64, 8),
            PrimVal::U8(n)  => self.write_uint(ptr, n as u64, 1),
            PrimVal::U16(n) => self.write_uint(ptr, n as u64, 2),
            PrimVal::U32(n) => self.write_uint(ptr, n as u64, 4),
            PrimVal::U64(n) => self.write_uint(ptr, n as u64, 8),
            PrimVal::IntegerPtr(n) => self.write_uint(ptr, n as u64, pointer_size),
            PrimVal::AbstractPtr(_p) => unimplemented!(),
        }
    }

    pub fn read_bool(&self, ptr: Pointer) -> EvalResult<bool> {
        let bytes = try!(self.get_bytes(ptr, 1));
        match bytes[0] {
            0 => Ok(false),
            1 => Ok(true),
            _ => Err(EvalError::InvalidBool),
        }
    }

    pub fn write_bool(&mut self, ptr: Pointer, b: bool) -> EvalResult<()> {
        self.get_bytes_mut(ptr, 1).map(|bytes| bytes[0] = b as u8)
    }

    pub fn read_int(&self, ptr: Pointer, size: usize) -> EvalResult<i64> {
        self.get_bytes(ptr, size).map(|mut b| b.read_int::<NativeEndian>(size).unwrap())
    }

    pub fn write_int(&mut self, ptr: Pointer, n: i64, size: usize) -> EvalResult<()> {
        self.get_bytes_mut(ptr, size).map(|mut b| b.write_int::<NativeEndian>(n, size).unwrap())
    }

    pub fn read_uint(&self, ptr: Pointer, size: usize) -> EvalResult<u64> {
        self.get_bytes(ptr, size).map(|mut b| b.read_uint::<NativeEndian>(size).unwrap())
    }

    pub fn write_uint(&mut self, ptr: Pointer, n: u64, size: usize) -> EvalResult<()> {
        self.get_bytes_mut(ptr, size).map(|mut b| b.write_uint::<NativeEndian>(n, size).unwrap())
    }

    pub fn read_isize(&self, ptr: Pointer) -> EvalResult<i64> {
        self.read_int(ptr, self.pointer_size)
    }

    pub fn write_isize(&mut self, ptr: Pointer, n: i64) -> EvalResult<()> {
        let size = self.pointer_size;
        self.write_int(ptr, n, size)
    }

    pub fn read_usize(&self, ptr: Pointer) -> EvalResult<u64> {
        self.read_uint(ptr, self.pointer_size)
    }

    pub fn write_usize(&mut self, ptr: Pointer, n: u64) -> EvalResult<()> {
        let size = self.pointer_size;
        self.write_uint(ptr, n, size)
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Relocations
    ////////////////////////////////////////////////////////////////////////////////

    fn relocations(&self, ptr: Pointer, size: usize)
        -> EvalResult<btree_map::Range<usize, AllocId>>
    {
        // A relocation overlaps the accessed range if its pointer-sized value starts anywhere
        // in [ptr.offset - (pointer_size - 1), ptr.offset + size).
        let start = ptr.offset.saturating_sub(self.pointer_size - 1);
        let end = ptr.offset + size;
        Ok(try!(self.get(ptr.alloc_id)).relocations.range(Included(&start), Excluded(&end)))
    }

    fn clear_relocations(&mut self, ptr: Pointer, size: usize) -> EvalResult<()> {
        let keys: Vec<_> = try!(self.relocations(ptr, size)).map(|(&k, _)| k).collect();
        let alloc = try!(self.get_mut(ptr.alloc_id));
        for k in keys {
            alloc.relocations.remove(&k);
        }
        Ok(())
    }

    fn check_relocation_edges(&self, ptr: Pointer, size: usize) -> EvalResult<()> {
        let overlapping_start = try!(self.relocations(ptr, 0)).count();
        let overlapping_end = try!(self.relocations(ptr.offset(size as isize), 0)).count();
        if overlapping_start + overlapping_end != 0 {
            return Err(EvalError::ReadPointerAsBytes);
        }
        Ok(())
    }

    fn copy_relocations(&mut self, src: Pointer, dest: Pointer, size: usize) -> EvalResult<()> {
        let relocations: Vec<_> = try!(self.relocations(src, size))
            .map(|(&offset, &alloc_id)| {
                // Update relocation offsets for the new positions in the destination allocation.
                (offset + dest.offset - src.offset, alloc_id)
            })
            .collect();
        try!(self.get_mut(dest.alloc_id)).relocations.extend(relocations);
        Ok(())
    }
}
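
// Illustrative sketches of the `Memory` API above, written as tests. They rely only on the
// items defined in this file (plus `EvalError`, already used above) and on the host-endian
// encoding that `NativeEndian` gives the integer accessors.
#[cfg(test)]
mod tests {
    use super::Memory;
    use error::EvalError;

    #[test]
    fn allocate_and_round_trip_ints() {
        let mut mem = Memory::new();
        let ptr = mem.allocate(8);
        // Fresh allocations are zero-filled.
        assert_eq!(mem.read_uint(ptr, 8).unwrap(), 0);
        // Integers round-trip through the byte-level accessors.
        mem.write_int(ptr, -1234, 8).unwrap();
        assert_eq!(mem.read_int(ptr, 8).unwrap(), -1234);
    }

    #[test]
    fn pointers_round_trip_through_relocations() {
        let mut mem = Memory::new();
        let size = mem.pointer_size;
        let data = mem.allocate(16);
        let slot = mem.allocate(size);

        // Writing a pointer records a relocation; reading it back recovers the target
        // allocation as well as the stored offset.
        mem.write_ptr(slot, data.offset(4)).unwrap();
        assert_eq!(mem.read_ptr(slot).unwrap(), data.offset(4));

        // The same bytes can no longer be read as a plain integer, because the relocation
        // marks them as pointer bytes.
        assert!(match mem.read_uint(slot, size) {
            Err(EvalError::ReadPointerAsBytes) => true,
            _ => false,
        });
    }

    #[test]
    fn overlapping_copy_within_one_allocation() {
        let mut mem = Memory::new();
        let ptr = mem.allocate(8);
        mem.write_bytes(ptr, &[1, 2, 3, 4, 0, 0, 0, 0]).unwrap();

        // Copying within a single allocation takes the overlap-safe `ptr::copy` path.
        mem.copy(ptr, ptr.offset(2), 4).unwrap();
        assert_eq!(mem.read_uint(ptr.offset(2), 1).unwrap(), 1);
        assert_eq!(mem.read_uint(ptr.offset(5), 1).unwrap(), 4);
    }
}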