Implement a naive, slow version of undef mask copying.

Scott Olson 2016-04-06 04:08:52 -06:00
parent 8a0aa9291a
commit c08ddaaa48
2 changed files with 23 additions and 2 deletions


@@ -225,8 +225,10 @@ impl Memory {
             }
         }
 
-        // TODO(tsion): Copy undef ranges from src to dest.
-        self.copy_relocations(src, dest, size)
+        try!(self.copy_undef_mask(src, dest, size));
+        try!(self.copy_relocations(src, dest, size));
+
+        Ok(())
     }
 
     pub fn write_bytes(&mut self, ptr: Pointer, src: &[u8]) -> EvalResult<()> {
@@ -379,6 +381,20 @@ impl Memory {
     // Undefined bytes
     ////////////////////////////////////////////////////////////////////////////////
 
+    // FIXME(tsion): This is a very naive, slow version.
+    fn copy_undef_mask(&mut self, src: Pointer, dest: Pointer, size: usize) -> EvalResult<()> {
+        // The bits have to be saved locally before writing to dest in case src and dest overlap.
+        let mut v = Vec::with_capacity(size);
+        for i in 0..size {
+            let defined = try!(self.get(src.alloc_id)).undef_mask.get(src.offset + i);
+            v.push(defined);
+        }
+        for (i, defined) in v.into_iter().enumerate() {
+            try!(self.get_mut(dest.alloc_id)).undef_mask.set(dest.offset + i, defined);
+        }
+        Ok(())
+    }
+
     fn check_defined(&self, ptr: Pointer, size: usize) -> EvalResult<()> {
         let alloc = try!(self.get(ptr.alloc_id));
         if !alloc.undef_mask.is_range_defined(ptr.offset, ptr.offset + size) {
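
The local buffering in copy_undef_mask matters when src and dest overlap within the same allocation: reading and writing the mask in a single pass would pick up bits that had already been overwritten. Below is a minimal standalone sketch of the hazard, using a plain Vec<bool> in place of the real undef mask; the values and the main function are illustrative only, not part of this commit.

    fn main() {
        // Source bits live at offsets 0..3, destination at offsets 1..4 (overlapping).
        let mut mask = vec![true, true, false, false];

        // Naive single-pass copy: by the time i == 2, mask[2] has already been
        // overwritten, so the original `false` bit is lost.
        for i in 0..3 {
            let bit = mask[i];
            mask[1 + i] = bit;
        }
        assert_eq!(mask, vec![true, true, true, true]);

        // Buffering the source bits first, as copy_undef_mask does, preserves them.
        let mut mask = vec![true, true, false, false];
        let saved: Vec<bool> = (0..3).map(|i| mask[i]).collect();
        for (i, bit) in saved.into_iter().enumerate() {
            mask[1 + i] = bit;
        }
        assert_eq!(mask, vec![true, true, true, false]);
    }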


@@ -47,3 +47,8 @@ fn match_opt_some() -> i8 {
         None => 20,
     }
 }
+
+#[miri_run]
+fn two_nones() -> (Option<i16>, Option<i16>) {
+    (None, None)
+}