Auto merge of #55674 - oli-obk:miri_engine_refactoring, r=RalfJung

Miri engine refactoring

r? @RalfJung

Split out the "just moves stuff around" part of https://github.com/rust-lang/rust/pull/55293.
bors 2018-11-11 14:24:39 +00:00
commit a88613c869
10 changed files with 532 additions and 494 deletions

src/librustc/mir/interpret/allocation.rs (new file)

@@ -0,0 +1,233 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The virtual memory representation of the MIR interpreter
use super::{Pointer, EvalResult, AllocId};
use ty::layout::{Size, Align};
use syntax::ast::Mutability;
use std::iter;
use mir;
use std::ops::{Deref, DerefMut};
use rustc_data_structures::sorted_map::SortedMap;
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct Allocation<Tag=(),Extra=()> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer
pub bytes: Vec<u8>,
/// Maps from byte addresses to extra data for each pointer.
/// Only the first byte of a pointer is inserted into the map; i.e.,
/// every entry in this map applies to `pointer_size` consecutive bytes starting
/// at the given offset.
pub relocations: Relocations<Tag>,
/// Denotes undefined memory. Reading from undefined memory is forbidden in miri
pub undef_mask: UndefMask,
/// The alignment of the allocation to detect unaligned reads.
pub align: Align,
/// Whether the allocation is mutable.
/// Also used by codegen to determine if a static should be put into mutable memory,
/// which happens for `static mut` and `static` with interior mutability.
pub mutability: Mutability,
/// Extra state for the machine.
pub extra: Extra,
}
pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Default + Clone {
/// Hook for performing extra checks on a memory read access.
///
/// Takes read-only access to the allocation so we can keep all the memory read
/// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
/// need to mutate.
#[inline]
fn memory_read(
_alloc: &Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks on a memory write access.
#[inline]
fn memory_written(
_alloc: &mut Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
}
impl AllocationExtra<()> for () {}
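The `()` impl above gives machines with no per-allocation state a set of no-op hooks. The pattern (a trait with defaulted methods, plus interior mutability so the read hook can keep taking `&self`) can be sketched standalone; the names `AllocHooks` and `ReadCounter` below are illustrative, not part of the interpreter:

use std::cell::Cell;

// Illustrative stand-in for a machine's `AllocExtra` that counts reads.
#[derive(Debug, Default, Clone)]
struct ReadCounter {
    reads: Cell<usize>, // Cell: mutate through &self, as the doc above suggests
}

// Simplified analogue of `AllocationExtra`: hooks default to no-ops.
trait AllocHooks {
    fn memory_read(&self) {}
    fn memory_written(&mut self) {}
}

impl AllocHooks for ReadCounter {
    fn memory_read(&self) {
        self.reads.set(self.reads.get() + 1);
    }
}

fn main() {
    let extra = ReadCounter::default();
    extra.memory_read();
    extra.memory_read();
    assert_eq!(extra.reads.get(), 2);
}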
impl<Tag, Extra: Default> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
pub fn from_bytes(slice: &[u8], align: Align) -> Self {
let mut undef_mask = UndefMask::new(Size::ZERO);
undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
Self {
bytes: slice.to_owned(),
relocations: Relocations::new(),
undef_mask,
align,
mutability: Mutability::Immutable,
extra: Extra::default(),
}
}
pub fn from_byte_aligned_bytes(slice: &[u8]) -> Self {
Allocation::from_bytes(slice, Align::from_bytes(1, 1).unwrap())
}
pub fn undef(size: Size, align: Align) -> Self {
assert_eq!(size.bytes() as usize as u64, size.bytes());
Allocation {
bytes: vec![0; size.bytes() as usize],
relocations: Relocations::new(),
undef_mask: UndefMask::new(size),
align,
mutability: Mutability::Mutable,
extra: Extra::default(),
}
}
}
impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
impl<Tag, Id> Relocations<Tag, Id> {
pub fn new() -> Self {
Relocations(SortedMap::new())
}
// The caller must guarantee that the given relocations are already sorted
// by address and contain no duplicates.
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
Relocations(SortedMap::from_presorted_elements(r))
}
}
impl<Tag> Deref for Relocations<Tag> {
type Target = SortedMap<Size, (Tag, AllocId)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<Tag> DerefMut for Relocations<Tag> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
////////////////////////////////////////////////////////////////////////////////
type Block = u64;
const BLOCK_SIZE: u64 = 64;
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct UndefMask {
blocks: Vec<Block>,
len: Size,
}
impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});
impl UndefMask {
pub fn new(size: Size) -> Self {
let mut m = UndefMask {
blocks: vec![],
len: Size::ZERO,
};
m.grow(size, false);
m
}
/// Check whether the range `start..end` (end-exclusive) is entirely defined.
///
/// Returns `Ok(())` if the whole range is defined. Otherwise returns
/// `Err(idx)`, where `idx` is the index of the first undefined byte.
#[inline]
pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
if end > self.len {
return Err(self.len);
}
let idx = (start.bytes()..end.bytes())
.map(|i| Size::from_bytes(i))
.find(|&i| !self.get(i));
match idx {
Some(idx) => Err(idx),
None => Ok(())
}
}
pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
let len = self.len;
if end > len {
self.grow(end - len, new_state);
}
self.set_range_inbounds(start, end, new_state);
}
pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
for i in start.bytes()..end.bytes() {
self.set(Size::from_bytes(i), new_state);
}
}
#[inline]
pub fn get(&self, i: Size) -> bool {
let (block, bit) = bit_index(i);
(self.blocks[block] & 1 << bit) != 0
}
#[inline]
pub fn set(&mut self, i: Size, new_state: bool) {
let (block, bit) = bit_index(i);
if new_state {
self.blocks[block] |= 1 << bit;
} else {
self.blocks[block] &= !(1 << bit);
}
}
pub fn grow(&mut self, amount: Size, new_state: bool) {
let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
if amount.bytes() > unused_trailing_bits {
let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
assert_eq!(additional_blocks as usize as u64, additional_blocks);
self.blocks.extend(
iter::repeat(0).take(additional_blocks as usize),
);
}
let start = self.len;
self.len += amount;
self.set_range_inbounds(start, start + amount, new_state);
}
}
#[inline]
fn bit_index(bits: Size) -> (usize, usize) {
let bits = bits.bytes();
let a = bits / BLOCK_SIZE;
let b = bits % BLOCK_SIZE;
assert_eq!(a as usize as u64, a);
assert_eq!(b as usize as u64, b);
(a as usize, b as usize)
}

src/librustc/mir/interpret/mod.rs

@@ -17,27 +17,32 @@ macro_rules! err {
mod error;
mod value;
mod allocation;
mod pointer;
pub use self::error::{
EvalError, EvalResult, EvalErrorKind, AssertMessage, ConstEvalErr, struct_error,
FrameInfo, ConstEvalResult, ErrorHandled,
};
pub use self::value::{Scalar, ConstValue};
pub use self::value::{Scalar, ConstValue, ScalarMaybeUndef};
pub use self::allocation::{
Allocation, AllocationExtra,
Relocations, UndefMask,
};
pub use self::pointer::{Pointer, PointerArithmetic};
use std::fmt;
use mir;
use hir::def_id::DefId;
use ty::{self, TyCtxt, Instance};
use ty::layout::{self, Align, HasDataLayout, Size};
use ty::layout::{self, Size};
use middle::region;
use std::iter;
use std::io;
use std::ops::{Deref, DerefMut};
use std::hash::Hash;
use syntax::ast::Mutability;
use rustc_serialize::{Encoder, Decodable, Encodable};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
use rustc_data_structures::tiny_list::TinyList;
@@ -78,152 +83,6 @@ pub struct GlobalId<'tcx> {
pub promoted: Option<mir::Promoted>,
}
////////////////////////////////////////////////////////////////////////////////
// Pointer arithmetic
////////////////////////////////////////////////////////////////////////////////
pub trait PointerArithmetic: layout::HasDataLayout {
// These are not supposed to be overridden.
#[inline(always)]
fn pointer_size(&self) -> Size {
self.data_layout().pointer_size
}
/// Truncate the given value to the pointer size; also return whether there was an overflow.
#[inline]
fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
}
#[inline]
fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_offset(val, i);
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}
#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
let (res, over1) = val.overflowing_add(i);
let (res, over2) = self.truncate_to_ptr(u128::from(res));
(res, over1 || over2)
}
#[inline]
fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_signed_offset(val, i128::from(i));
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}
// Overflow checking only works properly on the range from -u64 to +u64.
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// trickery to ensure that i64::min_value() works fine
// this formula only works for strictly negative values; it panics for zero!
let n = u64::max_value() - (i as u64) + 1;
val.overflowing_sub(n)
} else {
self.overflowing_offset(val, i as u64)
}
}
}
impl<T: layout::HasDataLayout> PointerArithmetic for T {}
/// Pointer is generic over the type that represents a reference to Allocations,
/// thus making it possible for the most convenient representation to be used in
/// each context.
///
/// Defaults to the index-based and loosely coupled `AllocId`.
///
/// Pointer is also generic over the `Tag` associated with each pointer,
/// which is used to do provenance tracking during execution.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub struct Pointer<Tag=(),Id=AllocId> {
pub alloc_id: Id,
pub offset: Size,
pub tag: Tag,
}
/// Produces a `Pointer` which points to the beginning of the Allocation
impl From<AllocId> for Pointer {
#[inline(always)]
fn from(alloc_id: AllocId) -> Self {
Pointer::new(alloc_id, Size::ZERO)
}
}
impl<'tcx> Pointer<()> {
#[inline(always)]
pub fn new(alloc_id: AllocId, offset: Size) -> Self {
Pointer { alloc_id, offset, tag: () }
}
#[inline(always)]
pub fn with_default_tag<Tag>(self) -> Pointer<Tag>
where Tag: Default
{
Pointer::new_with_tag(self.alloc_id, self.offset, Default::default())
}
}
impl<'tcx, Tag> Pointer<Tag> {
#[inline(always)]
pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
Pointer { alloc_id, offset, tag }
}
#[inline]
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag
))
}
#[inline]
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}
#[inline(always)]
pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
self.overflowing_offset(i, cx).0
}
#[inline]
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
self.tag,
))
}
#[inline]
pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}
#[inline(always)]
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
self.overflowing_signed_offset(i128::from(i), cx).0
}
#[inline(always)]
pub fn erase_tag(self) -> Pointer {
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
}
}
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
pub struct AllocId(pub u64);
@@ -528,91 +387,6 @@ pub fn set_id_same_memory(&mut self, id: AllocId, mem: M) {
}
}
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct Allocation<Tag=(),Extra=()> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer
pub bytes: Vec<u8>,
/// Maps from byte addresses to extra data for each pointer.
/// Only the first byte of a pointer is inserted into the map; i.e.,
/// every entry in this map applies to `pointer_size` consecutive bytes starting
/// at the given offset.
pub relocations: Relocations<Tag>,
/// Denotes undefined memory. Reading from undefined memory is forbidden in miri
pub undef_mask: UndefMask,
/// The alignment of the allocation to detect unaligned reads.
pub align: Align,
/// Whether the allocation is mutable.
/// Also used by codegen to determine if a static should be put into mutable memory,
/// which happens for `static mut` and `static` with interior mutability.
pub mutability: Mutability,
/// Extra state for the machine.
pub extra: Extra,
}
impl<Tag, Extra: Default> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
pub fn from_bytes(slice: &[u8], align: Align) -> Self {
let mut undef_mask = UndefMask::new(Size::ZERO);
undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
Self {
bytes: slice.to_owned(),
relocations: Relocations::new(),
undef_mask,
align,
mutability: Mutability::Immutable,
extra: Extra::default(),
}
}
pub fn from_byte_aligned_bytes(slice: &[u8]) -> Self {
Allocation::from_bytes(slice, Align::from_bytes(1, 1).unwrap())
}
pub fn undef(size: Size, align: Align) -> Self {
assert_eq!(size.bytes() as usize as u64, size.bytes());
Allocation {
bytes: vec![0; size.bytes() as usize],
relocations: Relocations::new(),
undef_mask: UndefMask::new(size),
align,
mutability: Mutability::Mutable,
extra: Extra::default(),
}
}
}
impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
impl<Tag, Id> Relocations<Tag, Id> {
pub fn new() -> Self {
Relocations(SortedMap::new())
}
// The caller must guarantee that the given relocations are already sorted
// by address and contain no duplicates.
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
Relocations(SortedMap::from_presorted_elements(r))
}
}
impl<Tag> Deref for Relocations<Tag> {
type Target = SortedMap<Size, (Tag, AllocId)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<Tag> DerefMut for Relocations<Tag> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
////////////////////////////////////////////////////////////////////////////////
// Methods to access integers in the target endianness
////////////////////////////////////////////////////////////////////////////////
@@ -655,103 +429,3 @@ pub fn truncate(value: u128, size: Size) -> u128 {
// truncate (shift left to drop out leftover values, shift right to fill with zeroes)
(value << shift) >> shift
}
////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
////////////////////////////////////////////////////////////////////////////////
type Block = u64;
const BLOCK_SIZE: u64 = 64;
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct UndefMask {
blocks: Vec<Block>,
len: Size,
}
impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});
impl UndefMask {
pub fn new(size: Size) -> Self {
let mut m = UndefMask {
blocks: vec![],
len: Size::ZERO,
};
m.grow(size, false);
m
}
/// Check whether the range `start..end` (end-exclusive) is entirely defined.
///
/// Returns `Ok(())` if the whole range is defined. Otherwise returns
/// `Err(idx)`, where `idx` is the index of the first undefined byte.
#[inline]
pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
if end > self.len {
return Err(self.len);
}
let idx = (start.bytes()..end.bytes())
.map(|i| Size::from_bytes(i))
.find(|&i| !self.get(i));
match idx {
Some(idx) => Err(idx),
None => Ok(())
}
}
pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
let len = self.len;
if end > len {
self.grow(end - len, new_state);
}
self.set_range_inbounds(start, end, new_state);
}
pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
for i in start.bytes()..end.bytes() {
self.set(Size::from_bytes(i), new_state);
}
}
#[inline]
pub fn get(&self, i: Size) -> bool {
let (block, bit) = bit_index(i);
(self.blocks[block] & 1 << bit) != 0
}
#[inline]
pub fn set(&mut self, i: Size, new_state: bool) {
let (block, bit) = bit_index(i);
if new_state {
self.blocks[block] |= 1 << bit;
} else {
self.blocks[block] &= !(1 << bit);
}
}
pub fn grow(&mut self, amount: Size, new_state: bool) {
let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
if amount.bytes() > unused_trailing_bits {
let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
assert_eq!(additional_blocks as usize as u64, additional_blocks);
self.blocks.extend(
iter::repeat(0).take(additional_blocks as usize),
);
}
let start = self.len;
self.len += amount;
self.set_range_inbounds(start, start + amount, new_state);
}
}
#[inline]
fn bit_index(bits: Size) -> (usize, usize) {
let bits = bits.bytes();
let a = bits / BLOCK_SIZE;
let b = bits % BLOCK_SIZE;
assert_eq!(a as usize as u64, a);
assert_eq!(b as usize as u64, b);
(a as usize, b as usize)
}

src/librustc/mir/interpret/pointer.rs (new file)

@@ -0,0 +1,151 @@
use mir;
use ty::layout::{self, HasDataLayout, Size};
use super::{
AllocId, EvalResult,
};
////////////////////////////////////////////////////////////////////////////////
// Pointer arithmetic
////////////////////////////////////////////////////////////////////////////////
pub trait PointerArithmetic: layout::HasDataLayout {
// These are not supposed to be overridden.
#[inline(always)]
fn pointer_size(&self) -> Size {
self.data_layout().pointer_size
}
/// Truncate the given value to the pointer size; also return whether there was an overflow.
#[inline]
fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
}
#[inline]
fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_offset(val, i);
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}
#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
let (res, over1) = val.overflowing_add(i);
let (res, over2) = self.truncate_to_ptr(u128::from(res));
(res, over1 || over2)
}
#[inline]
fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_signed_offset(val, i128::from(i));
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}
// Overflow checking only works properly on the range from -u64 to +u64.
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// trickery to ensure that i64::min_value() works fine
// this formula only works for strictly negative values; it panics for zero!
let n = u64::max_value() - (i as u64) + 1;
val.overflowing_sub(n)
} else {
self.overflowing_offset(val, i as u64)
}
}
}
impl<T: layout::HasDataLayout> PointerArithmetic for T {}
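A worked check of the negative branch above, standalone and with `truncate_to_ptr` left out (that is, assuming 64-bit pointers); illustrative only:

// For i < 0, `i as u64` keeps the low 64 bits of the two's complement value,
// so n = u64::max_value() - (i as u64) + 1 recovers the magnitude of i.
// Subtracting n is then the same as adding i, with `overflowing_sub`
// reporting the underflow.
fn overflowing_signed_offset(val: u64, i: i128) -> (u64, bool) {
    if i < 0 {
        let n = u64::max_value() - (i as u64) + 1;
        val.overflowing_sub(n)
    } else {
        val.overflowing_add(i as u64)
    }
}

fn main() {
    assert_eq!(overflowing_signed_offset(100, -3), (97, false));
    assert_eq!(overflowing_signed_offset(0, -1), (u64::max_value(), true)); // underflow
    // i64::min_value() is exactly why the magnitude is computed this way:
    assert!(overflowing_signed_offset(5, i128::from(i64::min_value())).1);
}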
/// Pointer is generic over the type that represents a reference to Allocations,
/// thus making it possible for the most convenient representation to be used in
/// each context.
///
/// Defaults to the index-based and loosely coupled `AllocId`.
///
/// Pointer is also generic over the `Tag` associated with each pointer,
/// which is used to do provenance tracking during execution.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub struct Pointer<Tag=(),Id=AllocId> {
pub alloc_id: Id,
pub offset: Size,
pub tag: Tag,
}
/// Produces a `Pointer` which points to the beginning of the Allocation
impl From<AllocId> for Pointer {
#[inline(always)]
fn from(alloc_id: AllocId) -> Self {
Pointer::new(alloc_id, Size::ZERO)
}
}
impl<'tcx> Pointer<()> {
#[inline(always)]
pub fn new(alloc_id: AllocId, offset: Size) -> Self {
Pointer { alloc_id, offset, tag: () }
}
#[inline(always)]
pub fn with_default_tag<Tag>(self) -> Pointer<Tag>
where Tag: Default
{
Pointer::new_with_tag(self.alloc_id, self.offset, Default::default())
}
}
impl<'tcx, Tag> Pointer<Tag> {
#[inline(always)]
pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
Pointer { alloc_id, offset, tag }
}
#[inline]
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag
))
}
#[inline]
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}
#[inline(always)]
pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
self.overflowing_offset(i, cx).0
}
#[inline]
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
self.tag,
))
}
#[inline]
pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}
#[inline(always)]
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
self.overflowing_signed_offset(i128::from(i), cx).0
}
#[inline(always)]
pub fn erase_tag(self) -> Pointer {
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
}
}
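The tag round-trip provided by `with_default_tag` and `erase_tag` can be shown with a minimal standalone analogue; `Ptr` and the `u32` tag below are illustrative stand-ins, not interpreter types:

#[derive(Copy, Clone, Debug, PartialEq)]
struct Ptr<Tag = ()> {
    alloc_id: u64, // stand-in for `AllocId`
    offset: u64,   // stand-in for `Size`
    tag: Tag,
}

impl Ptr<()> {
    // Mirror of `with_default_tag`: untagged -> machine-tagged.
    fn with_default_tag<Tag: Default>(self) -> Ptr<Tag> {
        Ptr { alloc_id: self.alloc_id, offset: self.offset, tag: Tag::default() }
    }
}

impl<Tag> Ptr<Tag> {
    // Mirror of `erase_tag`: drop provenance data on the way out.
    fn erase_tag(self) -> Ptr {
        Ptr { alloc_id: self.alloc_id, offset: self.offset, tag: () }
    }
}

fn main() {
    let p = Ptr { alloc_id: 0, offset: 8, tag: () };
    let tagged: Ptr<u32> = p.with_default_tag(); // u32 as a toy tag type
    assert_eq!(tagged.erase_tag(), p);
}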

src/librustc/mir/interpret/value.rs

@@ -392,3 +392,131 @@ fn from(ptr: Pointer<Tag>) -> Self {
Scalar::Ptr(ptr)
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
Scalar(Scalar<Tag, Id>),
Undef,
}
impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
#[inline(always)]
fn from(s: Scalar<Tag>) -> Self {
ScalarMaybeUndef::Scalar(s)
}
}
impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
}
}
}
impl<'tcx> ScalarMaybeUndef<()> {
#[inline]
pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
where Tag: Default
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}
}
impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
#[inline]
pub fn erase_tag(self) -> ScalarMaybeUndef
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}
#[inline]
pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
match self {
ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
ScalarMaybeUndef::Undef => err!(ReadUndefBytes(Size::from_bytes(0))),
}
}
#[inline(always)]
pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
self.not_undef()?.to_ptr()
}
#[inline(always)]
pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
self.not_undef()?.to_bits(target_size)
}
#[inline(always)]
pub fn to_bool(self) -> EvalResult<'tcx, bool> {
self.not_undef()?.to_bool()
}
#[inline(always)]
pub fn to_char(self) -> EvalResult<'tcx, char> {
self.not_undef()?.to_char()
}
#[inline(always)]
pub fn to_f32(self) -> EvalResult<'tcx, f32> {
self.not_undef()?.to_f32()
}
#[inline(always)]
pub fn to_f64(self) -> EvalResult<'tcx, f64> {
self.not_undef()?.to_f64()
}
#[inline(always)]
pub fn to_u8(self) -> EvalResult<'tcx, u8> {
self.not_undef()?.to_u8()
}
#[inline(always)]
pub fn to_u32(self) -> EvalResult<'tcx, u32> {
self.not_undef()?.to_u32()
}
#[inline(always)]
pub fn to_u64(self) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_u64()
}
#[inline(always)]
pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_usize(cx)
}
#[inline(always)]
pub fn to_i8(self) -> EvalResult<'tcx, i8> {
self.not_undef()?.to_i8()
}
#[inline(always)]
pub fn to_i32(self) -> EvalResult<'tcx, i32> {
self.not_undef()?.to_i32()
}
#[inline(always)]
pub fn to_i64(self) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_i64()
}
#[inline(always)]
pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_isize(cx)
}
}
impl_stable_hash_for!(enum ::mir::interpret::ScalarMaybeUndef {
Scalar(v),
Undef
});
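Every accessor above funnels through `not_undef`: reject `Undef` once, then defer to the underlying `Scalar` conversion. A standalone analogue of that gate, with illustrative names and a simplified error type:

#[derive(Debug, PartialEq)]
enum MaybeUndef {
    Scalar(u128),
    Undef,
}

impl MaybeUndef {
    // Analogue of `not_undef`: the single point that rejects uninitialized data.
    fn not_undef(self) -> Result<u128, &'static str> {
        match self {
            MaybeUndef::Scalar(v) => Ok(v),
            MaybeUndef::Undef => Err("read of uninitialized bytes"),
        }
    }

    // Analogue of `to_bool`: check definedness first, then convert.
    fn to_bool(self) -> Result<bool, &'static str> {
        match self.not_undef()? {
            0 => Ok(false),
            1 => Ok(true),
            _ => Err("not a bool"),
        }
    }
}

fn main() {
    assert_eq!(MaybeUndef::Scalar(1).to_bool(), Ok(true));
    assert!(MaybeUndef::Undef.to_bool().is_err());
}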

src/librustc_mir/interpret/machine.rs

@@ -20,7 +20,7 @@
use rustc::ty::{self, layout::{Size, TyLayout}, query::TyCtxtAt};
use super::{
Allocation, AllocId, EvalResult, Scalar,
Allocation, AllocId, EvalResult, Scalar, AllocationExtra,
EvalContext, PlaceTy, MPlaceTy, OpTy, Pointer, MemoryKind,
};
@@ -78,7 +78,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static;
/// Extra data stored in every allocation.
type AllocExtra: ::std::fmt::Debug + Default + Clone;
type AllocExtra: AllocationExtra<Self::PointerTag>;
/// Memory's allocation map
type MemoryMap:
@@ -174,26 +174,6 @@ fn box_alloc(
dest: PlaceTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx>;
/// Hook for performing extra checks on a memory read access.
#[inline]
fn memory_read(
_alloc: &Allocation<Self::PointerTag, Self::AllocExtra>,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks on a memory write access.
#[inline]
fn memory_written(
_alloc: &mut Allocation<Self::PointerTag, Self::AllocExtra>,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks when memory gets deallocated.
#[inline]
fn memory_deallocated(

src/librustc_mir/interpret/memory.rs

@@ -28,7 +28,7 @@
use syntax::ast::Mutability;
use super::{
Pointer, AllocId, Allocation, ConstValue, GlobalId,
Pointer, AllocId, Allocation, ConstValue, GlobalId, AllocationExtra,
EvalResult, Scalar, EvalErrorKind, AllocType, PointerArithmetic,
Machine, AllocMap, MayLeak, ScalarMaybeUndef, ErrorHandled,
};
@@ -637,7 +637,7 @@ fn get_bytes_internal(
}
let alloc = self.get(ptr.alloc_id)?;
M::memory_read(alloc, ptr, size)?;
AllocationExtra::memory_read(alloc, ptr, size)?;
assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
assert_eq!(size.bytes() as usize as u64, size.bytes());
@@ -683,7 +683,7 @@ fn get_bytes_mut(
self.clear_relocations(ptr, size)?;
let alloc = self.get_mut(ptr.alloc_id)?;
M::memory_written(alloc, ptr, size)?;
AllocationExtra::memory_written(alloc, ptr, size)?;
assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
assert_eq!(size.bytes() as usize as u64, size.bytes());

src/librustc_mir/interpret/operand.rs

@@ -12,7 +12,6 @@
//! All high-level functions to read from memory work on operands as sources.
use std::convert::TryInto;
use std::fmt;
use rustc::{mir, ty};
use rustc::ty::layout::{self, Size, LayoutOf, TyLayout, HasDataLayout, IntegerExt};
@@ -23,130 +22,7 @@
EvalResult, EvalErrorKind
};
use super::{EvalContext, Machine, MemPlace, MPlaceTy, MemoryKind};
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
Scalar(Scalar<Tag, Id>),
Undef,
}
impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
#[inline(always)]
fn from(s: Scalar<Tag>) -> Self {
ScalarMaybeUndef::Scalar(s)
}
}
impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
}
}
}
impl<'tcx> ScalarMaybeUndef<()> {
#[inline]
pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
where Tag: Default
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}
}
impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
#[inline]
pub fn erase_tag(self) -> ScalarMaybeUndef
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}
#[inline]
pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
match self {
ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
ScalarMaybeUndef::Undef => err!(ReadUndefBytes(Size::from_bytes(0))),
}
}
#[inline(always)]
pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
self.not_undef()?.to_ptr()
}
#[inline(always)]
pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
self.not_undef()?.to_bits(target_size)
}
#[inline(always)]
pub fn to_bool(self) -> EvalResult<'tcx, bool> {
self.not_undef()?.to_bool()
}
#[inline(always)]
pub fn to_char(self) -> EvalResult<'tcx, char> {
self.not_undef()?.to_char()
}
#[inline(always)]
pub fn to_f32(self) -> EvalResult<'tcx, f32> {
self.not_undef()?.to_f32()
}
#[inline(always)]
pub fn to_f64(self) -> EvalResult<'tcx, f64> {
self.not_undef()?.to_f64()
}
#[inline(always)]
pub fn to_u8(self) -> EvalResult<'tcx, u8> {
self.not_undef()?.to_u8()
}
#[inline(always)]
pub fn to_u32(self) -> EvalResult<'tcx, u32> {
self.not_undef()?.to_u32()
}
#[inline(always)]
pub fn to_u64(self) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_u64()
}
#[inline(always)]
pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_usize(cx)
}
#[inline(always)]
pub fn to_i8(self) -> EvalResult<'tcx, i8> {
self.not_undef()?.to_i8()
}
#[inline(always)]
pub fn to_i32(self) -> EvalResult<'tcx, i32> {
self.not_undef()?.to_i32()
}
#[inline(always)]
pub fn to_i64(self) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_i64()
}
#[inline(always)]
pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_isize(cx)
}
}
pub use rustc::mir::interpret::ScalarMaybeUndef;
/// A `Value` represents a single immediate self-contained Rust value.
///

src/librustc_mir/interpret/place.rs

@@ -24,7 +24,7 @@
GlobalId, AllocId, Allocation, Scalar, EvalResult, Pointer, PointerArithmetic
};
use super::{
EvalContext, Machine, AllocMap,
EvalContext, Machine, AllocMap, AllocationExtra,
Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
};
@@ -264,6 +264,7 @@ impl<'a, 'mir, 'tcx, Tag, M> EvalContext<'a, 'mir, 'tcx, M>
Tag: ::std::fmt::Debug+Default+Copy+Eq+Hash+'static,
M: Machine<'a, 'mir, 'tcx, PointerTag=Tag>,
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<Tag, M::AllocExtra>)>,
M::AllocExtra: AllocationExtra<Tag>,
{
/// Take a value, which represents a (thin or fat) reference, and make it a place.
/// Alignment is just based on the type. This is the inverse of `create_ref`.

src/librustc_mir/interpret/snapshot.rs

@@ -195,11 +195,6 @@ fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
}
}
impl_stable_hash_for!(enum ::interpret::ScalarMaybeUndef {
Scalar(v),
Undef
});
impl_snapshot_for!(enum ScalarMaybeUndef {
Scalar(s),
Undef,

src/librustc_mir/interpret/validity.rs

@@ -17,7 +17,7 @@
use rustc::ty;
use rustc_data_structures::fx::FxHashSet;
use rustc::mir::interpret::{
Scalar, AllocType, EvalResult, EvalErrorKind
Scalar, AllocType, EvalResult, EvalErrorKind,
};
use super::{