add locking data structures and lock checks
This brings back some of the memory range ideas that were deleted with the packed refactoring.
parent d2a9235c63
commit 4372f1145f
@@ -2,7 +2,7 @@ use std::error::Error;
 use std::fmt;
 use rustc::mir;
 use rustc::ty::{FnSig, Ty, layout};
-use memory::{MemoryPointer, Kind};
+use memory::{MemoryPointer, LockInfo, AccessKind, Kind};
 use rustc_const_math::ConstMathErr;
 use syntax::codemap::Span;
 
@@ -51,6 +51,12 @@ pub enum EvalError<'tcx> {
         required: u64,
         has: u64,
     },
+    MemoryLockViolation {
+        ptr: MemoryPointer,
+        len: u64,
+        access: AccessKind,
+        lock: LockInfo,
+    },
     CalledClosureAsFunction,
     VtableForArgumentlessMethod,
     ModifiedConstantMemory,
@@ -59,6 +65,7 @@ pub enum EvalError<'tcx> {
     TypeNotPrimitive(Ty<'tcx>),
     ReallocatedWrongMemoryKind(Kind, Kind),
     DeallocatedWrongMemoryKind(Kind, Kind),
+    DeallocatedLockedMemory,
     ReallocateNonBasePtr,
     DeallocateNonBasePtr,
     IncorrectAllocationInformation,
@@ -97,6 +104,10 @@ impl<'tcx> Error for EvalError<'tcx> {
                 "pointer offset outside bounds of allocation",
             InvalidNullPointerUsage =>
                 "invalid use of NULL pointer",
+            MemoryLockViolation { .. } =>
+                "memory access conflicts with lock",
+            DeallocatedLockedMemory =>
+                "deallocated memory while a lock was held",
             ReadPointerAsBytes =>
                 "a raw memory access tried to access part of a pointer value as raw bytes",
             ReadBytesAsPointer =>
@@ -196,6 +207,10 @@ impl<'tcx> fmt::Display for EvalError<'tcx> {
                        if access { "memory access" } else { "pointer computed" },
                        ptr.offset, ptr.alloc_id, allocation_size)
             },
+            MemoryLockViolation { ptr, len, access, lock } => {
+                write!(f, "{:?} access at {:?}, size {}, is in conflict with lock {:?}",
+                       access, ptr, len, lock)
+            }
             NoMirFor(ref func) => write!(f, "no mir for `{}`", func),
             FunctionPointerTyMismatch(sig, got) =>
                 write!(f, "tried to call a function with sig {} through a function pointer of type {}", sig, got),

@@ -169,6 +169,12 @@ impl<'a, 'tcx> EvalContext<'a, 'tcx> {
         &self.stack
     }
 
+    #[inline]
+    pub fn cur_frame(&self) -> usize {
+        assert!(self.stack.len() > 0);
+        self.stack.len() - 1
+    }
+
     /// Returns true if the current frame or any parent frame is part of a ctfe.
     ///
     /// Used to disable features in const eval, which do not have a rfc enabling
@@ -1551,9 +1557,8 @@ impl<'a, 'tcx> EvalContext<'a, 'tcx> {
         if let Lvalue::Local { frame, local } = lvalue {
             let mut allocs = Vec::new();
             let mut msg = format!("{:?}", local);
-            let last_frame = self.stack.len() - 1;
-            if frame != last_frame {
-                write!(msg, " ({} frames up)", last_frame - frame).unwrap();
+            if frame != self.cur_frame() {
+                write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
             }
             write!(msg, ":").unwrap();
 
@@ -211,7 +211,7 @@ impl<'a, 'tcx> EvalContext<'a, 'tcx> {
         use rustc::mir::Lvalue::*;
         let lvalue = match *mir_lvalue {
             Local(mir::RETURN_POINTER) => self.frame().return_lvalue,
-            Local(local) => Lvalue::Local { frame: self.stack.len() - 1, local },
+            Local(local) => Lvalue::Local { frame: self.cur_frame(), local },
 
             Static(ref static_) => {
                 let instance = ty::Instance::mono(self.tcx, static_.def_id);
@@ -1,15 +1,111 @@
 use byteorder::{ReadBytesExt, WriteBytesExt, LittleEndian, BigEndian};
 use std::collections::{btree_map, BTreeMap, HashMap, HashSet, VecDeque};
-use std::{fmt, iter, ptr, mem, io};
+use std::{fmt, iter, ptr, mem, io, ops};
 
 use rustc::ty;
 use rustc::ty::layout::{self, TargetDataLayout, HasDataLayout};
 use syntax::ast::Mutability;
+use rustc::middle::region::CodeExtent;
 
 use error::{EvalError, EvalResult};
 use value::{PrimVal, Pointer};
 use eval_context::EvalContext;
 
+////////////////////////////////////////////////////////////////////////////////
+// Locks
+////////////////////////////////////////////////////////////////////////////////
+
+mod range {
+    use super::*;
+
+    // The derived `Ord` impl sorts first by the first field and then, if the first fields
+    // are equal, by the second field.
+    // This is exactly what we need for our purposes, since a range query on a
+    // BTreeSet/BTreeMap will give us all `MemoryRange`s whose `start` is <= the one we're
+    // looking for, but not > the end of the range we're checking.
+    // At the same time the `end` is irrelevant for the sorting and range searching, but
+    // used for the check.
+    // This kind of search breaks if `end < start`, so don't do that!
+    #[derive(Eq, PartialEq, Ord, PartialOrd, Debug)]
+    pub struct MemoryRange {
+        start: u64,
+        end: u64,
+    }
+
+    impl MemoryRange {
+        pub fn new(offset: u64, len: u64) -> MemoryRange {
+            assert!(len > 0);
+            MemoryRange {
+                start: offset,
+                end: offset + len,
+            }
+        }
+
+        pub fn range(offset: u64, len: u64) -> ops::Range<MemoryRange> {
+            assert!(len > 0);
+            // We select all elements that are within
+            // the range given by the offset into the allocation and the length.
+            // This is sound if "self.contains() || self.overlaps() == true" implies that self is in-range.
+            let left = MemoryRange {
+                start: 0,
+                end: offset,
+            };
+            let right = MemoryRange {
+                start: offset + len + 1,
+                end: 0,
+            };
+            left..right
+        }
+
+        pub fn contains(&self, offset: u64, len: u64) -> bool {
+            assert!(len > 0);
+            self.start <= offset && (offset + len) <= self.end
+        }
+
+        pub fn overlaps(&self, offset: u64, len: u64) -> bool {
+            assert!(len > 0);
+            //let non_overlap = (offset + len) <= self.start || self.end <= offset;
+            (offset + len) > self.start && self.end > offset
+        }
+    }
+}
+use self::range::*;
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum AccessKind {
+    Read,
+    Write,
+}
+
+#[derive(Copy, Clone, Debug)]
+struct DynamicLifetime {
+    frame: usize,
+    region: CodeExtent,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum LockStatus {
+    Held,
+    RecoverAfter(DynamicLifetime),
+}
+
+/// Information about a lock that is or will be held.
+#[derive(Copy, Clone, Debug)]
+pub struct LockInfo {
+    kind: AccessKind,
+    lifetime: DynamicLifetime,
+    status: LockStatus,
+}
+
+impl LockInfo {
+    fn access_permitted(&self, frame: usize, access: AccessKind) -> bool {
+        use self::AccessKind::*;
+        match (self.kind, access) {
+            (Read, Read) => true, // Read access to read-locked region is okay, no matter who's holding the read lock.
+            (Write, _) if self.lifetime.frame == frame => true, // All access is okay when we hold the write lock.
+            _ => false, // Somebody else holding the write lock is not okay
+        }
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 // Allocations and pointers
 ////////////////////////////////////////////////////////////////////////////////
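To make the range-query trick described in the comments concrete, here is a standalone sketch (an illustration of the pattern, not part of this commit) of how a `BTreeMap` keyed by `MemoryRange` answers "which locks might overlap this access?". The `range()` bounds cheaply narrow the candidates; `overlaps()` is the precise check.

    use std::collections::BTreeMap;
    use std::ops;

    #[derive(Eq, PartialEq, Ord, PartialOrd, Debug)]
    struct MemoryRange {
        start: u64,
        end: u64,
    }

    // Same bounds construction as `MemoryRange::range` in the diff above.
    fn query(offset: u64, len: u64) -> ops::Range<MemoryRange> {
        let left = MemoryRange { start: 0, end: offset };
        let right = MemoryRange { start: offset + len + 1, end: 0 };
        left..right
    }

    fn main() {
        let mut locks = BTreeMap::new();
        locks.insert(MemoryRange { start: 0, end: 4 }, "lock A");
        locks.insert(MemoryRange { start: 8, end: 16 }, "lock B");
        locks.insert(MemoryRange { start: 32, end: 40 }, "lock C");

        // An access of bytes [10, 14): the coarse window skips locks A and C
        // entirely; the precise overlap check then confirms the conflict with B.
        let (offset, len) = (10, 4);
        for (range, name) in locks.range(query(offset, len)) {
            let overlaps = (offset + len) > range.start && range.end > offset;
            println!("candidate {}: {:?}, overlaps: {}", name, range, overlaps);
        }
    }

Note that the window is only a coarse filter: a non-overlapping range can still sort inside it, which is why the loop re-checks each candidate with the exact overlap condition.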
@@ -41,6 +137,8 @@ pub struct Allocation {
     /// allocation is modified or deallocated in the future.
     /// Helps guarantee that stack allocations aren't deallocated via `rust_deallocate`
     pub kind: Kind,
+    /// Memory regions that are locked by some function
+    locks: BTreeMap<MemoryRange, LockInfo>,
 }
 
 #[derive(Debug, PartialEq, Copy, Clone)]
@@ -96,6 +194,10 @@ impl<'tcx> MemoryPointer {
     }
 }
 
+////////////////////////////////////////////////////////////////////////////////
+// Top-level interpreter memory
+////////////////////////////////////////////////////////////////////////////////
+
 pub type TlsKey = usize;
 
 #[derive(Copy, Clone, Debug)]
@@ -104,10 +206,6 @@ pub struct TlsEntry<'tcx> {
     dtor: Option<ty::Instance<'tcx>>,
 }
 
-////////////////////////////////////////////////////////////////////////////////
-// Top-level interpreter memory
-////////////////////////////////////////////////////////////////////////////////
-
 pub struct Memory<'a, 'tcx> {
     /// Actual memory allocations (arbitrary bytes, may contain pointers into other allocations).
     alloc_map: HashMap<AllocId, Allocation>,
@@ -151,6 +249,9 @@ pub struct Memory<'a, 'tcx> {
     /// alignment checking is currently enforced for read and/or write accesses.
     reads_are_aligned: bool,
     writes_are_aligned: bool,
+
+    /// The current stack frame. Used to check accesses against locks.
+    cur_frame: usize,
 }
 
 impl<'a, 'tcx> Memory<'a, 'tcx> {
@@ -169,6 +270,7 @@ impl<'a, 'tcx> Memory<'a, 'tcx> {
             next_thread_local: 0,
             reads_are_aligned: true,
             writes_are_aligned: true,
+            cur_frame: usize::max_value(),
         }
     }
 
@@ -220,6 +322,7 @@ impl<'a, 'tcx> Memory<'a, 'tcx> {
             align,
             kind,
             mutable: Mutability::Mutable,
+            locks: BTreeMap::new(),
         };
         let id = self.next_id;
         self.next_id.0 += 1;
@@ -260,6 +363,9 @@ impl<'a, 'tcx> Memory<'a, 'tcx> {
         if alloc.kind != kind {
            return Err(EvalError::DeallocatedWrongMemoryKind(alloc.kind, kind));
         }
+        if !alloc.locks.is_empty() {
+            return Err(EvalError::DeallocatedLockedMemory);
+        }
         if let Some((size, align)) = size_and_align {
             if size != alloc.bytes.len() as u64 || align != alloc.align {
                 return Err(EvalError::IncorrectAllocationInformation);
@@ -321,6 +427,23 @@ impl<'a, 'tcx> Memory<'a, 'tcx> {
         Ok(())
     }
 
+    pub(crate) fn check_locks(&self, ptr: MemoryPointer, len: u64, access: AccessKind) -> EvalResult<'tcx> {
+        let alloc = self.get(ptr.alloc_id)?;
+        for (range, lock) in alloc.locks.range(MemoryRange::range(ptr.offset, len)) {
+            // Check if the lock is active, overlaps this access, and is in conflict with the access.
+            if let LockStatus::Held = lock.status {
+                if range.overlaps(ptr.offset, len) && !lock.access_permitted(self.cur_frame, access) {
+                    return Err(EvalError::MemoryLockViolation { ptr, len, access, lock: *lock });
+                }
+            }
+        }
+        Ok(())
+    }
+
+    pub(crate) fn set_cur_frame(&mut self, cur_frame: usize) {
+        self.cur_frame = cur_frame;
+    }
+
     pub(crate) fn create_tls_key(&mut self, dtor: Option<ty::Instance<'tcx>>) -> TlsKey {
         let new_key = self.next_thread_local;
         self.next_thread_local += 1;
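The decision `check_locks` delegates to `access_permitted` boils down to a small compatibility matrix: concurrent reads are always fine, and a write lock admits only its holder. A standalone sketch of that matrix (simplified stand-in types, not the commit's structs — the real `LockInfo` also tracks a region) that can be run to verify the table:

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    enum AccessKind { Read, Write }

    // Simplified lock: just its kind and the frame that holds it.
    fn access_permitted(lock: (AccessKind, usize), frame: usize, access: AccessKind) -> bool {
        use AccessKind::*;
        match (lock.0, access) {
            // Shared reads are fine regardless of who holds the read lock.
            (Read, Read) => true,
            // The frame holding the write lock may do anything.
            (Write, _) if lock.1 == frame => true,
            // Everything else conflicts.
            _ => false,
        }
    }

    fn main() {
        use AccessKind::*;
        assert!(access_permitted((Read, 0), 5, Read));    // read vs. read lock: ok
        assert!(!access_permitted((Read, 0), 5, Write));  // write vs. read lock: conflict
        assert!(access_permitted((Write, 3), 3, Read));   // holder's own frame: ok
        assert!(!access_permitted((Write, 3), 5, Read));  // other frame vs. write lock: conflict
        println!("all compatibility checks passed");
    }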
@@ -540,6 +663,7 @@ impl<'a, 'tcx> Memory<'a, 'tcx> {
         if size == 0 {
             return Ok(&[]);
         }
+        self.check_locks(ptr, size, AccessKind::Read)?;
         self.check_bounds(ptr.offset(size, self)?, true)?; // if ptr.offset is in bounds, then so is ptr (because offset checks for overflow)
         let alloc = self.get(ptr.alloc_id)?;
         assert_eq!(ptr.offset as usize as u64, ptr.offset);
@@ -556,6 +680,7 @@ impl<'a, 'tcx> Memory<'a, 'tcx> {
         if size == 0 {
             return Ok(&mut []);
         }
+        self.check_locks(ptr, size, AccessKind::Write)?;
         self.check_bounds(ptr.offset(size, self.layout)?, true)?; // if ptr.offset is in bounds, then so is ptr (because offset checks for overflow)
         let alloc = self.get_mut(ptr.alloc_id)?;
         assert_eq!(ptr.offset as usize as u64, ptr.offset);
@@ -694,6 +819,7 @@ impl<'a, 'tcx> Memory<'a, 'tcx> {
                 return Err(EvalError::ReadPointerAsBytes);
             }
             self.check_defined(ptr, (size + 1) as u64)?;
+            self.check_locks(ptr, (size + 1) as u64, AccessKind::Read)?;
             Ok(&alloc.bytes[offset..offset + size])
         },
         None => Err(EvalError::UnterminatedCString(ptr)),
@@ -34,6 +34,9 @@ impl<'a, 'tcx> EvalContext<'a, 'tcx> {
             return Ok(false);
         }
 
+        let cur_frame = self.cur_frame();
+        self.memory.set_cur_frame(cur_frame);
+
         let block = self.frame().block;
         let stmt_id = self.frame().stmt;
         let mir = self.mir();
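`Memory` performs the lock checks but cannot see the interpreter's call stack, so the frame index is mirrored into it before each statement executes; until the first step runs, `cur_frame` stays at the `usize::max_value()` sentinel, which can never match a real frame. A minimal sketch of that handshake, with simplified hypothetical types (not the commit's actual structs):

    struct Memory {
        cur_frame: usize,
    }

    impl Memory {
        fn new() -> Memory {
            // Sentinel: never equals a real frame index.
            Memory { cur_frame: usize::max_value() }
        }
        fn set_cur_frame(&mut self, cur_frame: usize) {
            self.cur_frame = cur_frame;
        }
    }

    struct EvalContext {
        stack: Vec<String>, // one entry per active frame
        memory: Memory,
    }

    impl EvalContext {
        fn cur_frame(&self) -> usize {
            assert!(self.stack.len() > 0);
            self.stack.len() - 1
        }
        fn step(&mut self) {
            // Mirror the frame index before executing the statement, so any
            // lock check performed by Memory attributes the access correctly.
            let cur_frame = self.cur_frame();
            self.memory.set_cur_frame(cur_frame);
            // ... execute one MIR statement with lock-checked accesses ...
        }
    }

    fn main() {
        let mut ecx = EvalContext { stack: vec!["main".to_string()], memory: Memory::new() };
        ecx.step();
        assert_eq!(ecx.memory.cur_frame, 0);
    }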
@@ -116,7 +119,7 @@ impl<'a, 'tcx> EvalContext<'a, 'tcx> {
             // Mark locals as dead or alive.
             StorageLive(ref lvalue) | StorageDead(ref lvalue) => {
                 let (frame, local) = match self.eval_lvalue(lvalue)? {
-                    Lvalue::Local{ frame, local } if self.stack.len() == frame+1 => (frame, local),
+                    Lvalue::Local{ frame, local } if self.cur_frame() == frame => (frame, local),
                     _ => return Err(EvalError::Unimplemented("Storage annotations must refer to locals of the topmost stack frame.".to_owned())) // FIXME maybe this should get its own error type
                 };
                 let old_val = match stmt.kind {