//! Miri: an interpreter for Rust MIR.
//!
//! NOTE(review): this file was recovered from a web scrape of a git-blame view;
//! interleaved blame timestamps and page chrome have been stripped.
#![feature(
rustc_private,
2017-12-14 04:03:55 -06:00
catch_expr,
2018-05-01 11:13:22 -05:00
inclusive_range_fields,
inclusive_range_methods,
2017-07-21 10:25:30 -05:00
)]
2018-07-10 10:32:38 -05:00
#![cfg_attr(feature = "cargo-clippy", allow(cast_lossless))]
2017-07-21 10:25:30 -05:00
#[macro_use]
extern crate log;
2018-01-06 09:21:24 -06:00
// From rustc.
#[macro_use]
2017-07-21 10:25:30 -05:00
extern crate rustc;
2017-12-14 04:03:55 -06:00
extern crate rustc_data_structures;
2018-05-03 17:29:13 -05:00
extern crate rustc_mir;
2018-05-01 11:13:22 -05:00
extern crate rustc_target;
2017-07-21 10:25:30 -05:00
extern crate syntax;
2017-12-14 04:03:55 -06:00
extern crate regex;
#[macro_use]
extern crate lazy_static;
2017-07-21 10:25:30 -05:00
2017-07-24 08:19:32 -05:00
use rustc::ty::{self, TyCtxt};
use rustc::ty::layout::{TyLayout, LayoutOf, Size};
2018-03-23 06:18:33 -05:00
use rustc::ty::subst::Subst;
2017-07-24 08:19:32 -05:00
use rustc::hir::def_id::DefId;
2017-07-21 10:25:30 -05:00
use rustc::mir;
use rustc_data_structures::fx::FxHasher;
use syntax::ast::Mutability;
use syntax::codemap::Span;
use std::collections::{HashMap, BTreeMap};
use std::hash::{Hash, Hasher};
2017-07-21 10:25:30 -05:00
pub use rustc::mir::interpret::*;
2017-12-14 04:03:55 -06:00
pub use rustc_mir::interpret::*;
2017-07-21 10:25:30 -05:00
mod fn_call;
mod operator;
mod intrinsic;
mod helpers;
mod memory;
2017-07-31 06:30:44 -05:00
mod tls;
2017-12-14 04:03:55 -06:00
mod locks;
mod range_map;
mod validation;
2017-07-24 08:19:32 -05:00
use fn_call::EvalContextExt as MissingFnsEvalContextExt;
use operator::EvalContextExt as OperatorEvalContextExt;
use intrinsic::EvalContextExt as IntrinsicEvalContextExt;
2017-08-01 04:11:57 -05:00
use tls::EvalContextExt as TlsEvalContextExt;
2017-12-14 04:03:55 -06:00
use locks::LockInfo;
use locks::MemoryExt as LockMemoryExt;
use validation::EvalContextExt as ValidationEvalContextExt;
use range_map::RangeMap;
use validation::{ValidationQuery, AbsPlace};
2017-07-24 08:19:32 -05:00
2018-05-26 10:07:34 -05:00
pub trait ScalarExt {
fn null() -> Self;
2018-05-30 07:29:32 -05:00
fn from_i32(i: i32) -> Self;
2018-05-26 10:07:34 -05:00
fn from_u128(i: u128) -> Self;
fn from_i128(i: i128) -> Self;
fn from_usize(i: u64, ptr_size: Size) -> Self;
fn from_isize(i: i64, ptr_size: Size) -> Self;
fn from_f32(f: f32) -> Self;
fn from_f64(f: f64) -> Self;
fn to_usize<'a, 'mir, 'tcx>(self, ecx: &rustc_mir::interpret::EvalContext<'a, 'mir, 'tcx, Evaluator<'tcx>>) -> EvalResult<'static, u64>;
2018-05-26 10:07:34 -05:00
fn is_null(self) -> EvalResult<'static, bool>;
/// HACK: this function just extracts all bits if `defined != 0`
/// Mainly used for args of C-functions and we should totally correctly fetch the size
/// of their arguments
2018-05-26 10:07:34 -05:00
fn to_bytes(self) -> EvalResult<'static, u128>;
}
impl ScalarExt for Scalar {
fn null() -> Self {
Scalar::Bits { bits: 0, defined: 128 }
}
2018-05-30 07:29:32 -05:00
fn from_i32(i: i32) -> Self {
Scalar::Bits { bits: i as u32 as u128, defined: 32 }
2018-05-26 10:07:34 -05:00
}
fn from_u128(i: u128) -> Self {
Scalar::Bits { bits: i, defined: 128 }
}
fn from_i128(i: i128) -> Self {
Scalar::Bits { bits: i as u128, defined: 128 }
}
fn from_usize(i: u64, ptr_size: Size) -> Self {
Scalar::Bits { bits: i as u128, defined: ptr_size.bits() as u8 }
}
fn from_isize(i: i64, ptr_size: Size) -> Self {
Scalar::Bits { bits: i as i128 as u128, defined: ptr_size.bits() as u8 }
}
fn from_f32(f: f32) -> Self {
Scalar::Bits { bits: f.to_bits() as u128, defined: 32 }
}
fn from_f64(f: f64) -> Self {
Scalar::Bits { bits: f.to_bits() as u128, defined: 64 }
}
fn to_usize<'a, 'mir, 'tcx>(self, ecx: &rustc_mir::interpret::EvalContext<'a, 'mir, 'tcx, Evaluator<'tcx>>) -> EvalResult<'static, u64> {
let b = self.to_bits(ecx.memory.pointer_size())?;
2018-05-26 10:07:34 -05:00
assert_eq!(b as u64 as u128, b);
Ok(b as u64)
}
fn is_null(self) -> EvalResult<'static, bool> {
match self {
Scalar::Bits { bits, defined } => {
if defined > 0 {
Ok(bits == 0)
} else {
err!(ReadUndefBytes)
}
}
Scalar::Ptr(_) => Ok(false)
}
}
fn to_bytes(self) -> EvalResult<'static, u128> {
match self {
Scalar::Bits { defined: 0, .. } => err!(ReadUndefBytes),
Scalar::Bits { bits, .. } => Ok(bits),
Scalar::Ptr(_) => err!(ReadPointerAsBytes),
}
}
}
2018-06-12 00:30:29 -05:00
pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>(
2017-07-21 10:25:30 -05:00
tcx: TyCtxt<'a, 'tcx, 'tcx>,
main_id: DefId,
start_wrapper: Option<DefId>,
2018-06-11 11:49:17 -05:00
) -> EvalResult<'tcx, (EvalContext<'a, 'mir, 'tcx, Evaluator<'tcx>>, Option<Pointer>)> {
let mut ecx = EvalContext::new(tcx.at(syntax::codemap::DUMMY_SP), ty::ParamEnv::reveal_all(), Default::default(), Default::default());
2017-07-21 10:25:30 -05:00
2018-06-11 11:49:17 -05:00
let main_instance = ty::Instance::mono(ecx.tcx.tcx, main_id);
let main_mir = ecx.load_mir(main_instance.def)?;
let mut cleanup_ptr = None; // Scalar to be deallocated when we are done
2017-07-21 10:25:30 -05:00
2018-06-11 11:49:17 -05:00
if !main_mir.return_ty().is_nil() || main_mir.arg_count != 0 {
return err!(Unimplemented(
"miri does not support main functions without `fn()` type signatures"
.to_owned(),
));
}
2017-07-21 10:25:30 -05:00
2018-06-11 11:49:17 -05:00
if let Some(start_id) = start_wrapper {
let main_ret_ty = ecx.tcx.fn_sig(main_id).output();
let main_ret_ty = main_ret_ty.no_late_bound_regions().unwrap();
let start_instance = ty::Instance::resolve(
ecx.tcx.tcx,
ty::ParamEnv::reveal_all(),
start_id,
ecx.tcx.mk_substs(
::std::iter::once(ty::subst::Kind::from(main_ret_ty)))
).unwrap();
let start_mir = ecx.load_mir(start_instance.def)?;
if start_mir.arg_count != 3 {
return err!(AbiViolation(format!(
"'start' lang item should have three arguments, but has {}",
start_mir.arg_count
)));
}
2017-07-21 10:25:30 -05:00
2018-06-11 11:49:17 -05:00
// Return value
let size = ecx.tcx.data_layout.pointer_size;
let align = ecx.tcx.data_layout.pointer_align;
2018-07-02 11:00:36 -05:00
let ret_ptr = ecx.memory_mut().allocate(size, align, MemoryKind::Stack)?;
2018-06-11 11:49:17 -05:00
cleanup_ptr = Some(ret_ptr);
// Push our stack frame
ecx.push_stack_frame(
start_instance,
start_mir.span,
start_mir,
Place::from_ptr(ret_ptr, align),
StackPopCleanup::None,
)?;
let mut args = ecx.frame().mir.args_iter();
// First argument: pointer to main()
let main_ptr = ecx.memory_mut().create_fn_alloc(main_instance);
let dest = ecx.eval_place(&mir::Place::Local(args.next().unwrap()))?;
let main_ty = main_instance.ty(ecx.tcx.tcx);
let main_ptr_ty = ecx.tcx.mk_fn_ptr(main_ty.fn_sig(ecx.tcx.tcx));
ecx.write_value(
ValTy {
value: Value::Scalar(Scalar::Ptr(main_ptr)),
ty: main_ptr_ty,
},
dest,
)?;
// Second argument (argc): 1
let dest = ecx.eval_place(&mir::Place::Local(args.next().unwrap()))?;
let ty = ecx.tcx.types.isize;
ecx.write_scalar(dest, Scalar::from_u128(1), ty)?;
// FIXME: extract main source file path
// Third argument (argv): &[b"foo"]
let dest = ecx.eval_place(&mir::Place::Local(args.next().unwrap()))?;
let ty = ecx.tcx.mk_imm_ptr(ecx.tcx.mk_imm_ptr(ecx.tcx.types.u8));
let foo = ecx.memory.allocate_bytes(b"foo\0");
let ptr_size = ecx.memory.pointer_size();
let ptr_align = ecx.tcx.data_layout.pointer_align;
2018-07-02 11:00:36 -05:00
let foo_ptr = ecx.memory.allocate(ptr_size, ptr_align, MemoryKind::Stack)?;
2018-06-11 11:49:17 -05:00
ecx.memory.write_scalar(foo_ptr.into(), ptr_align, Scalar::Ptr(foo), ptr_size, false)?;
ecx.memory.mark_static_initialized(foo_ptr.alloc_id, Mutability::Immutable)?;
ecx.write_ptr(dest, foo_ptr.into(), ty)?;
assert!(args.next().is_none(), "start lang item has more arguments than expected");
} else {
ecx.push_stack_frame(
main_instance,
main_mir.span,
main_mir,
Place::from_scalar_ptr(Scalar::from_u128(1), ty::layout::Align::from_bytes(1, 1).unwrap()),
StackPopCleanup::None,
)?;
// No arguments
let mut args = ecx.frame().mir.args_iter();
assert!(args.next().is_none(), "main function must not have arguments");
}
2018-06-11 11:49:17 -05:00
Ok((ecx, cleanup_ptr))
}
pub fn eval_main<'a, 'tcx: 'a>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
main_id: DefId,
start_wrapper: Option<DefId>,
) {
let (mut ecx, cleanup_ptr) = create_ecx(tcx, main_id, start_wrapper).expect("Couldn't create ecx");
2017-07-21 10:25:30 -05:00
2018-06-11 11:49:17 -05:00
let res: EvalResult = do catch {
2017-07-21 10:25:30 -05:00
while ecx.step()? {}
2017-08-01 04:11:57 -05:00
ecx.run_tls_dtors()?;
2017-07-21 10:25:30 -05:00
if let Some(cleanup_ptr) = cleanup_ptr {
ecx.memory_mut().deallocate(
cleanup_ptr,
None,
MemoryKind::Stack,
)?;
2017-07-21 10:25:30 -05:00
}
2018-06-11 11:49:17 -05:00
};
2017-07-21 10:25:30 -05:00
2018-06-11 11:49:17 -05:00
match res {
2017-07-21 10:25:30 -05:00
Ok(()) => {
let leaks = ecx.memory().leak_report();
if leaks != 0 {
// TODO: Prevent leaks which aren't supposed to be there
2018-03-23 06:18:33 -05:00
//tcx.sess.err("the evaluated program leaked memory");
2017-07-21 10:25:30 -05:00
}
}
Err(e) => {
if let Some(frame) = ecx.stack().last() {
let block = &frame.mir.basic_blocks()[frame.block];
let span = if frame.stmt < block.statements.len() {
block.statements[frame.stmt].source_info.span
} else {
block.terminator().source_info.span
};
2018-07-02 11:00:36 -05:00
let mut err = struct_error(ecx.tcx.tcx.at(span), "constant evaluation error");
let (frames, span) = ecx.generate_stacktrace(None);
err.span_label(span, e.to_string());
2018-07-02 11:00:36 -05:00
for FrameInfo { span, location, .. } in frames {
err.span_note(span, &format!("inside call to `{}`", location));
}
err.emit();
} else {
ecx.tcx.sess.err(&e.to_string());
}
for (i, frame) in ecx.stack().iter().enumerate() {
trace!("-------------------");
trace!("Frame {}", i);
trace!(" return: {:#?}", frame.return_place);
for (i, local) in frame.locals.iter().enumerate() {
if let Some(local) = local {
trace!(" local {}: {:?}", i, local);
}
}
}
2017-07-21 10:25:30 -05:00
}
}
}
#[derive(Clone, Default, PartialEq, Eq)]
2017-12-14 04:03:55 -06:00
pub struct Evaluator<'tcx> {
2017-07-21 10:25:30 -05:00
/// Environment variables set by `setenv`
/// Miri does not expose env vars from the host to the emulated program
2018-05-26 10:07:34 -05:00
pub(crate) env_vars: HashMap<Vec<u8>, Pointer>,
2017-12-14 04:03:55 -06:00
/// Places that were suspended by the validation subsystem, and will be recovered later
pub(crate) suspended: HashMap<DynamicLifetime, Vec<ValidationQuery<'tcx>>>,
2017-07-21 10:25:30 -05:00
}
impl<'tcx> Hash for Evaluator<'tcx> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Exhaustive destructuring: adding a field to `Evaluator` forces a
        // decision here about whether it participates in the hash.
        let Evaluator {
            env_vars,
            suspended: _,
        } = self;
        // `HashMap` iteration order is unstable, so hash each entry on its
        // own and combine with a commutative wrapping sum before feeding the
        // result into `state`.
        let mut combined: u64 = 0;
        for (name, ptr) in env_vars.iter() {
            let mut entry_hasher = FxHasher::default();
            name.hash(&mut entry_hasher);
            ptr.hash(&mut entry_hasher);
            combined = combined.wrapping_add(entry_hasher.finish());
        }
        combined.hash(state);
    }
}
/// Identifier for a pthreads-style thread-local storage slot (see `MemoryData`).
pub type TlsKey = u128;
2017-07-21 10:25:30 -05:00
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
2017-07-21 10:25:30 -05:00
pub struct TlsEntry<'tcx> {
2018-05-26 10:07:34 -05:00
data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread.
2017-07-21 10:25:30 -05:00
dtor: Option<ty::Instance<'tcx>>,
}
#[derive(Clone, Default, PartialEq, Eq)]
pub struct MemoryData<'tcx> {
2017-07-21 10:25:30 -05:00
/// The Key to use for the next thread-local allocation.
next_thread_local: TlsKey,
/// pthreads-style thread-local storage.
thread_local: BTreeMap<TlsKey, TlsEntry<'tcx>>,
2017-12-14 04:03:55 -06:00
/// Memory regions that are locked by some function
///
/// Only mutable (static mut, heap, stack) allocations have an entry in this map.
/// The entry is created when allocating the memory and deleted after deallocation.
2018-01-14 21:31:59 -06:00
locks: HashMap<AllocId, RangeMap<LockInfo<'tcx>>>,
2018-03-23 06:06:32 -05:00
2018-04-07 04:43:46 -05:00
statics: HashMap<GlobalId<'tcx>, AllocId>,
2017-07-21 10:25:30 -05:00
}
impl<'tcx> Hash for MemoryData<'tcx> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Destructure every field so the compiler flags this impl whenever
        // `MemoryData` grows a new field.
        let MemoryData {
            next_thread_local: _,
            thread_local,
            locks: _,
            statics: _,
        } = self;
        // Only the TLS contents feed the hash; the slot counter, lock state
        // and the statics cache are deliberately left out. `BTreeMap`
        // iterates in a deterministic order, so plain hashing is fine.
        Hash::hash(thread_local, state);
    }
}
2018-01-14 11:59:13 -06:00
impl<'mir, 'tcx: 'mir> Machine<'mir, 'tcx> for Evaluator<'tcx> {
2017-07-21 10:25:30 -05:00
type MemoryData = MemoryData<'tcx>;
type MemoryKinds = memory::MemoryKind;
2017-07-21 10:25:30 -05:00
/// Returns Ok() when the function was handled, fail otherwise
fn eval_fn_call<'a>(
2018-01-14 11:59:13 -06:00
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
2017-07-21 10:25:30 -05:00
instance: ty::Instance<'tcx>,
2017-12-06 01:39:31 -06:00
destination: Option<(Place, mir::BasicBlock)>,
args: &[ValTy<'tcx>],
span: Span,
2017-07-21 10:25:30 -05:00
sig: ty::FnSig<'tcx>,
) -> EvalResult<'tcx, bool> {
ecx.eval_fn_call(instance, destination, args, span, sig)
2017-07-21 10:25:30 -05:00
}
fn call_intrinsic<'a>(
2018-01-14 11:59:13 -06:00
ecx: &mut rustc_mir::interpret::EvalContext<'a, 'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[ValTy<'tcx>],
2017-12-06 01:39:31 -06:00
dest: Place,
2017-11-21 06:32:40 -06:00
dest_layout: TyLayout<'tcx>,
target: mir::BasicBlock,
) -> EvalResult<'tcx> {
2017-11-21 06:32:40 -06:00
ecx.call_intrinsic(instance, args, dest, dest_layout, target)
}
2017-08-01 04:11:57 -05:00
fn try_ptr_op<'a>(
2018-01-14 11:59:13 -06:00
ecx: &rustc_mir::interpret::EvalContext<'a, 'mir, 'tcx, Self>,
bin_op: mir::BinOp,
2018-05-26 10:07:34 -05:00
left: Scalar,
left_ty: ty::Ty<'tcx>,
2018-05-26 10:07:34 -05:00
right: Scalar,
right_ty: ty::Ty<'tcx>,
2018-05-26 10:07:34 -05:00
) -> EvalResult<'tcx, Option<(Scalar, bool)>> {
ecx.ptr_op(bin_op, left, left_ty, right, right_ty)
}
2018-01-14 11:59:13 -06:00
fn mark_static_initialized<'a>(
2018-04-07 04:43:46 -05:00
mem: &mut Memory<'a, 'mir, 'tcx, Self>,
id: AllocId,
2018-01-14 11:59:13 -06:00
_mutability: Mutability,
) -> EvalResult<'tcx, bool> {
2018-04-07 04:43:46 -05:00
use memory::MemoryKind::*;
match mem.get_alloc_kind(id) {
// FIXME: This could be allowed, but not for env vars set during miri execution
2018-04-07 04:43:46 -05:00
Some(MemoryKind::Machine(Env)) => err!(Unimplemented("statics can't refer to env vars".to_owned())),
2018-01-14 11:59:13 -06:00
_ => Ok(false), // TODO: What does the bool mean?
2018-04-07 04:43:46 -05:00
}
2018-01-14 11:59:13 -06:00
}
fn init_static<'a>(
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
cid: GlobalId<'tcx>,
) -> EvalResult<'tcx, AllocId> {
// Step 1: If the static has already been evaluated return the cached version
2018-04-07 04:43:46 -05:00
if let Some(alloc_id) = ecx.memory.data.statics.get(&cid) {
2018-03-23 06:18:33 -05:00
return Ok(*alloc_id);
2018-03-23 06:06:32 -05:00
}
2018-03-23 06:18:33 -05:00
let tcx = ecx.tcx.tcx;
// Step 2: Load mir
2018-03-23 06:18:33 -05:00
let mut mir = ecx.load_mir(cid.instance.def)?;
if let Some(index) = cid.promoted {
mir = &mir.promoted[index];
}
assert!(mir.arg_count == 0);
// Step 3: Allocate storage
2018-03-23 06:06:32 -05:00
let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
2018-03-23 06:18:33 -05:00
assert!(!layout.is_unsized());
let ptr = ecx.memory.allocate(
layout.size,
2018-03-23 06:06:32 -05:00
layout.align,
2018-07-02 11:00:36 -05:00
MemoryKind::Stack,
2018-03-23 06:06:32 -05:00
)?;
2018-03-23 06:18:33 -05:00
// Step 4: Cache allocation id for recursive statics
2018-04-07 04:43:46 -05:00
assert!(ecx.memory.data.statics.insert(cid, ptr.alloc_id).is_none());
2018-03-23 06:18:33 -05:00
// Step 5: Push stackframe to evaluate static
2018-03-23 06:18:33 -05:00
let cleanup = StackPopCleanup::None;
ecx.push_stack_frame(
cid.instance,
mir.span,
mir,
Place::from_ptr(ptr, layout.align),
cleanup,
)?;
// Step 6: Step until static has been initialized
let call_stackframe = ecx.stack().len();
while ecx.step()? && ecx.stack().len() >= call_stackframe {
if ecx.stack().len() == call_stackframe {
2018-04-07 03:44:19 -05:00
let frame = ecx.frame_mut();
let bb = &frame.mir.basic_blocks()[frame.block];
if bb.statements.len() == frame.stmt && !bb.is_cleanup {
2018-07-10 10:32:38 -05:00
if let ::rustc::mir::TerminatorKind::Return = bb.terminator().kind {
for (local, _local_decl) in mir.local_decls.iter_enumerated().skip(1) {
// Don't deallocate locals, because the return value might reference them
frame.storage_dead(local);
}
}
}
}
}
2018-03-23 06:18:33 -05:00
2018-04-07 04:43:46 -05:00
// TODO: Freeze immutable statics without copying them to the global static cache
// Step 7: Return the alloc
2018-03-23 06:18:33 -05:00
Ok(ptr.alloc_id)
}
fn box_alloc<'a>(
2018-01-14 11:59:13 -06:00
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
ty: ty::Ty<'tcx>,
2017-12-06 01:39:31 -06:00
dest: Place,
) -> EvalResult<'tcx> {
2017-12-06 08:03:24 -06:00
let layout = ecx.layout_of(ty)?;
// Call the `exchange_malloc` lang item
let malloc = ecx.tcx.lang_items().exchange_malloc_fn().unwrap();
2018-01-14 11:59:13 -06:00
let malloc = ty::Instance::mono(ecx.tcx.tcx, malloc);
let malloc_mir = ecx.load_mir(malloc.def)?;
ecx.push_stack_frame(
malloc,
malloc_mir.span,
malloc_mir,
dest,
// Don't do anything when we are done. The statement() function will increment
// the old stack frame's stmt counter to the next statement, which means that when
// exchange_malloc returns, we go on evaluating exactly where we want to be.
StackPopCleanup::None,
)?;
let mut args = ecx.frame().mir.args_iter();
let usize = ecx.tcx.types.usize;
// First argument: size
2017-12-06 01:39:31 -06:00
let dest = ecx.eval_place(&mir::Place::Local(args.next().unwrap()))?;
ecx.write_value(
ValTy {
2018-05-26 10:07:34 -05:00
value: Value::Scalar(Scalar::from_u128(match layout.size.bytes() {
0 => 1 as u128,
size => size as u128,
2018-07-10 10:32:38 -05:00
})),
ty: usize,
},
dest,
)?;
// Second argument: align
2017-12-06 01:39:31 -06:00
let dest = ecx.eval_place(&mir::Place::Local(args.next().unwrap()))?;
ecx.write_value(
ValTy {
2018-05-26 10:07:34 -05:00
value: Value::Scalar(Scalar::from_u128(layout.align.abi().into())),
ty: usize,
},
dest,
)?;
// No more arguments
assert!(args.next().is_none(), "exchange_malloc lang item has more arguments than expected");
Ok(())
}
fn global_item_with_linkage<'a>(
2018-04-17 07:26:17 -05:00
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
_mutability: Mutability,
) -> EvalResult<'tcx> {
2018-04-17 07:26:17 -05:00
panic!("remove this function from rustc");
}
2017-12-14 04:03:55 -06:00
fn check_locks<'a>(
2018-01-14 11:59:13 -06:00
mem: &Memory<'a, 'mir, 'tcx, Self>,
2018-05-26 10:07:34 -05:00
ptr: Pointer,
size: Size,
2017-12-14 04:03:55 -06:00
access: AccessKind,
) -> EvalResult<'tcx> {
mem.check_locks(ptr, size.bytes(), access)
2017-12-14 04:03:55 -06:00
}
fn add_lock<'a>(
2018-01-14 11:59:13 -06:00
mem: &mut Memory<'a, 'mir, 'tcx, Self>,
2018-01-14 21:31:59 -06:00
id: AllocId,
2017-12-14 04:03:55 -06:00
) {
mem.data.locks.insert(id, RangeMap::new());
}
fn free_lock<'a>(
2018-01-14 11:59:13 -06:00
mem: &mut Memory<'a, 'mir, 'tcx, Self>,
2018-01-14 21:31:59 -06:00
id: AllocId,
2017-12-14 04:03:55 -06:00
len: u64,
) -> EvalResult<'tcx> {
mem.data.locks
.remove(&id)
.expect("allocation has no corresponding locks")
.check(
Some(mem.cur_frame),
0,
len,
AccessKind::Read,
)
.map_err(|lock| {
EvalErrorKind::DeallocatedLockedMemory {
//ptr, FIXME
2018-05-26 10:07:34 -05:00
ptr: Pointer {
2017-12-14 04:03:55 -06:00
alloc_id: AllocId(0),
offset: Size::from_bytes(0),
2017-12-14 04:03:55 -06:00
},
lock: lock.active,
}.into()
})
}
fn end_region<'a>(
2018-01-14 11:59:13 -06:00
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
2017-12-14 04:03:55 -06:00
reg: Option<::rustc::middle::region::Scope>,
) -> EvalResult<'tcx> {
ecx.end_region(reg)
}
fn validation_op<'a>(
2018-05-07 03:49:54 -05:00
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
_op: ::rustc::mir::ValidationOp,
_operand: &::rustc::mir::ValidationOperand<'tcx, ::rustc::mir::Place<'tcx>>,
2017-12-14 04:03:55 -06:00
) -> EvalResult<'tcx> {
// FIXME: prevent this from ICEing
//ecx.validation_op(op, operand)
Ok(())
2017-12-14 04:03:55 -06:00
}
2017-07-21 10:25:30 -05:00
}