//! Global machine state as well as implementation of the interpreter engine
//! `Machine` trait.
use std::borrow::Cow;
use std::cell::RefCell;
use std::num::NonZeroU64;
use std::rc::Rc;
use std::time::Instant;
use std::fmt;
use log::trace;
use rand::rngs::StdRng;
use rustc_ast::attr;
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::{
middle::codegen_fn_attrs::CodegenFnAttrFlags,
mir,
ty::{
self,
layout::{LayoutCx, LayoutError, TyAndLayout},
TyCtxt,
},
};
use rustc_span::{def_id::DefId, symbol::{sym, Symbol}};
use rustc_target::abi::{LayoutOf, Size};
use crate::*;
pub use crate::threads::{ThreadId, ThreadManager, ThreadState, ThreadLocalStorage};
// Some global facts about the emulated machine.

/// Size of an emulated memory page, in bytes.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
/// Base address for integer addresses handed out to allocations.
pub const STACK_ADDR: u64 = 32 * PAGE_SIZE; // not really about the "stack", but where we start assigning integer addresses to allocations
/// Reported stack size.
pub const STACK_SIZE: u64 = 16 * PAGE_SIZE; // whatever
/// Number of CPUs the emulated machine reports.
pub const NUM_CPUS: u64 = 1;
/// Extra data stored with each stack frame
#[derive(Debug)]
pub struct FrameData<'tcx> {
/// Extra data for Stacked Borrows.
pub call_id: stacked_borrows::CallId,
2019-11-19 07:51:08 -06:00
/// If this is Some(), then this is a special "catch unwind" frame (the frame of `try_fn`
/// called by `try`). When this frame is popped during unwinding a panic,
/// we stop unwinding, use the `CatchUnwindData` to handle catching.
pub catch_unwind: Option<CatchUnwindData<'tcx>>,
}
/// Extra memory kinds
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// `__rust_alloc` memory.
    Rust,
    /// `malloc` memory.
    C,
    /// Windows `HeapAlloc` memory.
    WinHeap,
    /// Memory for args, errno, extern statics and other parts of the machine-managed environment.
    /// This memory may leak.
    Machine,
    /// Memory for env vars. Separate from `Machine` because we clean it up and leak-check it.
    Env,
    /// Globals copied from `tcx`.
    /// This memory may leak.
    Global,
}
// Idiom: implement `From` instead of `Into` — the std blanket impl then
// provides `Into<MemoryKind<MiriMemoryKind>> for MiriMemoryKind` for free,
// so all existing `.into()` call sites keep working.
impl From<MiriMemoryKind> for MemoryKind<MiriMemoryKind> {
    #[inline(always)]
    fn from(kind: MiriMemoryKind) -> MemoryKind<MiriMemoryKind> {
        MemoryKind::Machine(kind)
    }
}
impl MayLeak for MiriMemoryKind {
#[inline(always)]
fn may_leak(self) -> bool {
use self::MiriMemoryKind::*;
match self {
Rust | C | WinHeap | Env => false,
Machine | Global => true,
}
}
}
impl fmt::Display for MiriMemoryKind {
    /// Human-readable name of the memory kind, used in diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use self::MiriMemoryKind::*;
        let name = match self {
            Rust => "Rust heap",
            C => "C heap",
            WinHeap => "Windows heap",
            Machine => "machine-managed memory",
            Env => "environment variable",
            Global => "global",
        };
        f.write_str(name)
    }
}
/// Extra per-allocation data attached to every Miri allocation.
#[derive(Debug, Clone)]
pub struct AllocExtra {
    /// Stacked Borrows state is only added if it is enabled.
    pub stacked_borrows: Option<stacked_borrows::AllocExtra>,
}
/// Extra global memory data
#[derive(Clone, Debug)]
2020-03-08 11:54:47 -05:00
pub struct MemoryExtra {
pub stacked_borrows: Option<stacked_borrows::MemoryExtra>,
pub intptrcast: intptrcast::MemoryExtra,
pub tls: ThreadLocalStorage,
/// Mapping extern static names to their canonical allocation.
extern_statics: FxHashMap<Symbol, AllocId>,
2019-07-23 14:38:53 -05:00
/// The random number generator used for resolving non-determinism.
/// Needs to be queried by ptr_to_int, hence needs interior mutability.
2019-07-23 14:38:53 -05:00
pub(crate) rng: RefCell<StdRng>,
/// An allocation ID to report when it is being allocated
2020-04-14 18:00:56 -05:00
/// (helps for debugging memory leaks and use after free bugs).
tracked_alloc_id: Option<AllocId>,
2020-04-13 10:51:22 -05:00
/// Controls whether alignment of memory accesses is being checked.
check_alignment: bool,
}
2020-03-08 11:54:47 -05:00
impl MemoryExtra {
2020-04-13 10:51:22 -05:00
pub fn new(
rng: StdRng,
stacked_borrows: bool,
tracked_pointer_tag: Option<PtrId>,
tracked_alloc_id: Option<AllocId>,
check_alignment: bool,
) -> Self {
let stacked_borrows = if stacked_borrows {
Some(Rc::new(RefCell::new(stacked_borrows::GlobalState::new(tracked_pointer_tag))))
} else {
None
};
MemoryExtra {
stacked_borrows,
intptrcast: Default::default(),
2020-03-02 15:36:15 -06:00
extern_statics: FxHashMap::default(),
rng: RefCell::new(rng),
tracked_alloc_id,
2020-04-13 10:51:22 -05:00
check_alignment,
tls: Default::default(),
}
}
2020-03-28 04:07:23 -05:00
fn add_extern_static<'tcx, 'mir>(
this: &mut MiriEvalContext<'mir, 'tcx>,
name: &str,
ptr: Scalar<Tag>,
) {
let ptr = ptr.assert_ptr();
assert_eq!(ptr.offset, Size::ZERO);
this.memory
.extra
.extern_statics
.insert(Symbol::intern(name), ptr.alloc_id)
.unwrap_none();
}
/// Sets up the "extern statics" for this machine.
2020-03-08 11:54:47 -05:00
pub fn init_extern_statics<'tcx, 'mir>(
this: &mut MiriEvalContext<'mir, 'tcx>,
) -> InterpResult<'tcx> {
2020-03-25 03:15:52 -05:00
match this.tcx.sess.target.target.target_os.as_str() {
2020-03-07 14:33:27 -06:00
"linux" => {
// "__cxa_thread_atexit_impl"
// This should be all-zero, pointer-sized.
2020-04-18 10:53:54 -05:00
let layout = this.machine.layouts.usize;
2020-03-04 11:15:14 -06:00
let place = this.allocate(layout, MiriMemoryKind::Machine.into());
2020-03-28 11:38:38 -05:00
this.write_scalar(Scalar::from_machine_usize(0, this), place.into())?;
2020-03-28 04:07:23 -05:00
Self::add_extern_static(this, "__cxa_thread_atexit_impl", place.ptr);
2020-03-07 14:33:27 -06:00
// "environ"
2020-03-28 04:07:23 -05:00
Self::add_extern_static(this, "environ", this.machine.env_vars.environ.unwrap().ptr);
}
"windows" => {
// "_tls_used"
// This is some obscure hack that is part of the Windows TLS story. It's a `u8`.
2020-04-18 10:53:54 -05:00
let layout = this.machine.layouts.u8;
2020-03-28 04:07:23 -05:00
let place = this.allocate(layout, MiriMemoryKind::Machine.into());
this.write_scalar(Scalar::from_u8(0), place.into())?;
Self::add_extern_static(this, "_tls_used", place.ptr);
}
2020-03-22 02:51:15 -05:00
_ => {} // No "extern statics" supported on this target
}
Ok(())
}
}
2020-04-05 16:03:44 -05:00
/// Precomputed layouts of primitive types
2020-04-18 10:53:54 -05:00
pub struct PrimitiveLayouts<'tcx> {
pub unit: TyAndLayout<'tcx>,
pub i8: TyAndLayout<'tcx>,
pub i32: TyAndLayout<'tcx>,
pub isize: TyAndLayout<'tcx>,
pub u8: TyAndLayout<'tcx>,
pub u32: TyAndLayout<'tcx>,
pub usize: TyAndLayout<'tcx>,
}
impl<'mir, 'tcx: 'mir> PrimitiveLayouts<'tcx> {
2020-04-05 16:03:44 -05:00
fn new(layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Result<Self, LayoutError<'tcx>> {
Ok(Self {
2020-04-18 10:53:54 -05:00
unit: layout_cx.layout_of(layout_cx.tcx.mk_unit())?,
i8: layout_cx.layout_of(layout_cx.tcx.types.i8)?,
2020-04-05 16:03:44 -05:00
i32: layout_cx.layout_of(layout_cx.tcx.types.i32)?,
2020-04-18 10:53:54 -05:00
isize: layout_cx.layout_of(layout_cx.tcx.types.isize)?,
u8: layout_cx.layout_of(layout_cx.tcx.types.u8)?,
2020-04-05 16:03:44 -05:00
u32: layout_cx.layout_of(layout_cx.tcx.types.u32)?,
2020-04-18 10:53:54 -05:00
usize: layout_cx.layout_of(layout_cx.tcx.types.usize)?,
2020-04-05 16:03:44 -05:00
})
}
}
/// The machine itself.
pub struct Evaluator<'mir, 'tcx> {
/// Environment variables set by `setenv`.
/// Miri does not expose env vars from the host to the emulated program.
2020-03-08 11:54:47 -05:00
pub(crate) env_vars: EnvVars<'tcx>,
/// Program arguments (`Option` because we can only initialize them after creating the ecx).
/// These are *pointers* to argc/argv because macOS.
/// We also need the full command line as one string because of Windows.
2019-11-02 05:50:21 -05:00
pub(crate) argc: Option<Scalar<Tag>>,
pub(crate) argv: Option<Scalar<Tag>>,
pub(crate) cmd_line: Option<Scalar<Tag>>,
/// Last OS error location in memory. It is a 32-bit integer.
2019-10-12 20:58:02 -05:00
pub(crate) last_error: Option<MPlaceTy<'tcx, Tag>>,
/// TLS state.
pub(crate) tls: TlsData<'tcx>,
/// If enabled, the `env_vars` field is populated with the host env vars during initialization
/// and random number generation is delegated to the host.
pub(crate) communicate: bool,
2019-09-24 17:28:00 -05:00
/// Whether to enforce the validity invariant.
pub(crate) validate: bool,
2019-09-24 17:28:00 -05:00
pub(crate) file_handler: FileHandler,
2020-01-25 12:57:15 -06:00
pub(crate) dir_handler: DirHandler,
/// The temporary used for storing the argument of
/// the call to `miri_start_panic` (the panic payload) when unwinding.
/// This is pointer-sized, and matches the `Payload` type in `src/libpanic_unwind/miri.rs`.
pub(crate) panic_payload: Option<Scalar<Tag>>,
2020-03-19 17:00:02 -05:00
/// The "time anchor" for this machine's monotone clock (for `Instant` simulation).
pub(crate) time_anchor: Instant,
/// The set of threads.
2020-04-09 14:06:33 -05:00
pub(crate) threads: ThreadManager<'mir, 'tcx>,
2020-04-05 16:03:44 -05:00
/// Precomputed `TyLayout`s for primitive data types that are commonly used inside Miri.
pub(crate) layouts: PrimitiveLayouts<'tcx>,
}
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
2020-04-05 16:03:44 -05:00
pub(crate) fn new(
communicate: bool,
validate: bool,
layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>,
) -> Self {
let layouts = PrimitiveLayouts::new(layout_cx)
.expect("Couldn't get layouts of primitive types");
Evaluator {
2019-08-13 16:17:41 -05:00
// `env_vars` could be initialized properly here if `Memory` were available before
// calling this method.
2019-08-14 10:24:35 -05:00
env_vars: EnvVars::default(),
argc: None,
argv: None,
cmd_line: None,
2019-10-03 10:21:55 -05:00
last_error: None,
tls: TlsData::default(),
communicate,
validate,
2019-09-24 17:28:00 -05:00
file_handler: Default::default(),
2020-01-25 12:57:15 -06:00
dir_handler: Default::default(),
2019-12-23 05:56:23 -06:00
panic_payload: None,
2020-03-19 17:00:02 -05:00
time_anchor: Instant::now(),
2020-04-05 16:03:44 -05:00
layouts,
threads: Default::default(),
}
}
}
2019-07-05 16:47:10 -05:00
/// A rustc InterpCx for Miri.
pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>;
/// A little trait that's useful to be inherited by extension traits.
pub trait MiriEvalContextExt<'mir, 'tcx> {
    /// Shared access to the underlying `MiriEvalContext`.
    fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
    /// Mutable access to the underlying `MiriEvalContext`.
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
}
// `MiriEvalContext` trivially implements the extension-trait hook by handing
// out itself; extension traits bound on `MiriEvalContextExt` thus work
// directly on the interpreter context.
impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
        self
    }
}
/// Machine hook implementations.
impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
2020-03-25 03:05:24 -05:00
type MemoryKind = MiriMemoryKind;
type FrameExtra = FrameData<'tcx>;
2020-03-08 11:54:47 -05:00
type MemoryExtra = MemoryExtra;
type AllocExtra = AllocExtra;
type PointerTag = Tag;
type ExtraFnVal = Dlsym;
2019-12-23 05:56:23 -06:00
type MemoryMap =
MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
2020-03-25 03:05:24 -05:00
const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);
2020-04-13 10:51:22 -05:00
#[inline(always)]
fn enforce_alignment(memory_extra: &MemoryExtra) -> bool {
memory_extra.check_alignment
}
2019-08-05 08:49:19 -05:00
#[inline(always)]
fn stack<'a>(
ecx: &'a InterpCx<'mir, 'tcx, Self>
) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
ecx.active_thread_stack()
}
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>
) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
ecx.active_thread_stack_mut()
}
#[inline(always)]
2019-07-05 16:47:10 -05:00
fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
ecx.machine.validate
}
#[inline(always)]
2019-12-04 16:31:39 -06:00
fn find_mir_or_eval_fn(
2019-07-05 16:47:10 -05:00
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Tag>],
2019-11-25 15:48:31 -06:00
ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
2019-12-04 16:31:39 -06:00
ecx.find_mir_or_eval_fn(instance, args, ret, unwind)
}
#[inline(always)]
fn call_extra_fn(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
fn_val: Dlsym,
args: &[OpTy<'tcx, Tag>],
2019-11-25 15:48:31 -06:00
ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
_unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
2019-11-25 15:48:31 -06:00
ecx.call_dlsym(fn_val, args, ret)
}
#[inline(always)]
fn call_intrinsic(
2019-07-05 16:47:10 -05:00
ecx: &mut rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Tag>],
2019-11-25 15:48:31 -06:00
ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
ecx.call_intrinsic(instance, args, ret, unwind)
}
#[inline(always)]
fn assert_panic(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
2020-02-13 07:01:35 -06:00
msg: &mir::AssertMessage<'tcx>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
2020-03-11 14:05:44 -05:00
ecx.assert_panic(msg, unwind)
}
2020-03-12 14:46:58 -05:00
#[inline(always)]
fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx, !> {
2020-03-18 06:16:37 -05:00
throw_machine_stop!(TerminationInfo::Abort(None))
2020-03-12 14:46:58 -05:00
}
#[inline(always)]
fn binary_ptr_op(
2019-07-05 16:47:10 -05:00
ecx: &rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
bin_op: mir::BinOp,
left: ImmTy<'tcx, Tag>,
right: ImmTy<'tcx, Tag>,
2020-04-02 17:05:35 -05:00
) -> InterpResult<'tcx, (Scalar<Tag>, bool, ty::Ty<'tcx>)> {
ecx.binary_ptr_op(bin_op, left, right)
}
fn box_alloc(
2019-07-05 16:47:10 -05:00
ecx: &mut InterpCx<'mir, 'tcx, Self>,
dest: PlaceTy<'tcx, Tag>,
) -> InterpResult<'tcx> {
trace!("box_alloc for {:?}", dest.layout.ty);
let layout = ecx.layout_of(dest.layout.ty.builtin_deref(false).unwrap().ty)?;
// First argument: `size`.
// (`0` is allowed here -- this is expected to be handled by the lang item).
let size = Scalar::from_machine_usize(layout.size.bytes(), ecx);
// Second argument: `align`.
let align = Scalar::from_machine_usize(layout.align.abi.bytes(), ecx);
// Call the `exchange_malloc` lang item.
let malloc = ecx.tcx.lang_items().exchange_malloc_fn().unwrap();
let malloc = ty::Instance::mono(ecx.tcx.tcx, malloc);
ecx.call_function(
malloc,
&[size.into(), align.into()],
Some(dest),
// Don't do anything when we are done. The `statement()` function will increment
// the old stack frame's stmt counter to the next statement, which means that when
// `exchange_malloc` returns, we go on evaluating exactly where we want to be.
StackPopCleanup::None { cleanup: true },
)?;
Ok(())
}
fn access_local(
ecx: &InterpCx<'mir, 'tcx, Self>,
frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
local: mir::Local,
) -> InterpResult<'tcx, Operand<Self::PointerTag>> {
match frame.body.local_decls[local].local_info {
mir::LocalInfo::StaticRef { def_id, is_thread_local: true } => {
let static_alloc_id = ecx.tcx.alloc_map.lock().create_static_alloc(def_id);
let alloc_id = ecx.memory.extra.tls.get_or_register_allocation(*ecx.memory.tcx, static_alloc_id);
let tag = Self::tag_global_base_pointer(&ecx.memory.extra, alloc_id);
let pointer: Pointer = alloc_id.into();
let pointer = pointer.with_tag(tag);
let scalar: Scalar<_> = pointer.into();
let scalar: ScalarMaybeUndef<_> = scalar.into();
let immediate: Immediate<_> = scalar.into();
Ok(
Operand::Immediate(immediate)
)
},
_ => frame.locals[local].access(),
}
}
fn canonical_alloc_id(mem: &Memory<'mir, 'tcx, Self>, id: AllocId) -> AllocId {
let tcx = mem.tcx;
let alloc = tcx.alloc_map.lock().get(id);
fn is_thread_local<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)
}
match alloc {
Some(GlobalAlloc::Static(def_id)) if tcx.is_foreign_item(def_id) => {
if is_thread_local(*tcx, def_id) {
unimplemented!("Foreign thread local statics are not supported yet.");
}
// Figure out if this is an extern static, and if yes, which one.
let attrs = tcx.get_attrs(def_id);
let link_name = match attr::first_attr_value_str_by_name(&attrs, sym::link_name) {
Some(name) => name,
None => tcx.item_name(def_id),
};
// Check if we know this one.
if let Some(canonical_id) = mem.extra.extern_statics.get(&link_name) {
trace!("canonical_alloc_id: {:?} ({}) -> {:?}", id, link_name, canonical_id);
*canonical_id
} else {
// Return original id; `Memory::get_static_alloc` will throw an error.
id
}
},
Some(GlobalAlloc::Static(def_id)) if is_thread_local(*tcx, def_id) => {
// We have a thread local, so we need to get a unique allocation id for it.
mem.extra.tls.get_or_register_allocation(*tcx, id)
},
_ => {
// No need to canonicalize anything.
id
}
}
}
2020-04-01 18:28:33 -05:00
#[inline(always)]
fn resolve_maybe_global_alloc(
tcx: ty::query::TyCtxtAt<'tcx>,
extra: &Self::MemoryExtra,
id: AllocId,
) -> Option<mir::interpret::GlobalAlloc<'tcx>> {
extra.tls.resolve_allocation(*tcx, id)
}
2019-11-29 12:50:37 -06:00
fn init_allocation_extra<'b>(
2020-03-08 11:54:47 -05:00
memory_extra: &MemoryExtra,
id: AllocId,
alloc: Cow<'b, Allocation>,
2020-03-25 03:05:24 -05:00
kind: Option<MemoryKind<Self::MemoryKind>>,
2019-12-01 03:18:41 -06:00
) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
if Some(id) == memory_extra.tracked_alloc_id {
register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id));
}
let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
let alloc = alloc.into_owned();
2020-03-01 03:26:24 -06:00
let (stacks, base_tag) =
2020-04-12 03:32:36 -05:00
if let Some(stacked_borrows) = &memory_extra.stacked_borrows {
2020-03-01 03:26:24 -06:00
let (stacks, base_tag) =
Stacks::new_allocation(id, alloc.size, Rc::clone(stacked_borrows), kind);
(Some(stacks), base_tag)
} else {
// No stacks, no tag.
(None, Tag::Untagged)
};
let mut stacked_borrows = memory_extra.stacked_borrows.as_ref().map(|sb| sb.borrow_mut());
2019-09-17 05:30:14 -05:00
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.with_tags_and_extra(
2019-10-07 08:39:59 -05:00
|alloc| {
2020-04-12 03:32:36 -05:00
if let Some(stacked_borrows) = &mut stacked_borrows {
2020-03-25 03:05:24 -05:00
// Only globals may already contain pointers at this point
assert_eq!(kind, MiriMemoryKind::Global.into());
stacked_borrows.global_base_ptr(alloc)
} else {
Tag::Untagged
2019-10-07 08:39:59 -05:00
}
2019-09-05 11:17:58 -05:00
},
2019-12-23 05:56:23 -06:00
AllocExtra { stacked_borrows: stacks },
2019-09-05 11:17:58 -05:00
);
2019-12-01 03:18:41 -06:00
(Cow::Owned(alloc), base_tag)
}
2020-04-14 18:00:56 -05:00
#[inline(always)]
fn before_deallocation(
memory_extra: &mut Self::MemoryExtra,
id: AllocId,
) -> InterpResult<'tcx> {
if Some(id) == memory_extra.tracked_alloc_id {
register_diagnostic(NonHaltingDiagnostic::FreedAlloc(id));
}
Ok(())
}
#[inline(always)]
2020-03-25 03:05:24 -05:00
fn tag_global_base_pointer(memory_extra: &MemoryExtra, id: AllocId) -> Self::PointerTag {
2020-04-12 03:32:36 -05:00
if let Some(stacked_borrows) = &memory_extra.stacked_borrows {
2020-03-25 03:05:24 -05:00
stacked_borrows.borrow_mut().global_base_ptr(id)
} else {
Tag::Untagged
}
}
#[inline(always)]
fn retag(
2019-07-05 16:47:10 -05:00
ecx: &mut InterpCx<'mir, 'tcx, Self>,
kind: mir::RetagKind,
place: PlaceTy<'tcx, Tag>,
) -> InterpResult<'tcx> {
2020-04-13 10:31:19 -05:00
if ecx.memory.extra.stacked_borrows.is_some() {
ecx.retag(kind, place)
2020-04-13 10:31:19 -05:00
} else {
Ok(())
}
}
#[inline(always)]
2020-04-13 09:08:12 -05:00
fn init_frame_extra(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
frame: Frame<'mir, 'tcx, Tag>,
) -> InterpResult<'tcx, Frame<'mir, 'tcx, Tag, FrameData<'tcx>>> {
2020-03-01 03:26:24 -06:00
let stacked_borrows = ecx.memory.extra.stacked_borrows.as_ref();
let call_id = stacked_borrows.map_or(NonZeroU64::new(1).unwrap(), |stacked_borrows| {
stacked_borrows.borrow_mut().new_call()
});
2020-04-13 09:08:12 -05:00
let extra = FrameData { call_id, catch_unwind: None };
Ok(frame.with_extra(extra))
}
#[inline(always)]
fn stack<'a>(
ecx: &'a InterpCx<'mir, 'tcx, Self>,
) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
&ecx.machine.stack
}
#[inline(always)]
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
&mut ecx.machine.stack
}
2020-04-13 10:31:19 -05:00
#[inline(always)]
fn after_stack_push(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
if ecx.memory.extra.stacked_borrows.is_some() {
ecx.retag_return_place()
} else {
Ok(())
}
}
#[inline(always)]
2020-04-13 09:08:12 -05:00
fn after_stack_pop(
2019-07-05 16:47:10 -05:00
ecx: &mut InterpCx<'mir, 'tcx, Self>,
2020-04-13 09:08:12 -05:00
frame: Frame<'mir, 'tcx, Tag, FrameData<'tcx>>,
2019-12-23 05:56:23 -06:00
unwinding: bool,
) -> InterpResult<'tcx, StackPopJump> {
2020-04-13 09:08:12 -05:00
ecx.handle_stack_pop(frame.extra, unwinding)
}
2019-07-23 14:38:53 -05:00
#[inline(always)]
fn int_to_ptr(
memory: &Memory<'mir, 'tcx, Self>,
int: u64,
) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
2019-07-23 14:38:53 -05:00
intptrcast::GlobalState::int_to_ptr(int, memory)
}
2019-07-23 14:38:53 -05:00
#[inline(always)]
fn ptr_to_int(
memory: &Memory<'mir, 'tcx, Self>,
ptr: Pointer<Self::PointerTag>,
) -> InterpResult<'tcx, u64> {
2019-07-23 14:38:53 -05:00
intptrcast::GlobalState::ptr_to_int(ptr, memory)
}
}
// Memory-access hooks: forward each access to the Stacked Borrows per-allocation
// state when it is present; otherwise the access is unconditionally allowed.
impl AllocationExtra<Tag> for AllocExtra {
    #[inline(always)]
    fn memory_read<'tcx>(
        alloc: &Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        if let Some(stacked_borrows) = &alloc.extra.stacked_borrows {
            stacked_borrows.memory_read(ptr, size)
        } else {
            Ok(())
        }
    }

    #[inline(always)]
    fn memory_written<'tcx>(
        alloc: &mut Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        if let Some(stacked_borrows) = &mut alloc.extra.stacked_borrows {
            stacked_borrows.memory_written(ptr, size)
        } else {
            Ok(())
        }
    }

    #[inline(always)]
    fn memory_deallocated<'tcx>(
        alloc: &mut Allocation<Tag, AllocExtra>,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        if let Some(stacked_borrows) = &mut alloc.extra.stacked_borrows {
            stacked_borrows.memory_deallocated(ptr, size)
        } else {
            Ok(())
        }
    }
}