// rust/src/machine.rs

//! Global machine state as well as implementation of the interpreter engine
//! `Machine` trait.
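//!
//! `Evaluator` is Miri's instance of the interpreter's `Machine` trait; `MemoryExtra`,
//! `AllocExtra`, and `FrameData` carry the global, per-allocation, and per-frame state
//! that Miri attaches to the interpreter.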
use std::borrow::Cow;
use std::cell::RefCell;
use std::num::NonZeroU64;
use std::rc::Rc;
use std::time::Instant;
use log::trace;
use rand::rngs::StdRng;
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::mir;
use rustc_middle::ty::{
self,
layout::{LayoutOf, Size},
Ty,
};
use rustc_ast::attr;
use rustc_span::{source_map::Span, symbol::{sym, Symbol}};
use crate::*;
// Some global facts about the emulated machine.
pub const PAGE_SIZE: u64 = 4 * 1024; // FIXME: adjust to target architecture
pub const STACK_ADDR: u64 = 32 * PAGE_SIZE; // not really about the "stack", but where we start assigning integer addresses to allocations
pub const STACK_SIZE: u64 = 16 * PAGE_SIZE; // whatever
pub const NUM_CPUS: u64 = 1;
/// Extra data stored with each stack frame
#[derive(Debug)]
pub struct FrameData<'tcx> {
/// Extra data for Stacked Borrows.
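    /// Stacked Borrows uses this ID to identify protectors that belong to this call.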
pub call_id: stacked_borrows::CallId,
    /// If this is `Some`, then this is a special "catch unwind" frame (the frame of `try_fn`
    /// called by `try`). When this frame is popped during unwinding a panic, we stop unwinding
    /// and use the `CatchUnwindData` to handle catching the panic.
pub catch_unwind: Option<CatchUnwindData<'tcx>>,
}
/// Extra memory kinds
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
/// `__rust_alloc` memory.
Rust,
/// `malloc` memory.
C,
/// Windows `HeapAlloc` memory.
WinHeap,
/// Memory for args, errno, extern statics and other parts of the machine-managed environment.
/// This memory may leak.
Machine,
/// Memory for env vars. Separate from `Machine` because we clean it up and leak-check it.
Env,
/// Globals copied from `tcx`.
/// This memory may leak.
Global,
}
impl From<MiriMemoryKind> for MemoryKind<MiriMemoryKind> {
    #[inline(always)]
    fn from(kind: MiriMemoryKind) -> MemoryKind<MiriMemoryKind> {
        MemoryKind::Machine(kind)
    }
}
/// Extra per-allocation data
#[derive(Debug, Clone)]
pub struct AllocExtra {
/// Stacked Borrows state is only added if it is enabled.
pub stacked_borrows: Option<stacked_borrows::AllocExtra>,
}
/// Extra global memory data
#[derive(Clone, Debug)]
pub struct MemoryExtra {
pub stacked_borrows: Option<stacked_borrows::MemoryExtra>,
pub intptrcast: intptrcast::MemoryExtra,
/// Mapping extern static names to their canonical allocation.
extern_statics: FxHashMap<Symbol, AllocId>,
/// The random number generator used for resolving non-determinism.
    /// Needs to be queried by `ptr_to_int`, hence needs interior mutability.
pub(crate) rng: RefCell<StdRng>,
/// An allocation ID to report when it is being allocated
/// (helps for debugging memory leaks).
tracked_alloc_id: Option<AllocId>,
}
impl MemoryExtra {
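    /// Creates the machine-global memory state: stores the given RNG, sets up Stacked Borrows
    /// (if enabled), and records which pointer tag and allocation ID to report on for diagnostics.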
pub fn new(rng: StdRng, stacked_borrows: bool, tracked_pointer_tag: Option<PtrId>, tracked_alloc_id: Option<AllocId>) -> Self {
let stacked_borrows = if stacked_borrows {
Some(Rc::new(RefCell::new(stacked_borrows::GlobalState::new(tracked_pointer_tag))))
} else {
None
};
MemoryExtra {
stacked_borrows,
intptrcast: Default::default(),
extern_statics: FxHashMap::default(),
rng: RefCell::new(rng),
tracked_alloc_id,
}
}
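    /// Registers the allocation `ptr` points to as the canonical allocation for the extern
    /// static `name`. The pointer must point to the start of its allocation.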
fn add_extern_static<'tcx, 'mir>(
this: &mut MiriEvalContext<'mir, 'tcx>,
name: &str,
ptr: Scalar<Tag>,
) {
let ptr = ptr.assert_ptr();
assert_eq!(ptr.offset, Size::ZERO);
this.memory
.extra
.extern_statics
.insert(Symbol::intern(name), ptr.alloc_id)
.unwrap_none();
}
/// Sets up the "extern statics" for this machine.
pub fn init_extern_statics<'tcx, 'mir>(
this: &mut MiriEvalContext<'mir, 'tcx>,
) -> InterpResult<'tcx> {
match this.tcx.sess.target.target.target_os.as_str() {
"linux" => {
// "__cxa_thread_atexit_impl"
// This should be all-zero, pointer-sized.
let layout = this.layout_of(this.tcx.types.usize)?;
let place = this.allocate(layout, MiriMemoryKind::Machine.into());
this.write_scalar(Scalar::from_machine_usize(0, this), place.into())?;
Self::add_extern_static(this, "__cxa_thread_atexit_impl", place.ptr);
// "environ"
Self::add_extern_static(this, "environ", this.machine.env_vars.environ.unwrap().ptr);
}
"windows" => {
// "_tls_used"
// This is some obscure hack that is part of the Windows TLS story. It's a `u8`.
let layout = this.layout_of(this.tcx.types.u8)?;
let place = this.allocate(layout, MiriMemoryKind::Machine.into());
this.write_scalar(Scalar::from_u8(0), place.into())?;
Self::add_extern_static(this, "_tls_used", place.ptr);
}
_ => {} // No "extern statics" supported on this target
}
Ok(())
}
}
/// The machine itself.
pub struct Evaluator<'tcx> {
/// Environment variables set by `setenv`.
/// Miri does not expose env vars from the host to the emulated program.
pub(crate) env_vars: EnvVars<'tcx>,
/// Program arguments (`Option` because we can only initialize them after creating the ecx).
    /// These are *pointers* to argc/argv because macOS's `_NSGetArgc`/`_NSGetArgv` hand out pointers.
    /// We also need the full command line as one string because Windows' `GetCommandLineW` returns it that way.
pub(crate) argc: Option<Scalar<Tag>>,
pub(crate) argv: Option<Scalar<Tag>>,
pub(crate) cmd_line: Option<Scalar<Tag>>,
/// Last OS error location in memory. It is a 32-bit integer.
pub(crate) last_error: Option<MPlaceTy<'tcx, Tag>>,
/// TLS state.
pub(crate) tls: TlsData<'tcx>,
/// If enabled, the `env_vars` field is populated with the host env vars during initialization
/// and random number generation is delegated to the host.
pub(crate) communicate: bool,
/// Whether to enforce the validity invariant.
pub(crate) validate: bool,
pub(crate) file_handler: FileHandler,
pub(crate) dir_handler: DirHandler,
/// The temporary used for storing the argument of
/// the call to `miri_start_panic` (the panic payload) when unwinding.
/// This is pointer-sized, and matches the `Payload` type in `src/libpanic_unwind/miri.rs`.
pub(crate) panic_payload: Option<Scalar<Tag>>,
    /// The "time anchor" for this machine's monotonic clock (for `Instant` simulation).
pub(crate) time_anchor: Instant,
}
impl<'tcx> Evaluator<'tcx> {
pub(crate) fn new(communicate: bool, validate: bool) -> Self {
Evaluator {
// `env_vars` could be initialized properly here if `Memory` were available before
// calling this method.
env_vars: EnvVars::default(),
argc: None,
argv: None,
cmd_line: None,
last_error: None,
tls: TlsData::default(),
communicate,
validate,
file_handler: Default::default(),
dir_handler: Default::default(),
panic_payload: None,
time_anchor: Instant::now(),
}
}
}
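// Roughly how the driver wires these pieces together (a hedged sketch -- the exact code lives
// elsewhere in Miri, and the `config` field names here are illustrative, not authoritative):
//
//     let mut ecx = InterpCx::new(
//         tcx.at(rustc_span::DUMMY_SP),
//         ty::ParamEnv::reveal_all(),
//         Evaluator::new(config.communicate, config.validate),
//         MemoryExtra::new(
//             StdRng::seed_from_u64(config.seed.unwrap_or(0)),
//             config.stacked_borrows,
//             config.tracked_pointer_tag,
//             config.tracked_alloc_id,
//         ),
//     );
//     MemoryExtra::init_extern_statics(&mut ecx)?;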
/// A rustc InterpCx for Miri.
pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'tcx>>;
/// A little trait that extension traits can inherit to get access to the underlying `MiriEvalContext`.
pub trait MiriEvalContextExt<'mir, 'tcx> {
fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
}
impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
#[inline(always)]
fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
self
}
#[inline(always)]
fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
self
}
}
/// Machine hook implementations.
impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
type MemoryKind = MiriMemoryKind;
type FrameExtra = FrameData<'tcx>;
type MemoryExtra = MemoryExtra;
type AllocExtra = AllocExtra;
type PointerTag = Tag;
type ExtraFnVal = Dlsym;
type MemoryMap =
MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);
const CHECK_ALIGN: bool = true;
#[inline(always)]
fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
ecx.machine.validate
}
#[inline(always)]
fn find_mir_or_eval_fn(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
_span: Span,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Tag>],
ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
ecx.find_mir_or_eval_fn(instance, args, ret, unwind)
}
#[inline(always)]
fn call_extra_fn(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
fn_val: Dlsym,
args: &[OpTy<'tcx, Tag>],
ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
_unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
ecx.call_dlsym(fn_val, args, ret)
}
#[inline(always)]
fn call_intrinsic(
ecx: &mut rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
span: Span,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Tag>],
ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
ecx.call_intrinsic(span, instance, args, ret, unwind)
}
#[inline(always)]
fn assert_panic(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
msg: &mir::AssertMessage<'tcx>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
ecx.assert_panic(msg, unwind)
}
#[inline(always)]
fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx, !> {
throw_machine_stop!(TerminationInfo::Abort(None))
}
#[inline(always)]
fn binary_ptr_op(
ecx: &rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
bin_op: mir::BinOp,
left: ImmTy<'tcx, Tag>,
right: ImmTy<'tcx, Tag>,
) -> InterpResult<'tcx, (Scalar<Tag>, bool, Ty<'tcx>)> {
ecx.binary_ptr_op(bin_op, left, right)
}
fn box_alloc(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
dest: PlaceTy<'tcx, Tag>,
) -> InterpResult<'tcx> {
trace!("box_alloc for {:?}", dest.layout.ty);
let layout = ecx.layout_of(dest.layout.ty.builtin_deref(false).unwrap().ty)?;
// First argument: `size`.
// (`0` is allowed here -- this is expected to be handled by the lang item).
let size = Scalar::from_machine_usize(layout.size.bytes(), ecx);
// Second argument: `align`.
let align = Scalar::from_machine_usize(layout.align.abi.bytes(), ecx);
// Call the `exchange_malloc` lang item.
let malloc = ecx.tcx.lang_items().exchange_malloc_fn().unwrap();
let malloc = ty::Instance::mono(ecx.tcx.tcx, malloc);
ecx.call_function(
malloc,
&[size.into(), align.into()],
Some(dest),
// Don't do anything when we are done. The `statement()` function will increment
// the old stack frame's stmt counter to the next statement, which means that when
// `exchange_malloc` returns, we go on evaluating exactly where we want to be.
StackPopCleanup::None { cleanup: true },
)?;
Ok(())
}
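    /// Resolves an `AllocId` to its canonical form. For extern statics (foreign items that have
    /// no body), this redirects to the machine-managed allocation that was registered under the
    /// same link name by `init_extern_statics`; all other IDs are returned unchanged.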
fn canonical_alloc_id(mem: &Memory<'mir, 'tcx, Self>, id: AllocId) -> AllocId {
let tcx = mem.tcx;
// Figure out if this is an extern static, and if yes, which one.
let def_id = match tcx.alloc_map.lock().get(id) {
Some(GlobalAlloc::Static(def_id)) if tcx.is_foreign_item(def_id) => def_id,
_ => {
// No need to canonicalize anything.
return id;
}
};
let attrs = tcx.get_attrs(def_id);
let link_name = match attr::first_attr_value_str_by_name(&attrs, sym::link_name) {
Some(name) => name,
None => tcx.item_name(def_id),
};
// Check if we know this one.
if let Some(canonical_id) = mem.extra.extern_statics.get(&link_name) {
trace!("canonical_alloc_id: {:?} ({}) -> {:?}", id, link_name, canonical_id);
*canonical_id
} else {
// Return original id; `Memory::get_static_alloc` will throw an error.
id
}
}
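    /// Called when an allocation first enters this machine's memory: reports it if it is the
    /// tracked allocation, sets up Stacked Borrows stacks (if enabled), and tags any pointers
    /// the allocation already contains, returning the adjusted allocation and its base tag.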
fn init_allocation_extra<'b>(
memory_extra: &MemoryExtra,
id: AllocId,
alloc: Cow<'b, Allocation>,
kind: Option<MemoryKind<Self::MemoryKind>>,
) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag) {
if Some(id) == memory_extra.tracked_alloc_id {
register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id));
}
        let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
let alloc = alloc.into_owned();
let (stacks, base_tag) =
if let Some(stacked_borrows) = memory_extra.stacked_borrows.as_ref() {
let (stacks, base_tag) =
Stacks::new_allocation(id, alloc.size, Rc::clone(stacked_borrows), kind);
(Some(stacks), base_tag)
} else {
// No stacks, no tag.
(None, Tag::Untagged)
};
let mut stacked_borrows = memory_extra.stacked_borrows.as_ref().map(|sb| sb.borrow_mut());
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.with_tags_and_extra(
|alloc| {
if let Some(stacked_borrows) = stacked_borrows.as_mut() {
// Only globals may already contain pointers at this point
assert_eq!(kind, MiriMemoryKind::Global.into());
stacked_borrows.global_base_ptr(alloc)
} else {
Tag::Untagged
}
},
AllocExtra { stacked_borrows: stacks },
);
(Cow::Owned(alloc), base_tag)
}
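    /// Computes the tag for a pointer to the global allocation `id`: the Stacked Borrows base
    /// tag when Stacked Borrows is enabled, `Tag::Untagged` otherwise.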
#[inline(always)]
fn tag_global_base_pointer(memory_extra: &MemoryExtra, id: AllocId) -> Self::PointerTag {
if let Some(stacked_borrows) = memory_extra.stacked_borrows.as_ref() {
stacked_borrows.borrow_mut().global_base_ptr(id)
} else {
Tag::Untagged
}
}
#[inline(always)]
fn retag(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
kind: mir::RetagKind,
place: PlaceTy<'tcx, Tag>,
) -> InterpResult<'tcx> {
if ecx.memory.extra.stacked_borrows.is_none() {
// No tracking.
Ok(())
} else {
ecx.retag(kind, place)
}
}
#[inline(always)]
fn stack_push(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx, FrameData<'tcx>> {
let stacked_borrows = ecx.memory.extra.stacked_borrows.as_ref();
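        // Without Stacked Borrows there is nobody to hand out call IDs, and the ID is only ever
        // consumed by Stacked Borrows, so a fixed dummy value suffices.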
let call_id = stacked_borrows.map_or(NonZeroU64::new(1).unwrap(), |stacked_borrows| {
stacked_borrows.borrow_mut().new_call()
});
Ok(FrameData { call_id, catch_unwind: None })
}
#[inline(always)]
fn stack_pop(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
extra: FrameData<'tcx>,
unwinding: bool,
) -> InterpResult<'tcx, StackPopJump> {
ecx.handle_stack_pop(extra, unwinding)
}
#[inline(always)]
fn int_to_ptr(
memory: &Memory<'mir, 'tcx, Self>,
int: u64,
) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
intptrcast::GlobalState::int_to_ptr(int, memory)
}
#[inline(always)]
fn ptr_to_int(
memory: &Memory<'mir, 'tcx, Self>,
ptr: Pointer<Self::PointerTag>,
) -> InterpResult<'tcx, u64> {
intptrcast::GlobalState::ptr_to_int(ptr, memory)
}
}
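// Memory-access hooks: the interpreter invokes these on every read, write, and deallocation.
// They forward to Stacked Borrows when it is enabled and are no-ops otherwise.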
impl AllocationExtra<Tag> for AllocExtra {
#[inline(always)]
fn memory_read<'tcx>(
alloc: &Allocation<Tag, AllocExtra>,
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
if let Some(ref stacked_borrows) = alloc.extra.stacked_borrows {
stacked_borrows.memory_read(ptr, size)
} else {
Ok(())
}
}
#[inline(always)]
fn memory_written<'tcx>(
alloc: &mut Allocation<Tag, AllocExtra>,
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
if let Some(ref mut stacked_borrows) = alloc.extra.stacked_borrows {
stacked_borrows.memory_written(ptr, size)
} else {
Ok(())
}
}
#[inline(always)]
fn memory_deallocated<'tcx>(
alloc: &mut Allocation<Tag, AllocExtra>,
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
if let Some(ref mut stacked_borrows) = alloc.extra.stacked_borrows {
stacked_borrows.memory_deallocated(ptr, size)
} else {
Ok(())
}
}
}
impl MayLeak for MiriMemoryKind {
#[inline(always)]
fn may_leak(self) -> bool {
use self::MiriMemoryKind::*;
match self {
Rust | C | WinHeap | Env => false,
Machine | Global => true,
}
}
}