2018-08-15 21:01:40 +02:00
|
|
|
use rustc::ty;
|
|
|
|
use rustc::ty::layout::{Align, LayoutOf, Size};
|
2017-07-28 09:52:19 +02:00
|
|
|
use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};
|
|
|
|
use rustc::mir;
|
|
|
|
use syntax::attr;
|
2018-08-20 16:27:23 +02:00
|
|
|
use syntax::source_map::Span;
|
2017-07-28 09:52:19 +02:00
|
|
|
|
|
|
|
use std::mem;
|
|
|
|
|
2017-12-14 11:03:55 +01:00
|
|
|
use super::*;
|
2017-07-28 09:52:19 +02:00
|
|
|
|
2017-07-31 13:30:44 +02:00
|
|
|
use tls::MemoryExt;
|
|
|
|
|
2017-08-09 14:53:22 +02:00
|
|
|
use super::memory::MemoryKind;
|
2017-07-28 16:48:43 +02:00
|
|
|
|
2017-07-28 09:52:19 +02:00
|
|
|
/// Extension methods on miri's `EvalContext` for handling function calls that
/// cannot be executed as ordinary MIR: foreign (C / Windows) items, and libstd
/// functions whose MIR is unavailable.
pub trait EvalContextExt<'tcx> {
    /// Emulate a call to the foreign function `def_id` (resolved via its
    /// `link_name`), writing any result into `dest` and treating `dest_block`
    /// as the return target.
    fn call_foreign_item(
        &mut self,
        def_id: DefId,
        args: &[OpTy<'tcx>],
        dest: PlaceTy<'tcx>,
        dest_block: mir::BasicBlock,
    ) -> EvalResult<'tcx>;

    /// Resolve a crate-qualified path (e.g. `["libc", "_SC_PAGESIZE"]`) to a
    /// monomorphic instance, or fail with `PathNotFound`.
    fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;

    /// Handle a call to `instance` for which no MIR is available; `path` is the
    /// item's absolute path, used to pick an emulation. `destination` is
    /// `None` for diverging calls.
    fn call_missing_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
        path: String,
    ) -> EvalResult<'tcx>;

    /// Entry point for function calls. Returns `Ok(true)` if the call was
    /// handled (emulated or short-circuited) and `Ok(false)` if a regular
    /// stack frame was pushed and execution should continue in the callee.
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
        span: Span,
    ) -> EvalResult<'tcx, bool>;

    /// Write a zero (null / C "success") of the destination's size into `dest`.
    fn write_null(&mut self, dest: PlaceTy<'tcx>) -> EvalResult<'tcx>;
}
|
|
|
|
|
2018-01-14 18:59:13 +01:00
|
|
|
impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
|
2017-07-28 09:52:19 +02:00
|
|
|
    /// Entry point for function calls: intercepts a few known libstd items,
    /// falls back to `call_missing_fn` when MIR is unavailable, and otherwise
    /// pushes a regular stack frame for the callee.
    ///
    /// Returns `Ok(true)` when the call was fully handled here (the caller
    /// must not push a frame), `Ok(false)` when a frame was pushed.
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
        span: Span,
    ) -> EvalResult<'tcx, bool> {
        trace!("eval_fn_call: {:#?}, {:?}", instance, destination.map(|(place, bb)| (*place, bb)));

        // Intercept a few specific libstd functions by absolute path.
        let def_id = instance.def_id();
        let item_path = self.tcx.absolute_item_path_str(def_id);
        match &*item_path {
            "std::sys::unix::thread::guard::init" | "std::sys::unix::thread::guard::current" => {
                // Return None, as it doesn't make sense to return Some, because miri detects stack overflow itself.
                // NOTE(review): `unwrap` assumes these guard functions always have a return
                // destination — TODO confirm against callers.
                let (return_place, return_to_block) = destination.unwrap();
                match return_place.layout.ty.sty {
                    ty::Adt(ref adt_def, _) => {
                        assert!(adt_def.is_enum(), "Unexpected return type for {}", item_path);
                        // Locate the `None` variant by name so we do not depend on variant order.
                        let none_variant_index = adt_def.variants.iter().position(|def| {
                            def.name.as_str() == "None"
                        }).expect("No None variant");

                        self.write_discriminant_value(none_variant_index, return_place)?;
                        self.goto_block(return_to_block);
                        return Ok(true);
                    }
                    _ => panic!("Unexpected return type for {}", item_path)
                }
            }
            "std::sys::unix::fast_thread_local::register_dtor" => {
                // TODO: register the dtor
                let (_return_place, return_to_block) = destination.unwrap();
                self.goto_block(return_to_block);
                return Ok(true);
            }
            _ => {}
        }

        // `align_offset` is a lang item; pretend alignment can never be satisfied.
        if self.tcx.lang_items().align_offset_fn() == Some(instance.def.def_id()) {
            // FIXME: return a real value in case the target allocation has an
            // alignment bigger than the one requested
            let n = u128::max_value();
            let (dest, return_to_block) = destination.unwrap();
            // Truncate the all-ones value to the destination's integer width.
            let n = self.truncate(n, dest.layout);
            self.write_scalar(Scalar::from_uint(n, dest.layout.size), dest)?;
            self.goto_block(return_to_block);
            return Ok(true);
        }

        // Try to load the callee's MIR; if there is none, dispatch to the
        // "missing function" emulation machinery instead of failing.
        let mir = match self.load_mir(instance.def) {
            Ok(mir) => mir,
            Err(EvalError { kind: EvalErrorKind::NoMirFor(path), .. }) => {
                self.call_missing_fn(
                    instance,
                    destination,
                    args,
                    path,
                )?;
                return Ok(true);
            }
            Err(other) => return Err(other),
        };

        // A missing destination means the call diverges; use a null return
        // place and no cleanup target.
        let (return_place, return_to_block) = match destination {
            Some((place, block)) => (*place, StackPopCleanup::Goto(block)),
            None => (Place::null(&self), StackPopCleanup::None),
        };

        self.push_stack_frame(
            instance,
            span,
            mir,
            return_place,
            return_to_block,
        )?;

        // `false`: the interpreter loop must now execute the pushed frame.
        Ok(false)
    }
|
|
|
|
|
2018-07-15 21:03:52 +02:00
|
|
|
    /// Emulate a call to a foreign (non-Rust) function, dispatching on its
    /// `link_name` (falling back to the item name). Each arm reads its
    /// arguments from `args`, performs the emulated effect against the
    /// interpreter's memory/machine state, and writes any return value into
    /// `dest`. Unless an arm returns early, control falls through to the
    /// common tail which jumps to `dest_block`.
    fn call_foreign_item(
        &mut self,
        def_id: DefId,
        args: &[OpTy<'tcx>],
        dest: PlaceTy<'tcx>,
        dest_block: mir::BasicBlock,
    ) -> EvalResult<'tcx> {
        // Prefer the `#[link_name]` attribute over the item's own name, since
        // that is what the linker (and hence the C ABI) would see.
        let attrs = self.tcx.get_attrs(def_id);
        let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
            Some(name) => name.as_str(),
            None => self.tcx.item_name(def_id).as_str(),
        };

        match &link_name[..] {
            // C heap allocation; malloc(0) returns null per common practice.
            "malloc" => {
                let size = self.read_scalar(args[0])?.to_usize(&self)?;
                if size == 0 {
                    self.write_null(dest)?;
                } else {
                    // No alignment is requested by malloc's contract; use pointer alignment.
                    let align = self.tcx.data_layout.pointer_align;
                    let ptr = self.memory.allocate(Size::from_bytes(size), align, MemoryKind::C.into())?;
                    self.write_scalar(Scalar::Ptr(ptr), dest)?;
                }
            }

            // free(NULL) is a no-op, as in C.
            "free" => {
                let ptr = self.read_scalar(args[0])?.not_undef()?;
                if !ptr.is_null() {
                    self.memory.deallocate(
                        ptr.to_ptr()?,
                        None,
                        MemoryKind::C.into(),
                    )?;
                }
            }

            // Rust's global allocator entry points. These enforce the
            // `GlobalAlloc` contract (non-zero size, power-of-two alignment)
            // and report violations as interpreter errors.
            "__rust_alloc" => {
                let size = self.read_scalar(args[0])?.to_usize(&self)?;
                let align = self.read_scalar(args[1])?.to_usize(&self)?;
                if size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let ptr = self.memory.allocate(Size::from_bytes(size),
                                               Align::from_bytes(align, align).unwrap(),
                                               MemoryKind::Rust.into())?;
                self.write_scalar(Scalar::Ptr(ptr), dest)?;
            }
            "__rust_alloc_zeroed" => {
                let size = self.read_scalar(args[0])?.to_usize(&self)?;
                let align = self.read_scalar(args[1])?.to_usize(&self)?;
                if size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let ptr = self.memory.allocate(Size::from_bytes(size),
                                               Align::from_bytes(align, align).unwrap(),
                                               MemoryKind::Rust.into())?;
                // Zero-initialize the fresh allocation, as the name promises.
                self.memory.write_repeat(ptr.into(), 0, Size::from_bytes(size))?;
                self.write_scalar(Scalar::Ptr(ptr), dest)?;
            }
            "__rust_dealloc" => {
                let ptr = self.read_scalar(args[0])?.to_ptr()?;
                let old_size = self.read_scalar(args[1])?.to_usize(&self)?;
                let align = self.read_scalar(args[2])?.to_usize(&self)?;
                if old_size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                // Passing the declared size/align lets the memory subsystem
                // check them against the actual allocation.
                self.memory.deallocate(
                    ptr,
                    Some((Size::from_bytes(old_size), Align::from_bytes(align, align).unwrap())),
                    MemoryKind::Rust.into(),
                )?;
            }
            "__rust_realloc" => {
                let ptr = self.read_scalar(args[0])?.to_ptr()?;
                let old_size = self.read_scalar(args[1])?.to_usize(&self)?;
                let align = self.read_scalar(args[2])?.to_usize(&self)?;
                let new_size = self.read_scalar(args[3])?.to_usize(&self)?;
                if old_size == 0 || new_size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let new_ptr = self.memory.reallocate(
                    ptr,
                    Size::from_bytes(old_size),
                    Align::from_bytes(align, align).unwrap(),
                    Size::from_bytes(new_size),
                    Align::from_bytes(align, align).unwrap(),
                    MemoryKind::Rust.into(),
                )?;
                self.write_scalar(Scalar::Ptr(new_ptr), dest)?;
            }

            "syscall" => {
                // TODO: read `syscall` ids like `sysconf` ids and
                // figure out some way to actually process some of them
                //
                // libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK)
                // is called if a `HashMap` is created the regular way.
                match self.read_scalar(args[0])?.to_usize(&self)? {
                    // 318 / 511: getrandom syscall numbers — rejected explicitly
                    // with a clearer message than the generic fallback below.
                    318 | 511 => {
                        return err!(Unimplemented(
                            "miri does not support random number generators".to_owned(),
                        ))
                    }
                    id => {
                        return err!(Unimplemented(
                            format!("miri does not support syscall id {}", id),
                        ))
                    }
                }
            }

            // Dynamic symbol lookup is unsupported; report which symbol was wanted.
            "dlsym" => {
                let _handle = self.read_scalar(args[0])?;
                let symbol = self.read_scalar(args[1])?.to_ptr()?;
                let symbol_name = self.memory.read_c_str(symbol)?;
                let err = format!("bad c unicode symbol: {:?}", symbol_name);
                let symbol_name = ::std::str::from_utf8(symbol_name).unwrap_or(&err);
                return err!(Unimplemented(format!(
                    "miri does not support dynamically loading libraries (requested symbol: {})",
                    symbol_name
                )));
            }

            "__rust_maybe_catch_panic" => {
                // fn __rust_maybe_catch_panic(f: fn(*mut u8), data: *mut u8, data_ptr: *mut usize, vtable_ptr: *mut usize) -> u32
                // We abort on panic, so not much is going on here, but we still have to call the closure
                let f = self.read_scalar(args[0])?.to_ptr()?;
                let data = self.read_scalar(args[1])?.not_undef()?;
                let f_instance = self.memory.get_fn(f)?;
                self.write_null(dest)?;

                // Now we make a function call. TODO: Consider making this re-usable? EvalContext::step does sth. similar for the TLS dtors,
                // and of course eval_main.
                let mir = self.load_mir(f_instance.def)?;
                // The closure returns `()`, so its return place is a null place.
                let ret = Place::null(&self);
                self.push_stack_frame(
                    f_instance,
                    mir.span,
                    mir,
                    ret,
                    StackPopCleanup::Goto(dest_block),
                )?;
                // Pass `data` as the closure's single argument, via the freshly
                // pushed frame's argument locals.
                let mut args = self.frame().mir.args_iter();

                let arg_local = args.next().ok_or_else(||
                    EvalErrorKind::AbiViolation(
                        "Argument to __rust_maybe_catch_panic does not take enough arguments."
                            .to_owned(),
                    ),
                )?;
                let arg_dest = self.eval_place(&mir::Place::Local(arg_local))?;
                self.write_scalar(data, arg_dest)?;

                assert!(args.next().is_none(), "__rust_maybe_catch_panic argument has more arguments than expected");

                // We ourselves return 0
                self.write_null(dest)?;

                // Don't fall through
                return Ok(());
            }

            "__rust_start_panic" => {
                return err!(Panic);
            }

            "memcmp" => {
                let left = self.read_scalar(args[0])?.not_undef()?;
                let right = self.read_scalar(args[1])?.not_undef()?;
                let n = Size::from_bytes(self.read_scalar(args[2])?.to_usize(&self)?);

                let result = {
                    let left_bytes = self.memory.read_bytes(left, n)?;
                    let right_bytes = self.memory.read_bytes(right, n)?;

                    use std::cmp::Ordering::*;
                    // memcmp's contract only fixes the sign; -1/0/1 suffices.
                    match left_bytes.cmp(right_bytes) {
                        Less => -1i32,
                        Equal => 0,
                        Greater => 1,
                    }
                };

                self.write_scalar(
                    Scalar::from_i32(result),
                    dest,
                )?;
            }

            // Find the LAST occurrence of `val`; returns null if absent.
            "memrchr" => {
                let ptr = self.read_scalar(args[0])?.not_undef()?;
                let val = self.read_scalar(args[1])?.to_bytes()? as u8;
                let num = self.read_scalar(args[2])?.to_usize(&self)?;
                if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().rev().position(
                    |&c| c == val,
                )
                {
                    // `idx` counts from the end; convert back to a forward offset.
                    let new_ptr = ptr.ptr_offset(Size::from_bytes(num - idx as u64 - 1), &self)?;
                    self.write_scalar(new_ptr, dest)?;
                } else {
                    self.write_null(dest)?;
                }
            }

            // Find the FIRST occurrence of `val`; returns null if absent.
            "memchr" => {
                let ptr = self.read_scalar(args[0])?.not_undef()?;
                let val = self.read_scalar(args[1])?.to_bytes()? as u8;
                let num = self.read_scalar(args[2])?.to_usize(&self)?;
                if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().position(
                    |&c| c == val,
                )
                {
                    let new_ptr = ptr.ptr_offset(Size::from_bytes(idx as u64), &self)?;
                    self.write_scalar(new_ptr, dest)?;
                } else {
                    self.write_null(dest)?;
                }
            }

            // Environment variables are emulated via `machine.env_vars`, a map
            // from name bytes to pointers into interpreter memory.
            "getenv" => {
                let result = {
                    let name_ptr = self.read_scalar(args[0])?.to_ptr()?;
                    let name = self.memory.read_c_str(name_ptr)?;
                    match self.machine.env_vars.get(name) {
                        Some(&var) => Scalar::Ptr(var),
                        None => Scalar::null(self.memory.pointer_size()),
                    }
                };
                self.write_scalar(result, dest)?;
            }

            "unsetenv" => {
                let mut success = None;
                {
                    let name_ptr = self.read_scalar(args[0])?.not_undef()?;
                    // Names containing '=' or empty names are invalid per POSIX.
                    if !name_ptr.is_null() {
                        let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
                        if !name.is_empty() && !name.contains(&b'=') {
                            success = Some(self.machine.env_vars.remove(name));
                        }
                    }
                }
                if let Some(old) = success {
                    // Free the old value's backing allocation, if one existed.
                    if let Some(var) = old {
                        self.memory.deallocate(var, None, MemoryKind::Env.into())?;
                    }
                    self.write_null(dest)?;
                } else {
                    // Invalid name: return -1 like the libc call would.
                    self.write_scalar(Scalar::from_int(-1, dest.layout.size), dest)?;
                }
            }

            "setenv" => {
                let mut new = None;
                {
                    let name_ptr = self.read_scalar(args[0])?.not_undef()?;
                    let value_ptr = self.read_scalar(args[1])?.to_ptr()?;
                    let value = self.memory.read_c_str(value_ptr)?;
                    if !name_ptr.is_null() {
                        let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
                        if !name.is_empty() && !name.contains(&b'=') {
                            new = Some((name.to_owned(), value.to_owned()));
                        }
                    }
                }
                if let Some((name, value)) = new {
                    // +1 for the null terminator
                    let value_copy = self.memory.allocate(
                        Size::from_bytes((value.len() + 1) as u64),
                        Align::from_bytes(1, 1).unwrap(),
                        MemoryKind::Env.into(),
                    )?;
                    self.memory.write_bytes(value_copy.into(), &value)?;
                    let trailing_zero_ptr = value_copy.offset(Size::from_bytes(value.len() as u64), &self)?.into();
                    self.memory.write_bytes(trailing_zero_ptr, &[0])?;
                    // Replacing an existing variable frees its old allocation.
                    if let Some(var) = self.machine.env_vars.insert(
                        name.to_owned(),
                        value_copy,
                    )
                    {
                        self.memory.deallocate(var, None, MemoryKind::Env.into())?;
                    }
                    self.write_null(dest)?;
                } else {
                    self.write_scalar(Scalar::from_int(-1, dest.layout.size), dest)?;
                }
            }

            // Forward writes to fds 1/2 to the host's stdout/stderr; other
            // fds are ignored but reported as fully written.
            "write" => {
                let fd = self.read_scalar(args[0])?.to_bytes()?;
                let buf = self.read_scalar(args[1])?.not_undef()?;
                let n = self.read_scalar(args[2])?.to_bytes()? as u64;
                trace!("Called write({:?}, {:?}, {:?})", fd, buf, n);
                let result = if fd == 1 || fd == 2 {
                    // stdout/stderr
                    use std::io::{self, Write};

                    let buf_cont = self.memory.read_bytes(buf, Size::from_bytes(n))?;
                    let res = if fd == 1 {
                        io::stdout().write(buf_cont)
                    } else {
                        io::stderr().write(buf_cont)
                    };
                    match res {
                        Ok(n) => n as i64,
                        Err(_) => -1,
                    }
                } else {
                    warn!("Ignored output to FD {}", fd);
                    n as i64 // pretend it all went well
                }; // now result is the value we return back to the program
                self.write_scalar(
                    Scalar::from_int(result, dest.layout.size),
                    dest,
                )?;
            }

            "strlen" => {
                let ptr = self.read_scalar(args[0])?.to_ptr()?;
                // Length excludes the terminating NUL, matching C's strlen.
                let n = self.memory.read_c_str(ptr)?.len();
                self.write_scalar(Scalar::from_uint(n as u64, dest.layout.size), dest)?;
            }

            // Some things needed for sys::thread initialization to go through
            "signal" | "sigaction" | "sigaltstack" => {
                self.write_scalar(Scalar::null(dest.layout.size), dest)?;
            }

            "sysconf" => {
                let name = self.read_scalar(args[0])?.to_usize(&self)?;

                trace!("sysconf() called with name {}", name);
                // cache the sysconf integers via miri's global cache
                // Each entry maps a libc constant's path to the value we report
                // when sysconf is queried with that constant.
                let paths = &[
                    (&["libc", "_SC_PAGESIZE"], Scalar::from_int(4096, dest.layout.size)),
                    (&["libc", "_SC_GETPW_R_SIZE_MAX"], Scalar::from_int(-1, dest.layout.size)),
                ];
                let mut result = None;
                for &(path, path_value) in paths {
                    if let Ok(instance) = self.resolve_path(path) {
                        let cid = GlobalId {
                            instance,
                            promoted: None,
                        };
                        // Const-eval the libc constant to learn its numeric id.
                        let const_val = self.const_eval(cid)?;
                        let value = const_val.unwrap_usize(self.tcx.tcx);
                        if value == name {
                            result = Some(path_value);
                            break;
                        }
                    }
                }
                if let Some(result) = result {
                    self.write_scalar(result, dest)?;
                } else {
                    return err!(Unimplemented(
                        format!("Unimplemented sysconf name: {}", name),
                    ));
                }
            }

            // Hook pthread calls that go to the thread-local storage memory subsystem
            "pthread_key_create" => {
                let key_ptr = self.read_scalar(args[0])?.to_ptr()?;

                // Extract the function type out of the signature (that seems easier than constructing it ourselves...)
                let dtor = match self.read_scalar(args[1])?.not_undef()? {
                    Scalar::Ptr(dtor_ptr) => Some(self.memory.get_fn(dtor_ptr)?),
                    // A null function pointer means "no destructor".
                    Scalar::Bits { bits: 0, size } => {
                        assert_eq!(size as u64, self.memory.pointer_size().bytes());
                        None
                    },
                    Scalar::Bits { .. } => return err!(ReadBytesAsPointer),
                };

                // Figure out how large a pthread TLS key actually is. This is libc::pthread_key_t.
                let key_type = args[0].layout.ty.builtin_deref(true)
                    .ok_or_else(|| EvalErrorKind::AbiViolation("Wrong signature used for pthread_key_create: First argument must be a raw pointer.".to_owned()))?.ty;
                let key_layout = self.layout_of(key_type)?;

                // Create key and write it into the memory where key_ptr wants it
                let key = self.memory.create_tls_key(dtor) as u128;
                // Reject keys that do not fit the target's pthread_key_t width.
                if key_layout.size.bits() < 128 && key >= (1u128 << key_layout.size.bits() as u128) {
                    return err!(OutOfTls);
                }
                self.memory.write_scalar(
                    key_ptr,
                    key_layout.align,
                    Scalar::from_uint(key, key_layout.size).into(),
                    key_layout.size,
                )?;

                // Return success (0)
                self.write_null(dest)?;
            }
            "pthread_key_delete" => {
                let key = self.read_scalar(args[0])?.to_bytes()?;
                self.memory.delete_tls_key(key)?;
                // Return success (0)
                self.write_null(dest)?;
            }
            "pthread_getspecific" => {
                let key = self.read_scalar(args[0])?.to_bytes()?;
                let ptr = self.memory.load_tls(key)?;
                self.write_scalar(ptr, dest)?;
            }
            "pthread_setspecific" => {
                let key = self.read_scalar(args[0])?.to_bytes()?;
                let new_ptr = self.read_scalar(args[1])?.not_undef()?;
                self.memory.store_tls(key, new_ptr)?;

                // Return success (0)
                self.write_null(dest)?;
            }

            // macOS thread-local destructor registration — unsupported.
            "_tlv_atexit" => {
                return err!(Unimplemented("Thread-local store is not fully supported on macOS".to_owned()));
            },

            // Stub out all the other pthread calls to just return 0
            link_name if link_name.starts_with("pthread_") => {
                debug!("ignoring C ABI call: {}", link_name);
                self.write_null(dest)?;
            }

            "mmap" => {
                // This is a horrible hack, but well... the guard page mechanism calls mmap and expects a particular return value, so we give it that value
                let addr = self.read_scalar(args[0])?.not_undef()?;
                self.write_scalar(addr, dest)?;
            }

            // Windows API subs
            "AddVectoredExceptionHandler" => {
                // any non zero value works for the stdlib. This is just used for stackoverflows anyway
                self.write_scalar(Scalar::from_int(1, dest.layout.size), dest)?;
            },
            "InitializeCriticalSection" |
            "EnterCriticalSection" |
            "LeaveCriticalSection" |
            "DeleteCriticalSection" |
            "SetLastError" => {
                // Function does not return anything, nothing to do
            },
            "GetModuleHandleW" |
            "GetProcAddress" |
            "TryEnterCriticalSection" => {
                // pretend these do not exist/nothing happened, by returning zero
                self.write_null(dest)?;
            },
            "GetLastError" => {
                // this is c::ERROR_CALL_NOT_IMPLEMENTED
                self.write_scalar(Scalar::from_int(120, dest.layout.size), dest)?;
            },

            // Windows TLS
            "TlsAlloc" => {
                // This just creates a key; Windows does not natively support TLS dtors.

                // Create key and return it
                let key = self.memory.create_tls_key(None) as u128;

                // Figure out how large a TLS key actually is. This is c::DWORD.
                if dest.layout.size.bits() < 128 && key >= (1u128 << dest.layout.size.bits() as u128) {
                    return err!(OutOfTls);
                }
                self.write_scalar(Scalar::from_uint(key, dest.layout.size), dest)?;
            }
            "TlsGetValue" => {
                let key = self.read_scalar(args[0])?.to_bytes()?;
                let ptr = self.memory.load_tls(key)?;
                self.write_scalar(ptr, dest)?;
            }
            "TlsSetValue" => {
                let key = self.read_scalar(args[0])?.to_bytes()?;
                let new_ptr = self.read_scalar(args[1])?.not_undef()?;
                self.memory.store_tls(key, new_ptr)?;

                // Return success (1)
                self.write_scalar(Scalar::from_int(1, dest.layout.size), dest)?;
            }

            // We can't execute anything else
            _ => {
                return err!(Unimplemented(
                    format!("can't call foreign function: {}", link_name),
                ));
            }
        }

        // Since we pushed no stack frame, the main loop will act
        // as if the call just completed and it's returning to the
        // current frame.
        self.dump_place(*dest);
        self.goto_block(dest_block);
        Ok(())
    }
|
|
|
|
|
|
|
|
/// Get an instance for a path.
|
|
|
|
fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>> {
|
2017-09-08 19:10:21 +02:00
|
|
|
self.tcx
|
|
|
|
.crates()
|
2017-08-10 08:48:38 -07:00
|
|
|
.iter()
|
2017-09-08 19:10:21 +02:00
|
|
|
.find(|&&krate| self.tcx.original_crate_name(krate) == path[0])
|
2017-07-28 09:52:19 +02:00
|
|
|
.and_then(|krate| {
|
|
|
|
let krate = DefId {
|
|
|
|
krate: *krate,
|
|
|
|
index: CRATE_DEF_INDEX,
|
|
|
|
};
|
2017-09-08 19:10:21 +02:00
|
|
|
let mut items = self.tcx.item_children(krate);
|
2017-07-28 09:52:19 +02:00
|
|
|
let mut path_it = path.iter().skip(1).peekable();
|
|
|
|
|
|
|
|
while let Some(segment) = path_it.next() {
|
2017-09-08 19:10:21 +02:00
|
|
|
for item in mem::replace(&mut items, Default::default()).iter() {
|
2017-07-28 09:52:19 +02:00
|
|
|
if item.ident.name == *segment {
|
|
|
|
if path_it.peek().is_none() {
|
2018-01-14 18:59:13 +01:00
|
|
|
return Some(ty::Instance::mono(self.tcx.tcx, item.def.def_id()));
|
2017-07-28 09:52:19 +02:00
|
|
|
}
|
|
|
|
|
2017-09-08 19:10:21 +02:00
|
|
|
items = self.tcx.item_children(item.def.def_id());
|
2017-07-28 09:52:19 +02:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None
|
|
|
|
})
|
|
|
|
.ok_or_else(|| {
|
2017-08-10 08:48:38 -07:00
|
|
|
let path = path.iter().map(|&s| s.to_owned()).collect();
|
2017-08-02 16:59:01 +02:00
|
|
|
EvalErrorKind::PathNotFound(path).into()
|
2017-07-28 09:52:19 +02:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
    /// Handle a call to a function whose MIR is missing: either forward it to
    /// `call_foreign_item` (for foreign items) or emulate a small set of known
    /// libstd functions by their absolute `path`.
    fn call_missing_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
        path: String,
    ) -> EvalResult<'tcx> {
        // In some cases in non-MIR libstd-mode, not having a destination is legit. Handle these early.
        match &path[..] {
            "std::panicking::rust_panic_with_hook" |
            "core::panicking::panic_fmt::::panic_impl" |
            "std::rt::begin_panic_fmt" => return err!(Panic),
            _ => {}
        }

        // Everything below needs a return destination; without one we can only
        // report the missing MIR.
        let (dest, dest_block) = destination.ok_or_else(
            || EvalErrorKind::NoMirFor(path.clone()),
        )?;

        if self.tcx.is_foreign_item(instance.def_id()) {
            // An external function
            // TODO: That functions actually has a similar preamble to what follows here. May make sense to
            // unify these two mechanisms for "hooking into missing functions".
            self.call_foreign_item(
                instance.def_id(),
                args,
                dest,
                dest_block,
            )?;
            return Ok(());
        }

        match &path[..] {
            // A Rust function is missing, which means we are running with MIR missing for libstd (or other dependencies).
            // Still, we can make many things mostly work by "emulating" or ignoring some functions.
            "std::io::_print" |
            "std::io::_eprint" => {
                warn!(
                    "Ignoring output. To run programs that print, make sure you have a libstd with full MIR."
                );
            }
            "std::thread::Builder::new" => {
                return err!(Unimplemented("miri does not support threading".to_owned()))
            }
            "std::env::args" => {
                return err!(Unimplemented(
                    "miri does not support program arguments".to_owned(),
                ))
            }
            "std::panicking::panicking" |
            "std::rt::panicking" => {
                // we abort on panic -> `std::rt::panicking` always returns false
                self.write_scalar(Scalar::from_bool(false), dest)?;
            }

            _ => return err!(NoMirFor(path)),
        }

        // Since we pushed no stack frame, the main loop will act
        // as if the call just completed and it's returning to the
        // current frame.
        self.dump_place(*dest);
        self.goto_block(dest_block);
        Ok(())
    }
|
2017-11-21 13:32:40 +01:00
|
|
|
|
2018-08-15 21:01:40 +02:00
|
|
|
fn write_null(&mut self, dest: PlaceTy<'tcx>) -> EvalResult<'tcx> {
|
|
|
|
self.write_scalar(Scalar::null(dest.layout.size), dest)
|
2017-11-21 13:32:40 +01:00
|
|
|
}
|
2017-07-28 09:52:19 +02:00
|
|
|
}
|