use std::iter;

use rustc::mir;
use rustc::mir::interpret::{InterpResult, PointerArithmetic};
use rustc::ty;
use rustc::ty::layout::{self, Align, LayoutOf, Size};
use rustc_apfloat::Float;
use rustc_span::source_map::Span;

use crate::*;
|
2018-05-26 17:07:34 +02:00
|
|
|
|
2019-06-13 08:52:04 +02:00
|
|
|
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
|
|
|
|
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
|
2017-07-28 13:08:27 +02:00
|
|
|
fn call_intrinsic(
|
2016-09-20 16:05:30 +02:00
|
|
|
&mut self,
|
2019-10-30 10:16:58 +01:00
|
|
|
span: Span,
|
2017-03-21 13:53:55 +01:00
|
|
|
instance: ty::Instance<'tcx>,
|
2019-04-15 15:36:09 +02:00
|
|
|
args: &[OpTy<'tcx, Tag>],
|
2019-11-25 22:48:31 +01:00
|
|
|
ret: Option<(PlaceTy<'tcx, Tag>, mir::BasicBlock)>,
|
2019-12-23 12:56:23 +01:00
|
|
|
unwind: Option<mir::BasicBlock>,
|
2019-06-08 22:14:47 +02:00
|
|
|
) -> InterpResult<'tcx> {
|
2018-12-11 14:16:58 +01:00
|
|
|
let this = self.eval_context_mut();
|
2019-11-25 22:48:31 +01:00
|
|
|
if this.emulate_intrinsic(span, instance, args, ret)? {
|
2018-08-23 19:28:48 +02:00
|
|
|
return Ok(());
|
|
|
|
}
|
2019-12-23 12:56:23 +01:00
|
|
|
let tcx = &{ this.tcx.tcx };
|
2017-03-29 09:10:05 +02:00
|
|
|
let substs = instance.substs;
|
2016-09-20 16:05:30 +02:00
|
|
|
|
2018-10-17 15:15:53 +02:00
|
|
|
// All these intrinsics take raw pointers, so if we access memory directly
|
|
|
|
// (as opposed to through a place), we have to remember to erase any tag
|
|
|
|
// that might still hang around!
|
2019-10-18 11:33:12 +02:00
|
|
|
let intrinsic_name = &*tcx.item_name(instance.def_id()).as_str();
|
2019-04-14 21:02:55 -04:00
|
|
|
|
2019-11-25 22:48:31 +01:00
|
|
|
// Handle diverging intrinsics.
|
|
|
|
let (dest, ret) = match intrinsic_name {
|
2019-04-14 21:02:55 -04:00
|
|
|
"abort" => {
|
2019-12-02 11:02:11 +01:00
|
|
|
throw_machine_stop!(TerminationInfo::Abort);
|
2019-04-14 21:02:55 -04:00
|
|
|
}
|
|
|
|
"miri_start_panic" => return this.handle_miri_start_panic(args, unwind),
|
2019-12-23 12:56:23 +01:00
|
|
|
_ =>
|
2019-11-25 22:48:31 +01:00
|
|
|
if let Some(p) = ret {
|
|
|
|
p
|
|
|
|
} else {
|
|
|
|
throw_unsup_format!("unimplemented (diverging) intrinsic: {}", intrinsic_name);
|
2019-12-23 12:56:23 +01:00
|
|
|
},
|
2019-04-14 21:02:55 -04:00
|
|
|
};
|
|
|
|
|
2019-09-05 18:17:58 +02:00
|
|
|
match intrinsic_name {
|
2016-09-20 16:05:30 +02:00
|
|
|
"arith_offset" => {
|
2019-11-08 22:07:52 +01:00
|
|
|
let offset = this.read_scalar(args[1])?.to_machine_isize(this)?;
|
2018-12-11 14:16:58 +01:00
|
|
|
let ptr = this.read_scalar(args[0])?.not_undef()?;
|
2018-08-15 21:01:40 +02:00
|
|
|
|
|
|
|
let pointee_ty = substs.type_at(0);
|
2018-12-11 14:16:58 +01:00
|
|
|
let pointee_size = this.layout_of(pointee_ty)?.size.bytes() as i64;
|
2018-08-15 21:01:40 +02:00
|
|
|
let offset = offset.overflowing_mul(pointee_size).0;
|
2018-12-11 14:16:58 +01:00
|
|
|
let result_ptr = ptr.ptr_wrapping_signed_offset(offset, this);
|
|
|
|
this.write_scalar(result_ptr, dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
"assume" => {
|
2018-12-11 14:16:58 +01:00
|
|
|
let cond = this.read_scalar(args[0])?.to_bool()?;
|
2017-08-10 08:48:38 -07:00
|
|
|
if !cond {
|
2019-08-05 10:45:48 +02:00
|
|
|
throw_ub_format!("`assume` intrinsic called with `false`");
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2019-08-02 20:20:56 +02:00
|
|
|
"volatile_load" => {
|
2019-08-04 10:52:09 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
|
|
|
this.copy_op(place.into(), dest)?;
|
2019-08-02 20:20:56 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
"volatile_store" => {
|
2019-08-04 10:52:09 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
|
|
|
this.copy_op(args[1], place.into())?;
|
2019-08-02 20:20:56 +02:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "atomic_load"
|
|
|
|
| "atomic_load_relaxed"
|
|
|
|
| "atomic_load_acq"
|
|
|
|
=> {
|
2019-08-04 10:52:09 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
|
|
|
let val = this.read_scalar(place.into())?; // make sure it fits into a scalar; otherwise it cannot be atomic
|
2016-10-14 03:49:02 -06:00
|
|
|
|
2019-08-02 20:20:56 +02:00
|
|
|
// Check alignment requirements. Atomics must always be aligned to their size,
|
|
|
|
// even if the type they wrap would be less aligned (e.g. AtomicU64 on 32bit must
|
|
|
|
// be 8-aligned).
|
2019-08-04 10:52:09 +02:00
|
|
|
let align = Align::from_bytes(place.layout.size.bytes()).unwrap();
|
2019-10-18 11:11:50 +09:00
|
|
|
this.memory.check_ptr_access(place.ptr, place.layout.size, align)?;
|
2019-08-02 20:20:56 +02:00
|
|
|
|
|
|
|
this.write_scalar(val, dest)?;
|
2019-02-25 00:08:38 +09:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "atomic_store"
|
|
|
|
| "atomic_store_relaxed"
|
|
|
|
| "atomic_store_rel"
|
|
|
|
=> {
|
2019-08-04 10:52:09 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
2018-12-11 14:16:58 +01:00
|
|
|
let val = this.read_scalar(args[1])?; // make sure it fits into a scalar; otherwise it cannot be atomic
|
2016-10-14 03:49:02 -06:00
|
|
|
|
2019-08-02 20:20:56 +02:00
|
|
|
// Check alignment requirements. Atomics must always be aligned to their size,
|
|
|
|
// even if the type they wrap would be less aligned (e.g. AtomicU64 on 32bit must
|
|
|
|
// be 8-aligned).
|
2019-08-04 10:52:09 +02:00
|
|
|
let align = Align::from_bytes(place.layout.size.bytes()).unwrap();
|
2019-10-18 11:11:50 +09:00
|
|
|
this.memory.check_ptr_access(place.ptr, place.layout.size, align)?;
|
2019-08-02 20:20:56 +02:00
|
|
|
|
2019-08-04 10:52:09 +02:00
|
|
|
this.write_scalar(val, place.into())?;
|
2019-02-25 00:08:38 +09:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "atomic_fence_acq"
|
|
|
|
| "atomic_fence_rel"
|
|
|
|
| "atomic_fence_acqrel"
|
|
|
|
| "atomic_fence"
|
2020-01-24 19:02:58 -06:00
|
|
|
| "atomic_singlethreadfence_acq"
|
|
|
|
| "atomic_singlethreadfence_rel"
|
|
|
|
| "atomic_singlethreadfence_acqrel"
|
|
|
|
| "atomic_singlethreadfence"
|
2019-12-23 12:56:23 +01:00
|
|
|
=> {
|
2016-11-03 17:32:06 +01:00
|
|
|
// we are inherently singlethreaded and singlecored, this is a nop
|
|
|
|
}
|
|
|
|
|
2017-03-14 13:05:51 +01:00
|
|
|
_ if intrinsic_name.starts_with("atomic_xchg") => {
|
2019-08-04 10:52:09 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
2018-12-11 14:16:58 +01:00
|
|
|
let new = this.read_scalar(args[1])?;
|
2019-08-04 10:52:09 +02:00
|
|
|
let old = this.read_scalar(place.into())?;
|
2019-08-02 20:20:56 +02:00
|
|
|
|
|
|
|
// Check alignment requirements. Atomics must always be aligned to their size,
|
|
|
|
// even if the type they wrap would be less aligned (e.g. AtomicU64 on 32bit must
|
|
|
|
// be 8-aligned).
|
2019-08-04 10:52:09 +02:00
|
|
|
let align = Align::from_bytes(place.layout.size.bytes()).unwrap();
|
2019-10-18 11:11:50 +09:00
|
|
|
this.memory.check_ptr_access(place.ptr, place.layout.size, align)?;
|
2019-08-02 20:20:56 +02:00
|
|
|
|
2018-12-11 14:16:58 +01:00
|
|
|
this.write_scalar(old, dest)?; // old value is returned
|
2019-08-04 10:52:09 +02:00
|
|
|
this.write_scalar(new, place.into())?;
|
2016-11-15 15:19:38 +01:00
|
|
|
}
|
|
|
|
|
2017-03-14 13:05:51 +01:00
|
|
|
_ if intrinsic_name.starts_with("atomic_cxchg") => {
|
2019-08-04 10:52:09 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
2019-02-08 14:01:40 +01:00
|
|
|
let expect_old = this.read_immediate(args[1])?; // read as immediate for the sake of `binary_op()`
|
2018-12-11 14:16:58 +01:00
|
|
|
let new = this.read_scalar(args[2])?;
|
2019-08-04 10:52:09 +02:00
|
|
|
let old = this.read_immediate(place.into())?; // read as immediate for the sake of `binary_op()`
|
2019-08-02 20:20:56 +02:00
|
|
|
|
|
|
|
// Check alignment requirements. Atomics must always be aligned to their size,
|
|
|
|
// even if the type they wrap would be less aligned (e.g. AtomicU64 on 32bit must
|
|
|
|
// be 8-aligned).
|
2019-08-04 10:52:09 +02:00
|
|
|
let align = Align::from_bytes(place.layout.size.bytes()).unwrap();
|
2019-10-18 11:11:50 +09:00
|
|
|
this.memory.check_ptr_access(place.ptr, place.layout.size, align)?;
|
2019-08-02 20:20:56 +02:00
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
// `binary_op` will bail if either of them is not a scalar.
|
2019-08-10 21:19:25 +02:00
|
|
|
let eq = this.overflowing_binary_op(mir::BinOp::Eq, old, expect_old)?.0;
|
2018-11-05 08:51:55 +01:00
|
|
|
let res = Immediate::ScalarPair(old.to_scalar_or_undef(), eq.into());
|
2019-12-23 12:56:23 +01:00
|
|
|
// Return old value.
|
|
|
|
this.write_immediate(res, dest)?;
|
|
|
|
// Update ptr depending on comparison.
|
2018-08-15 21:01:40 +02:00
|
|
|
if eq.to_bool()? {
|
2019-08-04 10:52:09 +02:00
|
|
|
this.write_scalar(new, place.into())?;
|
2018-08-15 21:01:40 +02:00
|
|
|
}
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "atomic_or"
|
|
|
|
| "atomic_or_acq"
|
|
|
|
| "atomic_or_rel"
|
|
|
|
| "atomic_or_acqrel"
|
|
|
|
| "atomic_or_relaxed"
|
|
|
|
| "atomic_xor"
|
|
|
|
| "atomic_xor_acq"
|
|
|
|
| "atomic_xor_rel"
|
|
|
|
| "atomic_xor_acqrel"
|
|
|
|
| "atomic_xor_relaxed"
|
|
|
|
| "atomic_and"
|
|
|
|
| "atomic_and_acq"
|
|
|
|
| "atomic_and_rel"
|
|
|
|
| "atomic_and_acqrel"
|
|
|
|
| "atomic_and_relaxed"
|
|
|
|
| "atomic_nand"
|
|
|
|
| "atomic_nand_acq"
|
|
|
|
| "atomic_nand_rel"
|
|
|
|
| "atomic_nand_acqrel"
|
|
|
|
| "atomic_nand_relaxed"
|
|
|
|
| "atomic_xadd"
|
|
|
|
| "atomic_xadd_acq"
|
|
|
|
| "atomic_xadd_rel"
|
|
|
|
| "atomic_xadd_acqrel"
|
|
|
|
| "atomic_xadd_relaxed"
|
|
|
|
| "atomic_xsub"
|
|
|
|
| "atomic_xsub_acq"
|
|
|
|
| "atomic_xsub_rel"
|
|
|
|
| "atomic_xsub_acqrel"
|
|
|
|
| "atomic_xsub_relaxed"
|
|
|
|
=> {
|
2019-08-04 10:52:09 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
|
|
|
if !place.layout.ty.is_integral() {
|
2019-08-03 20:31:33 +02:00
|
|
|
bug!("Atomic arithmetic operations only work on integer types");
|
2018-10-31 11:04:35 +01:00
|
|
|
}
|
2018-12-11 14:16:58 +01:00
|
|
|
let rhs = this.read_immediate(args[1])?;
|
2019-08-04 10:52:09 +02:00
|
|
|
let old = this.read_immediate(place.into())?;
|
2019-08-02 20:20:56 +02:00
|
|
|
|
|
|
|
// Check alignment requirements. Atomics must always be aligned to their size,
|
|
|
|
// even if the type they wrap would be less aligned (e.g. AtomicU64 on 32bit must
|
|
|
|
// be 8-aligned).
|
2019-08-04 10:52:09 +02:00
|
|
|
let align = Align::from_bytes(place.layout.size.bytes()).unwrap();
|
2019-10-18 11:11:50 +09:00
|
|
|
this.memory.check_ptr_access(place.ptr, place.layout.size, align)?;
|
2019-08-02 20:20:56 +02:00
|
|
|
|
2018-12-11 14:16:58 +01:00
|
|
|
this.write_immediate(*old, dest)?; // old value is returned
|
2019-02-06 11:38:40 +01:00
|
|
|
let (op, neg) = match intrinsic_name.split('_').nth(1).unwrap() {
|
|
|
|
"or" => (mir::BinOp::BitOr, false),
|
|
|
|
"xor" => (mir::BinOp::BitXor, false),
|
|
|
|
"and" => (mir::BinOp::BitAnd, false),
|
|
|
|
"xadd" => (mir::BinOp::Add, false),
|
|
|
|
"xsub" => (mir::BinOp::Sub, false),
|
|
|
|
"nand" => (mir::BinOp::BitAnd, true),
|
2017-03-14 13:05:51 +01:00
|
|
|
_ => bug!(),
|
2016-11-03 17:32:06 +01:00
|
|
|
};
|
2018-10-12 09:07:56 +02:00
|
|
|
// Atomics wrap around on overflow.
|
2019-08-10 21:19:25 +02:00
|
|
|
let val = this.binary_op(op, old, rhs)?;
|
2019-12-23 12:56:23 +01:00
|
|
|
let val = if neg { this.unary_op(mir::UnOp::Not, val)? } else { val };
|
2019-08-10 21:19:25 +02:00
|
|
|
this.write_immediate(*val, place.into())?;
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
2016-11-03 17:32:06 +01:00
|
|
|
|
2016-09-20 16:05:30 +02:00
|
|
|
"breakpoint" => unimplemented!(), // halt miri
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "copy"
|
|
|
|
| "copy_nonoverlapping"
|
|
|
|
=> {
|
2017-03-29 09:10:05 +02:00
|
|
|
let elem_ty = substs.type_at(0);
|
2018-12-11 14:16:58 +01:00
|
|
|
let elem_layout = this.layout_of(elem_ty)?;
|
2017-12-06 15:03:24 +01:00
|
|
|
let elem_size = elem_layout.size.bytes();
|
2019-11-08 22:07:52 +01:00
|
|
|
let count = this.read_scalar(args[2])?.to_machine_usize(this)?;
|
2018-11-23 09:46:51 +01:00
|
|
|
let elem_align = elem_layout.align.abi;
|
2019-07-06 13:14:06 +02:00
|
|
|
|
|
|
|
let size = Size::from_bytes(count * elem_size);
|
2018-12-11 14:16:58 +01:00
|
|
|
let src = this.read_scalar(args[0])?.not_undef()?;
|
2019-10-18 11:11:50 +09:00
|
|
|
let src = this.memory.check_ptr_access(src, size, elem_align)?;
|
2018-12-11 14:16:58 +01:00
|
|
|
let dest = this.read_scalar(args[1])?.not_undef()?;
|
2019-10-18 11:11:50 +09:00
|
|
|
let dest = this.memory.check_ptr_access(dest, size, elem_align)?;
|
2019-07-06 13:14:06 +02:00
|
|
|
|
|
|
|
if let (Some(src), Some(dest)) = (src, dest) {
|
2019-10-18 11:11:50 +09:00
|
|
|
this.memory.copy(
|
2019-07-06 13:14:06 +02:00
|
|
|
src,
|
|
|
|
dest,
|
|
|
|
size,
|
|
|
|
intrinsic_name.ends_with("_nonoverlapping"),
|
|
|
|
)?;
|
|
|
|
}
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
"discriminant_value" => {
|
2018-12-11 14:16:58 +01:00
|
|
|
let place = this.deref_operand(args[0])?;
|
|
|
|
let discr_val = this.read_discriminant(place.into())?.0;
|
|
|
|
this.write_scalar(Scalar::from_uint(discr_val, dest.layout.size), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "sinf32"
|
|
|
|
| "fabsf32"
|
|
|
|
| "cosf32"
|
|
|
|
| "sqrtf32"
|
|
|
|
| "expf32"
|
|
|
|
| "exp2f32"
|
|
|
|
| "logf32"
|
|
|
|
| "log10f32"
|
|
|
|
| "log2f32"
|
|
|
|
| "floorf32"
|
|
|
|
| "ceilf32"
|
|
|
|
| "truncf32"
|
|
|
|
| "roundf32"
|
|
|
|
=> {
|
2019-06-09 00:12:57 +02:00
|
|
|
// FIXME: Using host floats.
|
|
|
|
let f = f32::from_bits(this.read_scalar(args[0])?.to_u32()?);
|
2019-09-05 18:17:58 +02:00
|
|
|
let f = match intrinsic_name {
|
2017-03-14 12:35:38 +01:00
|
|
|
"sinf32" => f.sin(),
|
|
|
|
"fabsf32" => f.abs(),
|
|
|
|
"cosf32" => f.cos(),
|
|
|
|
"sqrtf32" => f.sqrt(),
|
|
|
|
"expf32" => f.exp(),
|
|
|
|
"exp2f32" => f.exp2(),
|
|
|
|
"logf32" => f.ln(),
|
|
|
|
"log10f32" => f.log10(),
|
|
|
|
"log2f32" => f.log2(),
|
|
|
|
"floorf32" => f.floor(),
|
|
|
|
"ceilf32" => f.ceil(),
|
|
|
|
"truncf32" => f.trunc(),
|
2019-08-04 10:25:10 -04:00
|
|
|
"roundf32" => f.round(),
|
2017-03-14 12:35:38 +01:00
|
|
|
_ => bug!(),
|
|
|
|
};
|
2019-06-09 00:12:57 +02:00
|
|
|
this.write_scalar(Scalar::from_u32(f.to_bits()), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "sinf64"
|
|
|
|
| "fabsf64"
|
|
|
|
| "cosf64"
|
|
|
|
| "sqrtf64"
|
|
|
|
| "expf64"
|
|
|
|
| "exp2f64"
|
|
|
|
| "logf64"
|
|
|
|
| "log10f64"
|
|
|
|
| "log2f64"
|
|
|
|
| "floorf64"
|
|
|
|
| "ceilf64"
|
|
|
|
| "truncf64"
|
|
|
|
| "roundf64"
|
|
|
|
=> {
|
2019-06-09 00:12:57 +02:00
|
|
|
// FIXME: Using host floats.
|
|
|
|
let f = f64::from_bits(this.read_scalar(args[0])?.to_u64()?);
|
2019-09-05 18:17:58 +02:00
|
|
|
let f = match intrinsic_name {
|
2017-03-14 12:35:38 +01:00
|
|
|
"sinf64" => f.sin(),
|
|
|
|
"fabsf64" => f.abs(),
|
|
|
|
"cosf64" => f.cos(),
|
|
|
|
"sqrtf64" => f.sqrt(),
|
|
|
|
"expf64" => f.exp(),
|
|
|
|
"exp2f64" => f.exp2(),
|
|
|
|
"logf64" => f.ln(),
|
|
|
|
"log10f64" => f.log10(),
|
|
|
|
"log2f64" => f.log2(),
|
|
|
|
"floorf64" => f.floor(),
|
|
|
|
"ceilf64" => f.ceil(),
|
|
|
|
"truncf64" => f.trunc(),
|
2019-08-04 10:25:10 -04:00
|
|
|
"roundf64" => f.round(),
|
2017-03-14 12:35:38 +01:00
|
|
|
_ => bug!(),
|
|
|
|
};
|
2019-06-09 00:12:57 +02:00
|
|
|
this.write_scalar(Scalar::from_u64(f.to_bits()), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "fadd_fast"
|
|
|
|
| "fsub_fast"
|
|
|
|
| "fmul_fast"
|
|
|
|
| "fdiv_fast"
|
|
|
|
| "frem_fast"
|
|
|
|
=> {
|
2018-12-11 14:16:58 +01:00
|
|
|
let a = this.read_immediate(args[0])?;
|
|
|
|
let b = this.read_immediate(args[1])?;
|
2019-09-05 18:17:58 +02:00
|
|
|
let op = match intrinsic_name {
|
2017-02-10 05:58:34 -08:00
|
|
|
"fadd_fast" => mir::BinOp::Add,
|
|
|
|
"fsub_fast" => mir::BinOp::Sub,
|
|
|
|
"fmul_fast" => mir::BinOp::Mul,
|
|
|
|
"fdiv_fast" => mir::BinOp::Div,
|
|
|
|
"frem_fast" => mir::BinOp::Rem,
|
|
|
|
_ => bug!(),
|
|
|
|
};
|
2018-12-11 14:16:58 +01:00
|
|
|
this.binop_ignore_overflow(op, a, b, dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "minnumf32"
|
|
|
|
| "maxnumf32"
|
|
|
|
| "copysignf32"
|
|
|
|
=> {
|
2019-06-09 00:19:05 +02:00
|
|
|
let a = this.read_scalar(args[0])?.to_f32()?;
|
|
|
|
let b = this.read_scalar(args[1])?.to_f32()?;
|
2019-11-13 10:14:13 +08:00
|
|
|
let res = match intrinsic_name {
|
|
|
|
"minnumf32" => a.min(b),
|
|
|
|
"maxnumf32" => a.max(b),
|
|
|
|
"copysignf32" => a.copy_sign(b),
|
|
|
|
_ => bug!(),
|
2019-06-09 00:19:05 +02:00
|
|
|
};
|
|
|
|
this.write_scalar(Scalar::from_f32(res), dest)?;
|
|
|
|
}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "minnumf64"
|
|
|
|
| "maxnumf64"
|
|
|
|
| "copysignf64"
|
|
|
|
=> {
|
2019-06-09 00:19:05 +02:00
|
|
|
let a = this.read_scalar(args[0])?.to_f64()?;
|
|
|
|
let b = this.read_scalar(args[1])?.to_f64()?;
|
2019-11-13 10:14:13 +08:00
|
|
|
let res = match intrinsic_name {
|
|
|
|
"minnumf64" => a.min(b),
|
|
|
|
"maxnumf64" => a.max(b),
|
|
|
|
"copysignf64" => a.copy_sign(b),
|
|
|
|
_ => bug!(),
|
2019-06-09 00:19:05 +02:00
|
|
|
};
|
|
|
|
this.write_scalar(Scalar::from_f64(res), dest)?;
|
|
|
|
}
|
|
|
|
|
2019-11-03 10:03:30 +01:00
|
|
|
"exact_div" =>
|
2019-12-23 12:56:23 +01:00
|
|
|
this.exact_div(this.read_immediate(args[0])?, this.read_immediate(args[1])?, dest)?,
|
2018-05-07 18:02:57 +02:00
|
|
|
|
2019-02-09 13:07:55 +01:00
|
|
|
"forget" => {}
|
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "likely"
|
|
|
|
| "unlikely"
|
|
|
|
=> {
|
2019-02-09 13:07:55 +01:00
|
|
|
// These just return their argument
|
|
|
|
let b = this.read_immediate(args[0])?;
|
|
|
|
this.write_immediate(*b, dest)?;
|
|
|
|
}
|
2016-09-20 16:05:30 +02:00
|
|
|
|
2016-10-14 03:31:45 -06:00
|
|
|
"init" => {
|
2018-08-15 21:01:40 +02:00
|
|
|
// Check fast path: we don't want to force an allocation in case the destination is a simple value,
|
|
|
|
// but we also do not want to create a new allocation with 0s and then copy that over.
|
2018-10-09 22:35:14 +02:00
|
|
|
// FIXME: We do not properly validate in case of ZSTs and when doing it in memory!
|
|
|
|
// However, this only affects direct calls of the intrinsic; calls to the stable
|
|
|
|
// functions wrapping them do get their validation.
|
2018-11-13 17:19:42 +01:00
|
|
|
// FIXME: should we check that the destination pointer is aligned even for ZSTs?
|
2019-02-16 01:29:38 +00:00
|
|
|
if !dest.layout.is_zst() {
|
2018-08-18 11:59:28 +02:00
|
|
|
match dest.layout.abi {
|
|
|
|
layout::Abi::Scalar(ref s) => {
|
2018-12-11 14:16:58 +01:00
|
|
|
let x = Scalar::from_int(0, s.value.size(this));
|
2019-07-24 20:42:53 +02:00
|
|
|
this.write_scalar(x, dest)?;
|
2018-08-18 11:59:28 +02:00
|
|
|
}
|
|
|
|
layout::Abi::ScalarPair(ref s1, ref s2) => {
|
2018-12-11 14:16:58 +01:00
|
|
|
let x = Scalar::from_int(0, s1.value.size(this));
|
|
|
|
let y = Scalar::from_int(0, s2.value.size(this));
|
|
|
|
this.write_immediate(Immediate::ScalarPair(x.into(), y.into()), dest)?;
|
2018-08-18 11:59:28 +02:00
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
// Do it in memory
|
2018-12-11 14:16:58 +01:00
|
|
|
let mplace = this.force_allocation(dest)?;
|
2020-01-15 19:27:21 +01:00
|
|
|
assert!(!mplace.layout.is_unsized());
|
2019-12-23 12:56:23 +01:00
|
|
|
this.memory.write_bytes(
|
|
|
|
mplace.ptr,
|
|
|
|
iter::repeat(0u8).take(dest.layout.size.bytes() as usize),
|
|
|
|
)?;
|
2018-08-18 11:59:28 +02:00
|
|
|
}
|
2018-04-07 11:43:46 +02:00
|
|
|
}
|
2016-11-03 12:31:04 +01:00
|
|
|
}
|
2016-10-14 03:31:45 -06:00
|
|
|
}
|
2016-09-20 16:05:30 +02:00
|
|
|
|
|
|
|
"pref_align_of" => {
|
2017-03-29 09:10:05 +02:00
|
|
|
let ty = substs.type_at(0);
|
2018-12-11 14:16:58 +01:00
|
|
|
let layout = this.layout_of(ty)?;
|
2018-11-23 09:46:51 +01:00
|
|
|
let align = layout.align.pref.bytes();
|
2018-12-11 14:16:58 +01:00
|
|
|
let ptr_size = this.pointer_size();
|
2018-08-07 15:22:11 +02:00
|
|
|
let align_val = Scalar::from_uint(align as u128, ptr_size);
|
2018-12-11 14:16:58 +01:00
|
|
|
this.write_scalar(align_val, dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
"move_val_init" => {
|
2019-08-04 10:54:07 +02:00
|
|
|
let place = this.deref_operand(args[0])?;
|
|
|
|
this.copy_op(args[1], place.into())?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
"offset" => {
|
2019-11-08 22:07:52 +01:00
|
|
|
let offset = this.read_scalar(args[1])?.to_machine_isize(this)?;
|
2018-12-11 14:16:58 +01:00
|
|
|
let ptr = this.read_scalar(args[0])?.not_undef()?;
|
|
|
|
let result_ptr = this.pointer_offset_inbounds(ptr, substs.type_at(0), offset)?;
|
|
|
|
this.write_scalar(result_ptr, dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2019-01-07 11:23:08 +01:00
|
|
|
"panic_if_uninhabited" => {
|
|
|
|
let ty = substs.type_at(0);
|
|
|
|
let layout = this.layout_of(ty)?;
|
|
|
|
if layout.abi.is_uninhabited() {
|
2019-11-03 15:39:03 +01:00
|
|
|
// FIXME: This should throw a panic in the interpreted program instead.
|
|
|
|
throw_unsup_format!("Trying to instantiate uninhabited type {}", ty)
|
2019-01-07 11:23:08 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-03-14 12:35:38 +01:00
|
|
|
"powf32" => {
|
2019-06-09 00:12:57 +02:00
|
|
|
// FIXME: Using host floats.
|
|
|
|
let f = f32::from_bits(this.read_scalar(args[0])?.to_u32()?);
|
|
|
|
let f2 = f32::from_bits(this.read_scalar(args[1])?.to_u32()?);
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_u32(f.powf(f2).to_bits()), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2017-03-14 12:35:38 +01:00
|
|
|
"powf64" => {
|
2019-06-09 00:12:57 +02:00
|
|
|
// FIXME: Using host floats.
|
|
|
|
let f = f64::from_bits(this.read_scalar(args[0])?.to_u64()?);
|
|
|
|
let f2 = f64::from_bits(this.read_scalar(args[1])?.to_u64()?);
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_u64(f.powf(f2).to_bits()), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2017-03-14 12:35:38 +01:00
|
|
|
"fmaf32" => {
|
2018-12-11 14:16:58 +01:00
|
|
|
let a = this.read_scalar(args[0])?.to_f32()?;
|
|
|
|
let b = this.read_scalar(args[1])?.to_f32()?;
|
|
|
|
let c = this.read_scalar(args[2])?.to_f32()?;
|
2019-06-10 09:49:06 +02:00
|
|
|
let res = a.mul_add(b, c).value;
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_f32(res), dest)?;
|
2017-03-14 12:35:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
"fmaf64" => {
|
2018-12-11 14:16:58 +01:00
|
|
|
let a = this.read_scalar(args[0])?.to_f64()?;
|
|
|
|
let b = this.read_scalar(args[1])?.to_f64()?;
|
|
|
|
let c = this.read_scalar(args[2])?.to_f64()?;
|
2019-06-10 09:49:06 +02:00
|
|
|
let res = a.mul_add(b, c).value;
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_f64(res), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2017-03-14 12:35:38 +01:00
|
|
|
"powif32" => {
|
2019-06-09 00:12:57 +02:00
|
|
|
// FIXME: Using host floats.
|
|
|
|
let f = f32::from_bits(this.read_scalar(args[0])?.to_u32()?);
|
2018-12-11 14:16:58 +01:00
|
|
|
let i = this.read_scalar(args[1])?.to_i32()?;
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_u32(f.powi(i).to_bits()), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2017-03-14 12:35:38 +01:00
|
|
|
"powif64" => {
|
2019-06-09 00:12:57 +02:00
|
|
|
// FIXME: Using host floats.
|
|
|
|
let f = f64::from_bits(this.read_scalar(args[0])?.to_u64()?);
|
2018-12-11 14:16:58 +01:00
|
|
|
let i = this.read_scalar(args[1])?.to_i32()?;
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_u64(f.powi(i).to_bits()), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
"size_of_val" => {
|
2018-12-11 14:16:58 +01:00
|
|
|
let mplace = this.deref_operand(args[0])?;
|
2019-12-23 12:56:23 +01:00
|
|
|
let (size, _) = this
|
|
|
|
.size_and_align_of_mplace(mplace)?
|
2018-10-09 22:35:14 +02:00
|
|
|
.expect("size_of_val called on extern type");
|
2018-12-11 14:16:58 +01:00
|
|
|
let ptr_size = this.pointer_size();
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_uint(size.bytes() as u128, ptr_size), dest)?;
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
2016-11-03 17:32:06 +01:00
|
|
|
|
2019-12-23 12:56:23 +01:00
|
|
|
#[rustfmt::skip]
|
|
|
|
| "min_align_of_val"
|
|
|
|
| "align_of_val"
|
|
|
|
=> {
|
2018-12-11 14:16:58 +01:00
|
|
|
let mplace = this.deref_operand(args[0])?;
|
2019-12-23 12:56:23 +01:00
|
|
|
let (_, align) = this
|
|
|
|
.size_and_align_of_mplace(mplace)?
|
2018-10-09 22:35:14 +02:00
|
|
|
.expect("size_of_val called on extern type");
|
2018-12-11 14:16:58 +01:00
|
|
|
let ptr_size = this.pointer_size();
|
2019-12-23 12:56:23 +01:00
|
|
|
this.write_scalar(Scalar::from_uint(align.bytes(), ptr_size), dest)?;
|
2016-11-03 17:32:06 +01:00
|
|
|
}
|
|
|
|
|
2019-08-07 10:24:27 +02:00
|
|
|
"uninit" => {
|
|
|
|
// Check fast path: we don't want to force an allocation in case the destination is a simple value,
|
|
|
|
// but we also do not want to create a new allocation with 0s and then copy that over.
|
|
|
|
// FIXME: We do not properly validate in case of ZSTs and when doing it in memory!
|
|
|
|
// However, this only affects direct calls of the intrinsic; calls to the stable
|
|
|
|
// functions wrapping them do get their validation.
|
|
|
|
// FIXME: should we check alignment for ZSTs?
|
|
|
|
if !dest.layout.is_zst() {
|
|
|
|
match dest.layout.abi {
|
|
|
|
layout::Abi::Scalar(..) => {
|
|
|
|
let x = ScalarMaybeUndef::Undef;
|
|
|
|
this.write_immediate(Immediate::Scalar(x), dest)?;
|
|
|
|
}
|
|
|
|
layout::Abi::ScalarPair(..) => {
|
|
|
|
let x = ScalarMaybeUndef::Undef;
|
|
|
|
this.write_immediate(Immediate::ScalarPair(x, x), dest)?;
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
// Do it in memory
|
|
|
|
let mplace = this.force_allocation(dest)?;
|
2020-01-15 19:27:21 +01:00
|
|
|
assert!(!mplace.layout.is_unsized());
|
2019-12-27 14:26:05 +01:00
|
|
|
let ptr = mplace.ptr.assert_ptr();
|
2019-10-19 12:28:39 +02:00
|
|
|
// We know the return place is in-bounds
|
2019-12-23 12:56:23 +01:00
|
|
|
this.memory.get_raw_mut(ptr.alloc_id)?.mark_definedness(
|
|
|
|
ptr,
|
|
|
|
dest.layout.size,
|
|
|
|
false,
|
|
|
|
);
|
2019-08-07 10:24:27 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-03-13 19:28:29 -04:00
|
|
|
"write_bytes" => {
|
2017-03-29 09:10:05 +02:00
|
|
|
let ty = substs.type_at(0);
|
2018-12-11 14:16:58 +01:00
|
|
|
let ty_layout = this.layout_of(ty)?;
|
|
|
|
let val_byte = this.read_scalar(args[1])?.to_u8()?;
|
|
|
|
let ptr = this.read_scalar(args[0])?.not_undef()?;
|
2019-11-08 22:07:52 +01:00
|
|
|
let count = this.read_scalar(args[2])?.to_machine_usize(this)?;
|
2018-11-13 17:19:42 +01:00
|
|
|
let byte_count = ty_layout.size * count;
|
2019-12-23 12:56:23 +01:00
|
|
|
this.memory
|
|
|
|
.write_bytes(ptr, iter::repeat(val_byte).take(byte_count.bytes() as usize))?;
|
2017-03-13 19:28:29 -04:00
|
|
|
}
|
|
|
|
|
2019-08-03 20:31:33 +02:00
|
|
|
name => throw_unsup_format!("unimplemented intrinsic: {}", name),
|
2016-09-20 16:05:30 +02:00
|
|
|
}
|
|
|
|
|
2019-11-25 22:48:31 +01:00
|
|
|
this.dump_place(*dest);
|
|
|
|
this.go_to_block(ret);
|
2016-09-20 16:05:30 +02:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|