rust/src/intrinsic.rs

714 lines
28 KiB
Rust
Raw Normal View History

use rustc::mir;
2018-05-26 10:07:34 -05:00
use rustc::ty::layout::{TyLayout, LayoutOf, Size, Primitive, Integer::*};
2017-11-21 06:32:40 -06:00
use rustc::ty;
2016-09-20 09:05:30 -05:00
2018-05-26 10:07:34 -05:00
use rustc::mir::interpret::{EvalResult, Scalar, Value};
2017-12-14 04:03:55 -06:00
use rustc_mir::interpret::{Place, PlaceExtra, HasMemory, EvalContext, ValTy};
2016-09-20 09:05:30 -05:00
use helpers::EvalContextExt as HelperEvalContextExt;
2018-05-26 10:07:34 -05:00
use super::ScalarExt;
/// Extension trait hooking miri's intrinsic emulation into the generic
/// rustc_mir `EvalContext` interpreter.
pub trait EvalContextExt<'tcx> {
    /// Emulates a call to a compiler intrinsic (`std::intrinsics::*`).
    ///
    /// * `instance` — the monomorphized intrinsic being called; its name and
    ///   substitutions select the behavior.
    /// * `args` — the already-evaluated call operands (value + type pairs).
    /// * `dest` / `dest_layout` — the place and layout the result is written to.
    /// * `target` — the basic block to jump to after the intrinsic completes
    ///   (no stack frame is pushed for intrinsics).
    fn call_intrinsic(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[ValTy<'tcx>],
        dest: Place,
        dest_layout: TyLayout<'tcx>,
        target: mir::BasicBlock,
    ) -> EvalResult<'tcx>;
}
2018-01-14 11:59:13 -06:00
impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
    /// Dispatches on the intrinsic's name and emulates it by computing the
    /// result directly and writing it to `dest`, then jumps to `target`.
    /// Unknown intrinsics produce an `Unimplemented` error.
    fn call_intrinsic(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[ValTy<'tcx>],
        dest: Place,
        dest_layout: TyLayout<'tcx>,
        target: mir::BasicBlock,
    ) -> EvalResult<'tcx> {
        // Type parameters of the intrinsic (e.g. `T` in `offset::<T>`).
        let substs = instance.substs;

        let intrinsic_name = &self.tcx.item_name(instance.def_id()).as_str()[..];
        match intrinsic_name {
            // Checked arithmetic: writes an (result, overflowed) pair to `dest`.
            "add_with_overflow" => {
                self.intrinsic_with_overflow(
                    mir::BinOp::Add,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?
            }

            "sub_with_overflow" => {
                self.intrinsic_with_overflow(
                    mir::BinOp::Sub,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?
            }

            "mul_with_overflow" => {
                self.intrinsic_with_overflow(
                    mir::BinOp::Mul,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?
            }

            // Pointer offset with wrapping semantics (no inbounds requirement).
            "arith_offset" => {
                let offset = self.value_to_isize(args[1])?;
                let ptr = self.into_ptr(args[0].value)?;
                let result_ptr = self.wrapping_pointer_offset(ptr, substs.type_at(0), offset)?;
                self.write_ptr(dest, result_ptr, dest_layout.ty)?;
            }

            // `assume(false)` is UB; miri reports it instead of miscompiling.
            "assume" => {
                let cond = self.value_to_scalar(args[0])?.to_bool()?;
                if !cond {
                    return err!(AssumptionNotHeld);
                }
            }

            // Atomic/volatile loads degenerate to plain loads: miri is
            // single-threaded, so ordering is irrelevant.
            "atomic_load" |
            "atomic_load_relaxed" |
            "atomic_load_acq" |
            "volatile_load" => {
                let ptr = self.into_ptr(args[0].value)?;
                let align = self.layout_of(args[0].ty)?.align;
                let valty = ValTy {
                    value: Value::ByRef(ptr, align),
                    ty: substs.type_at(0),
                };
                self.write_value(valty, dest)?;
            }

            // Likewise, atomic/volatile stores are plain stores.
            "atomic_store" |
            "atomic_store_relaxed" |
            "atomic_store_rel" |
            "volatile_store" => {
                let ty = substs.type_at(0);
                let align = self.layout_of(ty)?.align;
                let dest = self.into_ptr(args[0].value)?;
                self.write_value_to_ptr(args[1].value, dest, align, ty)?;
            }

            "atomic_fence_acq" => {
                // we are inherently singlethreaded and singlecored, this is a nop
            }

            // Atomic exchange: read old value, return it, store the new one.
            _ if intrinsic_name.starts_with("atomic_xchg") => {
                let ty = substs.type_at(0);
                let align = self.layout_of(ty)?.align;
                let ptr = self.into_ptr(args[0].value)?;
                let change = self.value_to_scalar(args[1])?;
                let old = self.read_value(ptr, align, ty)?;
                let old = match old {
                    Value::Scalar(val) => val,
                    Value::ByRef { .. } => bug!("just read the value, can't be byref"),
                    Value::ScalarPair(..) => bug!("atomic_xchg doesn't work with nonprimitives"),
                };
                self.write_scalar(dest, old, ty)?;
                self.write_scalar(
                    Place::from_scalar_ptr(ptr, align),
                    change,
                    ty,
                )?;
            }

            // Compare-and-exchange: returns (old, success) and unconditionally
            // stores `change` — NOTE(review): the store happens even when the
            // comparison failed, and uses `dest_layout.align` rather than the
            // operand's alignment; both look suspicious but cannot be verified
            // from this file alone.
            _ if intrinsic_name.starts_with("atomic_cxchg") => {
                let ty = substs.type_at(0);
                let align = self.layout_of(ty)?.align;
                let ptr = self.into_ptr(args[0].value)?;
                let expect_old = self.value_to_scalar(args[1])?;
                let change = self.value_to_scalar(args[2])?;
                let old = self.read_value(ptr, align, ty)?;
                let old = match old {
                    Value::Scalar(val) => val,
                    Value::ByRef { .. } => bug!("just read the value, can't be byref"),
                    Value::ScalarPair(..) => bug!("atomic_cxchg doesn't work with nonprimitives"),
                };
                let (val, _) = self.binary_op(mir::BinOp::Eq, old, ty, expect_old, ty)?;
                let valty = ValTy {
                    value: Value::ScalarPair(old, val),
                    ty: dest_layout.ty,
                };
                self.write_value(valty, dest)?;
                self.write_scalar(
                    Place::from_scalar_ptr(ptr, dest_layout.align),
                    change,
                    ty,
                )?;
            }

            // Atomic read-modify-write family: return the old value, then
            // store `old <op> change` back. The op is derived from the name.
            "atomic_or" |
            "atomic_or_acq" |
            "atomic_or_rel" |
            "atomic_or_acqrel" |
            "atomic_or_relaxed" |
            "atomic_xor" |
            "atomic_xor_acq" |
            "atomic_xor_rel" |
            "atomic_xor_acqrel" |
            "atomic_xor_relaxed" |
            "atomic_and" |
            "atomic_and_acq" |
            "atomic_and_rel" |
            "atomic_and_acqrel" |
            "atomic_and_relaxed" |
            "atomic_xadd" |
            "atomic_xadd_acq" |
            "atomic_xadd_rel" |
            "atomic_xadd_acqrel" |
            "atomic_xadd_relaxed" |
            "atomic_xsub" |
            "atomic_xsub_acq" |
            "atomic_xsub_rel" |
            "atomic_xsub_acqrel" |
            "atomic_xsub_relaxed" => {
                let ty = substs.type_at(0);
                let align = self.layout_of(ty)?.align;
                let ptr = self.into_ptr(args[0].value)?;
                let change = self.value_to_scalar(args[1])?;
                let old = self.read_value(ptr, align, ty)?;
                let old = match old {
                    Value::Scalar(val) => val,
                    Value::ByRef { .. } => bug!("just read the value, can't be byref"),
                    Value::ScalarPair(..) => {
                        bug!("atomic_xadd_relaxed doesn't work with nonprimitives")
                    }
                };
                self.write_scalar(dest, old, ty)?;
                // Second name component ("or", "xadd", ...) selects the op.
                let op = match intrinsic_name.split('_').nth(1).unwrap() {
                    "or" => mir::BinOp::BitOr,
                    "xor" => mir::BinOp::BitXor,
                    "and" => mir::BinOp::BitAnd,
                    "xadd" => mir::BinOp::Add,
                    "xsub" => mir::BinOp::Sub,
                    _ => bug!(),
                };
                // FIXME: what do atomics do on overflow?
                let (val, _) = self.binary_op(op, old, ty, change, ty)?;
                self.write_scalar(Place::from_scalar_ptr(ptr, dest_layout.align), val, ty)?;
            }

            "breakpoint" => unimplemented!(), // halt miri

            // memcpy/memmove of `count` elements of `T`; the final argument to
            // `memory.copy` enables the nonoverlapping check for the former.
            "copy" |
            "copy_nonoverlapping" => {
                let elem_ty = substs.type_at(0);
                let elem_layout = self.layout_of(elem_ty)?;
                let elem_size = elem_layout.size.bytes();
                let count = self.value_to_usize(args[2])?;
                if count * elem_size != 0 {
                    // TODO: We do not even validate alignment for the 0-bytes case. libstd relies on this in vec::IntoIter::next.
                    // Also see the write_bytes intrinsic.
                    let elem_align = elem_layout.align;
                    let src = self.into_ptr(args[0].value)?;
                    let dest = self.into_ptr(args[1].value)?;
                    self.memory.copy(
                        src,
                        elem_align,
                        dest,
                        elem_align,
                        Size::from_bytes(count * elem_size),
                        intrinsic_name.ends_with("_nonoverlapping"),
                    )?;
                }
            }

            // Integer bit-twiddling intrinsics; the `_nonzero` variants are UB
            // on zero input, which miri surfaces as an `Intrinsic` error.
            "ctpop" | "cttz" | "cttz_nonzero" | "ctlz" | "ctlz_nonzero" | "bswap" => {
                let ty = substs.type_at(0);
                let num = self.value_to_scalar(args[0])?.to_bytes()?;
                let kind = match self.layout_of(ty)?.abi {
                    ty::layout::Abi::Scalar(ref scalar) => scalar.value,
                    _ => Err(::rustc::mir::interpret::EvalErrorKind::TypeNotPrimitive(ty))?,
                };
                let num = if intrinsic_name.ends_with("_nonzero") {
                    if num == 0 {
                        return err!(Intrinsic(format!("{} called on 0", intrinsic_name)));
                    }
                    numeric_intrinsic(intrinsic_name.trim_right_matches("_nonzero"), num, kind)?
                } else {
                    numeric_intrinsic(intrinsic_name, num, kind)?
                };
                self.write_scalar(dest, num, ty)?;
            }

            "discriminant_value" => {
                let ty = substs.type_at(0);
                let layout = self.layout_of(ty)?;
                let adt_ptr = self.into_ptr(args[0].value)?;
                let adt_align = self.layout_of(args[0].ty)?.align;
                let place = Place::from_scalar_ptr(adt_ptr, adt_align);
                let discr_val = self.read_discriminant_value(place, layout)?;
                self.write_scalar(dest, Scalar::from_u128(discr_val), dest_layout.ty)?;
            }

            // f32 math intrinsics, evaluated with the host's f32 operations.
            "sinf32" | "fabsf32" | "cosf32" | "sqrtf32" | "expf32" | "exp2f32" | "logf32" |
            "log10f32" | "log2f32" | "floorf32" | "ceilf32" | "truncf32" => {
                let f = self.value_to_scalar(args[0])?.to_bytes()?;
                let f = f32::from_bits(f as u32);
                let f = match intrinsic_name {
                    "sinf32" => f.sin(),
                    "fabsf32" => f.abs(),
                    "cosf32" => f.cos(),
                    "sqrtf32" => f.sqrt(),
                    "expf32" => f.exp(),
                    "exp2f32" => f.exp2(),
                    "logf32" => f.ln(),
                    "log10f32" => f.log10(),
                    "log2f32" => f.log2(),
                    "floorf32" => f.floor(),
                    "ceilf32" => f.ceil(),
                    "truncf32" => f.trunc(),
                    _ => bug!(),
                };
                self.write_scalar(dest, Scalar::from_f32(f), dest_layout.ty)?;
            }

            // f64 math intrinsics, same pattern as the f32 arm above.
            "sinf64" | "fabsf64" | "cosf64" | "sqrtf64" | "expf64" | "exp2f64" | "logf64" |
            "log10f64" | "log2f64" | "floorf64" | "ceilf64" | "truncf64" => {
                let f = self.value_to_scalar(args[0])?.to_bytes()?;
                let f = f64::from_bits(f as u64);
                let f = match intrinsic_name {
                    "sinf64" => f.sin(),
                    "fabsf64" => f.abs(),
                    "cosf64" => f.cos(),
                    "sqrtf64" => f.sqrt(),
                    "expf64" => f.exp(),
                    "exp2f64" => f.exp2(),
                    "logf64" => f.ln(),
                    "log10f64" => f.log10(),
                    "log2f64" => f.log2(),
                    "floorf64" => f.floor(),
                    "ceilf64" => f.ceil(),
                    "truncf64" => f.trunc(),
                    _ => bug!(),
                };
                self.write_scalar(dest, Scalar::from_f64(f), dest_layout.ty)?;
            }

            // "Fast" float ops: emulated as the ordinary (strict) operations.
            "fadd_fast" | "fsub_fast" | "fmul_fast" | "fdiv_fast" | "frem_fast" => {
                let ty = substs.type_at(0);
                let a = self.value_to_scalar(args[0])?;
                let b = self.value_to_scalar(args[1])?;
                let op = match intrinsic_name {
                    "fadd_fast" => mir::BinOp::Add,
                    "fsub_fast" => mir::BinOp::Sub,
                    "fmul_fast" => mir::BinOp::Mul,
                    "fdiv_fast" => mir::BinOp::Div,
                    "frem_fast" => mir::BinOp::Rem,
                    _ => bug!(),
                };
                let result = self.binary_op(op, a, ty, b, ty)?;
                self.write_scalar(dest, result.0, dest_layout.ty)?;
            }

            "exact_div" => {
                // Performs an exact division, resulting in undefined behavior where
                // `x % y != 0` or `y == 0` or `x == T::min_value() && y == -1`
                let ty = substs.type_at(0);
                let a = self.value_to_scalar(args[0])?;
                let b = self.value_to_scalar(args[1])?;
                // check x % y != 0
                if !self.binary_op(mir::BinOp::Rem, a, ty, b, ty)?.0.is_null()? {
                    return err!(ValidationFailure(format!("exact_div: {:?} cannot be divided by {:?}", a, b)));
                }
                let result = self.binary_op(mir::BinOp::Div, a, ty, b, ty)?;
                self.write_scalar(dest, result.0, dest_layout.ty)?;
            },

            // Pure hints / no-ops under interpretation.
            "likely" | "unlikely" | "forget" => {}

            // Zero-initialize the destination, whether it lives in a local or
            // in memory.
            "init" => {
                let size = dest_layout.size;
                let init = |this: &mut Self, val: Value| {
                    let zero_val = match val {
                        Value::ByRef(ptr, _) => {
                            // These writes have no alignment restriction anyway.
                            this.memory.write_repeat(ptr, 0, size)?;
                            val
                        }
                        // TODO(solson): Revisit this, it's fishy to check for Undef here.
                        Value::Scalar(Scalar::Bits { defined: 0, .. }) => {
                            match this.layout_of(dest_layout.ty)?.abi {
                                ty::layout::Abi::Scalar(_) => Value::Scalar(Scalar::null()),
                                _ => {
                                    // FIXME(oli-obk): pass TyLayout to alloc_ptr instead of Ty
                                    let ptr = this.alloc_ptr(dest_layout)?;
                                    let ptr = Scalar::Ptr(ptr);
                                    this.memory.write_repeat(ptr, 0, size)?;
                                    Value::ByRef(ptr, dest_layout.align)
                                }
                            }
                        }
                        Value::Scalar(_) => Value::Scalar(Scalar::null()),
                        Value::ScalarPair(..) => {
                            Value::ScalarPair(Scalar::null(), Scalar::null())
                        }
                    };
                    Ok(zero_val)
                };
                match dest {
                    Place::Local { frame, local } => self.modify_local(frame, local, init)?,
                    Place::Ptr {
                        ptr,
                        align: _align,
                        extra: PlaceExtra::None,
                    } => self.memory.write_repeat(ptr, 0, size)?,
                    Place::Ptr { .. } => {
                        bug!("init intrinsic tried to write to fat or unaligned ptr target")
                    }
                }
            }

            "min_align_of" => {
                let elem_ty = substs.type_at(0);
                let elem_align = self.layout_of(elem_ty)?.align.abi();
                let align_val = Scalar::from_u128(elem_align as u128);
                self.write_scalar(dest, align_val, dest_layout.ty)?;
            }

            "pref_align_of" => {
                let ty = substs.type_at(0);
                let layout = self.layout_of(ty)?;
                let align = layout.align.pref();
                let align_val = Scalar::from_u128(align as u128);
                self.write_scalar(dest, align_val, dest_layout.ty)?;
            }

            "move_val_init" => {
                let ty = substs.type_at(0);
                let ptr = self.into_ptr(args[0].value)?;
                let align = self.layout_of(args[0].ty)?.align;
                self.write_value_to_ptr(args[1].value, ptr, align, ty)?;
            }

            "needs_drop" => {
                let ty = substs.type_at(0);
                let env = ty::ParamEnv::reveal_all();
                let needs_drop = ty.needs_drop(self.tcx.tcx, env);
                self.write_scalar(
                    dest,
                    Scalar::from_bool(needs_drop),
                    dest_layout.ty,
                )?;
            }

            // Inbounds pointer offset (unlike `arith_offset` above).
            "offset" => {
                let offset = self.value_to_isize(args[1])?;
                let ptr = self.into_ptr(args[0].value)?;
                let result_ptr = self.pointer_offset(ptr, substs.type_at(0), offset)?;
                self.write_ptr(dest, result_ptr, dest_layout.ty)?;
            }

            // Wrapping arithmetic (result only, no overflow flag).
            "overflowing_sub" => {
                self.intrinsic_overflowing(
                    mir::BinOp::Sub,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?;
            }

            "overflowing_mul" => {
                self.intrinsic_overflowing(
                    mir::BinOp::Mul,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?;
            }

            "overflowing_add" => {
                self.intrinsic_overflowing(
                    mir::BinOp::Add,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?;
            }

            "powf32" => {
                let f = self.value_to_scalar(args[0])?.to_bits(Size::from_bits(32))?;
                let f = f32::from_bits(f as u32);
                let f2 = self.value_to_scalar(args[1])?.to_bits(Size::from_bits(32))?;
                let f2 = f32::from_bits(f2 as u32);
                self.write_scalar(
                    dest,
                    Scalar::from_f32(f.powf(f2)),
                    dest_layout.ty,
                )?;
            }

            "powf64" => {
                let f = self.value_to_scalar(args[0])?.to_bits(Size::from_bits(64))?;
                let f = f64::from_bits(f as u64);
                let f2 = self.value_to_scalar(args[1])?.to_bits(Size::from_bits(64))?;
                let f2 = f64::from_bits(f2 as u64);
                self.write_scalar(
                    dest,
                    Scalar::from_f64(f.powf(f2)),
                    dest_layout.ty,
                )?;
            }

            // Fused multiply-add, emulated as separate mul + add (so the
            // result is double-rounded, unlike a true hardware fma).
            "fmaf32" => {
                let a = self.value_to_scalar(args[0])?.to_bits(Size::from_bits(32))?;
                let a = f32::from_bits(a as u32);
                let b = self.value_to_scalar(args[1])?.to_bits(Size::from_bits(32))?;
                let b = f32::from_bits(b as u32);
                let c = self.value_to_scalar(args[2])?.to_bits(Size::from_bits(32))?;
                let c = f32::from_bits(c as u32);
                self.write_scalar(
                    dest,
                    Scalar::from_f32(a * b + c),
                    dest_layout.ty,
                )?;
            }

            "fmaf64" => {
                let a = self.value_to_scalar(args[0])?.to_bits(Size::from_bits(64))?;
                let a = f64::from_bits(a as u64);
                let b = self.value_to_scalar(args[1])?.to_bits(Size::from_bits(64))?;
                let b = f64::from_bits(b as u64);
                let c = self.value_to_scalar(args[2])?.to_bits(Size::from_bits(64))?;
                let c = f64::from_bits(c as u64);
                self.write_scalar(
                    dest,
                    Scalar::from_f64(a * b + c),
                    dest_layout.ty,
                )?;
            }

            "powif32" => {
                let f = self.value_to_scalar(args[0])?.to_bits(Size::from_bits(32))?;
                let f = f32::from_bits(f as u32);
                let i = self.value_to_i32(args[1])?;
                self.write_scalar(
                    dest,
                    Scalar::from_f32(f.powi(i)),
                    dest_layout.ty,
                )?;
            }

            "powif64" => {
                let f = self.value_to_scalar(args[0])?.to_bits(Size::from_bits(64))?;
                let f = f64::from_bits(f as u64);
                let i = self.value_to_i32(args[1])?;
                self.write_scalar(
                    dest,
                    Scalar::from_f64(f.powi(i)),
                    dest_layout.ty,
                )?;
            }

            "size_of" => {
                let ty = substs.type_at(0);
                let size = self.layout_of(ty)?.size.bytes().into();
                self.write_scalar(dest, Scalar::from_u128(size), dest_layout.ty)?;
            }

            // Dynamically-sized variant: size depends on the value (e.g. slices).
            "size_of_val" => {
                let ty = substs.type_at(0);
                let (size, _) = self.size_and_align_of_dst(ty, args[0].value)?;
                self.write_scalar(
                    dest,
                    Scalar::from_u128(size.bytes() as u128),
                    dest_layout.ty,
                )?;
            }

            "min_align_of_val" |
            "align_of_val" => {
                let ty = substs.type_at(0);
                let (_, align) = self.size_and_align_of_dst(ty, args[0].value)?;
                self.write_scalar(
                    dest,
                    Scalar::from_u128(align.abi() as u128),
                    dest_layout.ty,
                )?;
            }

            "type_name" => {
                let ty = substs.type_at(0);
                let ty_name = ty.to_string();
                let value = self.str_to_value(&ty_name)?;
                self.write_value(ValTy { value, ty: dest_layout.ty }, dest)?;
            }

            "type_id" => {
                let ty = substs.type_at(0);
                let n = self.tcx.type_id_hash(ty);
                self.write_scalar(dest, Scalar::Bits { bits: n as u128, defined: 64 }, dest_layout.ty)?;
            }

            // Transmute: write the source bytes directly into the (forced
            // in-memory) destination, using the destination type's alignment.
            "transmute" => {
                let src_ty = substs.type_at(0);
                let _src_align = self.layout_of(src_ty)?.align;
                let ptr = self.force_allocation(dest)?.to_ptr()?;
                let dest_align = self.layout_of(substs.type_at(1))?.align;
                // NOTE(review): `.unwrap()` here panics instead of propagating
                // the EvalResult like every other arm — confirm intentional.
                self.write_value_to_ptr(args[0].value, ptr.into(), dest_align, src_ty).unwrap();
            }

            // Unchecked ops: the overflow/zero preconditions are UB, so miri
            // checks them explicitly and reports an error when violated.
            "unchecked_shl" => {
                let bits = dest_layout.size.bytes() as u128 * 8;
                let rhs = self.value_to_scalar(args[1])?
                    .to_bytes()?;
                if rhs >= bits {
                    return err!(Intrinsic(
                        format!("Overflowing shift by {} in unchecked_shl", rhs),
                    ));
                }
                self.intrinsic_overflowing(
                    mir::BinOp::Shl,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?;
            }

            "unchecked_shr" => {
                let bits = dest_layout.size.bytes() as u128 * 8;
                let rhs = self.value_to_scalar(args[1])?
                    .to_bytes()?;
                if rhs >= bits {
                    return err!(Intrinsic(
                        format!("Overflowing shift by {} in unchecked_shr", rhs),
                    ));
                }
                self.intrinsic_overflowing(
                    mir::BinOp::Shr,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?;
            }

            "unchecked_div" => {
                let rhs = self.value_to_scalar(args[1])?
                    .to_bytes()?;
                if rhs == 0 {
                    return err!(Intrinsic(format!("Division by 0 in unchecked_div")));
                }
                self.intrinsic_overflowing(
                    mir::BinOp::Div,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?;
            }

            "unchecked_rem" => {
                let rhs = self.value_to_scalar(args[1])?
                    .to_bytes()?;
                if rhs == 0 {
                    return err!(Intrinsic(format!("Division by 0 in unchecked_rem")));
                }
                self.intrinsic_overflowing(
                    mir::BinOp::Rem,
                    args[0],
                    args[1],
                    dest,
                    dest_layout.ty,
                )?;
            }

            // Mark the destination as undefined (uninitialized) bytes.
            "uninit" => {
                let size = dest_layout.size;
                let uninit = |this: &mut Self, val: Value| match val {
                    Value::ByRef(ptr, _) => {
                        this.memory.mark_definedness(ptr, size, false)?;
                        Ok(val)
                    }
                    _ => Ok(Value::Scalar(Scalar::undef())),
                };
                match dest {
                    Place::Local { frame, local } => self.modify_local(frame, local, uninit)?,
                    Place::Ptr {
                        ptr,
                        align: _align,
                        extra: PlaceExtra::None,
                    } => self.memory.mark_definedness(ptr, size, false)?,
                    Place::Ptr { .. } => {
                        bug!("uninit intrinsic tried to write to fat or unaligned ptr target")
                    }
                }
            }

            // memset of `count` elements of `T` with the byte `val_byte`.
            "write_bytes" => {
                let ty = substs.type_at(0);
                let ty_layout = self.layout_of(ty)?;
                let val_byte = self.value_to_u8(args[1])?;
                let ptr = self.into_ptr(args[0].value)?;
                let count = self.value_to_usize(args[2])?;
                if count > 0 {
                    // HashMap relies on write_bytes on a NULL ptr with count == 0 to work
                    // TODO: Should we, at least, validate the alignment? (Also see the copy intrinsic)
                    self.memory.check_align(ptr, ty_layout.align)?;
                    self.memory.write_repeat(ptr, val_byte, ty_layout.size * count)?;
                }
            }

            name => return err!(Unimplemented(format!("unimplemented intrinsic: {}", name))),
        }

        self.goto_block(target);

        // Since we pushed no stack frame, the main loop will act
        // as if the call just completed and it's returning to the
        // current frame.
        Ok(())
    }
}
2016-09-29 09:42:01 -05:00
fn numeric_intrinsic<'tcx>(
name: &str,
2017-06-22 02:08:19 -05:00
bytes: u128,
2018-05-26 10:07:34 -05:00
kind: Primitive,
) -> EvalResult<'tcx, Scalar> {
macro_rules! integer_intrinsic {
2017-06-21 02:06:35 -05:00
($method:ident) => ({
let result_bytes = match kind {
2018-05-26 10:07:34 -05:00
Primitive::Int(I8, true) => (bytes as i8).$method() as u128,
Primitive::Int(I8, false) => (bytes as u8).$method() as u128,
Primitive::Int(I16, true) => (bytes as i16).$method() as u128,
Primitive::Int(I16, false) => (bytes as u16).$method() as u128,
Primitive::Int(I32, true) => (bytes as i32).$method() as u128,
Primitive::Int(I32, false) => (bytes as u32).$method() as u128,
Primitive::Int(I64, true) => (bytes as i64).$method() as u128,
Primitive::Int(I64, false) => (bytes as u64).$method() as u128,
Primitive::Int(I128, true) => (bytes as i128).$method() as u128,
Primitive::Int(I128, false) => bytes.$method() as u128,
2017-06-22 02:08:19 -05:00
_ => bug!("invalid `{}` argument: {:?}", name, bytes),
};
2018-05-26 10:07:34 -05:00
Scalar::from_u128(result_bytes)
});
}
let result_val = match name {
2017-06-21 02:06:35 -05:00
"bswap" => integer_intrinsic!(swap_bytes),
"ctlz" => integer_intrinsic!(leading_zeros),
2017-06-21 02:06:35 -05:00
"ctpop" => integer_intrinsic!(count_ones),
"cttz" => integer_intrinsic!(trailing_zeros),
_ => bug!("not a numeric intrinsic: {}", name),
};
Ok(result_val)
2016-09-29 09:42:01 -05:00
}