2019-06-27 16:59:00 -05:00
|
|
|
use rustc::ty::{Ty, layout::{Size, LayoutOf}};
|
2017-07-25 04:32:48 -05:00
|
|
|
use rustc::mir;
|
|
|
|
|
2018-11-01 02:56:41 -05:00
|
|
|
use crate::*;
|
2017-07-25 04:32:48 -05:00
|
|
|
|
|
|
|
/// Extension trait adding pointer-arithmetic support to the Miri evaluation
/// context. Implemented below for `super::MiriEvalContext`.
pub trait EvalContextExt<'tcx> {
    /// Test if the pointer is in-bounds of a live allocation.
    /// Errors if the allocation is dead or the pointer is out of bounds.
    fn pointer_inbounds(
        &self,
        ptr: Pointer<Tag>
    ) -> InterpResult<'tcx>;

    /// Evaluate a binary MIR operation where at least one operand is
    /// (or may be) a pointer. Returns the result scalar and an
    /// overflow flag (as MIR checked operations expect).
    fn binary_ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: ImmTy<'tcx, Tag>,
        right: ImmTy<'tcx, Tag>,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    /// Evaluate `bin_op` between a pointer (`left`) and a plain integer
    /// (`right`). `signed` is the signedness of the integer operand's type.
    /// Returns the result and an overflow flag.
    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer<Tag>,
        right: u128,
        signed: bool,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    /// Test two scalars (thin pointers or integers) for equality.
    fn ptr_eq(
        &self,
        left: Scalar<Tag>,
        right: Scalar<Tag>,
    ) -> InterpResult<'tcx, bool>;

    /// Compute `ptr + offset * size_of::<pointee_ty>()`, enforcing that both
    /// the старт and the resulting pointer stay in-bounds of a live allocation.
    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar<Tag>,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> InterpResult<'tcx, Scalar<Tag>>;
}
|
|
|
|
|
2019-06-13 01:52:04 -05:00
|
|
|
impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
|
2019-06-23 10:26:12 -05:00
|
|
|
/// Test if the pointer is in-bounds of a live allocation.
|
|
|
|
#[inline]
|
|
|
|
fn pointer_inbounds(&self, ptr: Pointer<Tag>) -> InterpResult<'tcx> {
|
|
|
|
let (size, _align) = self.memory().get_size_and_align(ptr.alloc_id, AllocCheck::Live)?;
|
|
|
|
ptr.check_in_alloc(size, CheckInAllocMsg::InboundsTest)
|
|
|
|
}
|
|
|
|
|
2019-07-24 09:17:49 -05:00
|
|
|
fn binary_ptr_op(
|
2017-07-25 04:32:48 -05:00
|
|
|
&self,
|
|
|
|
bin_op: mir::BinOp,
|
2019-04-15 08:36:09 -05:00
|
|
|
left: ImmTy<'tcx, Tag>,
|
|
|
|
right: ImmTy<'tcx, Tag>,
|
2019-06-08 15:14:47 -05:00
|
|
|
) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
|
2018-08-28 11:13:58 -05:00
|
|
|
use rustc::mir::BinOp::*;
|
|
|
|
|
2019-02-08 09:27:00 -06:00
|
|
|
trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
|
|
|
|
|
|
|
|
// Operations that support fat pointers
|
|
|
|
match bin_op {
|
|
|
|
Eq | Ne => {
|
|
|
|
let eq = match (*left, *right) {
|
|
|
|
(Immediate::Scalar(left), Immediate::Scalar(right)) =>
|
|
|
|
self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
|
|
|
|
(Immediate::ScalarPair(left1, left2), Immediate::ScalarPair(right1, right2)) =>
|
|
|
|
self.ptr_eq(left1.not_undef()?, right1.not_undef()?)? &&
|
|
|
|
self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
|
|
|
|
_ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
|
|
|
|
};
|
|
|
|
return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
|
|
|
|
}
|
|
|
|
_ => {},
|
|
|
|
}
|
|
|
|
|
2019-02-15 19:29:38 -06:00
|
|
|
// Now we expect no more fat pointers.
|
2019-02-08 07:01:40 -06:00
|
|
|
let left_layout = left.layout;
|
|
|
|
let left = left.to_scalar()?;
|
|
|
|
let right_layout = right.layout;
|
|
|
|
let right = right.to_scalar()?;
|
2018-07-15 14:25:03 -05:00
|
|
|
|
2019-08-03 03:25:55 -05:00
|
|
|
Ok(match bin_op {
|
2018-08-15 14:01:40 -05:00
|
|
|
Offset => {
|
|
|
|
let pointee_ty = left_layout.ty
|
2018-01-14 11:59:13 -06:00
|
|
|
.builtin_deref(true)
|
2017-08-10 10:48:38 -05:00
|
|
|
.expect("Offset called on non-ptr type")
|
|
|
|
.ty;
|
2018-08-15 14:01:40 -05:00
|
|
|
let ptr = self.pointer_offset_inbounds(
|
2018-07-10 10:32:38 -05:00
|
|
|
left,
|
2017-08-10 10:48:38 -05:00
|
|
|
pointee_ty,
|
2018-08-28 11:13:58 -05:00
|
|
|
right.to_isize(self)?,
|
2017-08-10 10:48:38 -05:00
|
|
|
)?;
|
2019-08-03 03:25:55 -05:00
|
|
|
(ptr, false)
|
2017-08-10 10:48:38 -05:00
|
|
|
}
|
2018-08-28 11:13:58 -05:00
|
|
|
// These need both to be pointer, and fail if they are not in the same location
|
|
|
|
Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
|
2019-07-24 09:17:49 -05:00
|
|
|
let left = left.assert_ptr();
|
|
|
|
let right = right.assert_ptr();
|
2017-07-25 04:32:48 -05:00
|
|
|
if left.alloc_id == right.alloc_id {
|
|
|
|
let res = match bin_op {
|
|
|
|
Lt => left.offset < right.offset,
|
|
|
|
Le => left.offset <= right.offset,
|
|
|
|
Gt => left.offset > right.offset,
|
|
|
|
Ge => left.offset >= right.offset,
|
2017-08-10 10:48:38 -05:00
|
|
|
Sub => {
|
2018-08-28 11:13:58 -05:00
|
|
|
// subtract the offsets
|
2018-10-19 12:51:41 -05:00
|
|
|
let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
|
|
|
|
let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
|
2018-08-15 14:01:40 -05:00
|
|
|
let layout = self.layout_of(self.tcx.types.usize)?;
|
2017-08-10 10:48:38 -05:00
|
|
|
return self.binary_op(
|
|
|
|
Sub,
|
2019-02-08 07:01:40 -06:00
|
|
|
ImmTy::from_scalar(left_offset, layout),
|
|
|
|
ImmTy::from_scalar(right_offset, layout),
|
2018-08-28 11:13:58 -05:00
|
|
|
)
|
2017-08-10 10:48:38 -05:00
|
|
|
}
|
2017-07-25 04:32:48 -05:00
|
|
|
_ => bug!("We already established it has to be one of these operators."),
|
|
|
|
};
|
2019-08-03 03:25:55 -05:00
|
|
|
(Scalar::from_bool(res), false)
|
2017-07-25 04:32:48 -05:00
|
|
|
} else {
|
|
|
|
// Both are pointers, but from different allocations.
|
2019-08-03 03:25:55 -05:00
|
|
|
throw_unsup!(InvalidPointerMath)
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
|
|
|
}
|
2019-07-24 09:17:49 -05:00
|
|
|
Lt | Le | Gt | Ge if left.is_bits() && right.is_bits() => {
|
|
|
|
let left = left.assert_bits(self.memory().pointer_size());
|
|
|
|
let right = right.assert_bits(self.memory().pointer_size());
|
|
|
|
let res = match bin_op {
|
|
|
|
Lt => left < right,
|
|
|
|
Le => left <= right,
|
|
|
|
Gt => left > right,
|
|
|
|
Ge => left >= right,
|
|
|
|
_ => bug!("We already established it has to be one of these operators."),
|
|
|
|
};
|
|
|
|
Ok((Scalar::from_bool(res), false))
|
|
|
|
}
|
2019-06-20 09:38:55 -05:00
|
|
|
Gt | Ge if left.is_ptr() && right.is_bits() => {
|
|
|
|
// "ptr >[=] integer" can be tested if the integer is small enough.
|
2019-07-24 09:17:49 -05:00
|
|
|
let left = left.assert_ptr();
|
|
|
|
let right = right.assert_bits(self.memory().pointer_size());
|
2019-06-20 09:38:55 -05:00
|
|
|
let (_alloc_size, alloc_align) = self.memory()
|
2019-06-23 10:26:12 -05:00
|
|
|
.get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
|
|
|
|
.expect("alloc info with MaybeDead cannot fail");
|
2019-06-20 09:38:55 -05:00
|
|
|
let min_ptr_val = u128::from(alloc_align.bytes()) + u128::from(left.offset.bytes());
|
|
|
|
let result = match bin_op {
|
|
|
|
Gt => min_ptr_val > right,
|
|
|
|
Ge => min_ptr_val >= right,
|
|
|
|
_ => bug!(),
|
|
|
|
};
|
|
|
|
if result {
|
|
|
|
// Definitely true!
|
2019-08-03 03:25:55 -05:00
|
|
|
(Scalar::from_bool(true), false)
|
2019-06-20 09:38:55 -05:00
|
|
|
} else {
|
|
|
|
// Sorry, can't tell.
|
2019-08-03 03:25:55 -05:00
|
|
|
throw_unsup!(InvalidPointerMath)
|
2019-06-20 09:38:55 -05:00
|
|
|
}
|
|
|
|
}
|
2018-08-28 11:13:58 -05:00
|
|
|
// These work if the left operand is a pointer, and the right an integer
|
|
|
|
Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
|
2017-07-25 04:32:48 -05:00
|
|
|
// Cast to i128 is fine as we checked the kind to be ptr-sized
|
2017-08-10 10:48:38 -05:00
|
|
|
self.ptr_int_arithmetic(
|
|
|
|
bin_op,
|
2019-07-24 09:17:49 -05:00
|
|
|
left.assert_ptr(),
|
|
|
|
right.assert_bits(self.memory().pointer_size()),
|
2018-08-28 11:13:58 -05:00
|
|
|
right_layout.abi.is_signed(),
|
2019-08-03 03:25:55 -05:00
|
|
|
)?
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
2018-08-14 13:00:18 -05:00
|
|
|
// Commutative operators also work if the integer is on the left
|
2018-08-28 11:13:58 -05:00
|
|
|
Add | BitAnd if left.is_bits() && right.is_ptr() => {
|
2017-07-25 04:32:48 -05:00
|
|
|
// This is a commutative operation, just swap the operands
|
2017-08-10 10:48:38 -05:00
|
|
|
self.ptr_int_arithmetic(
|
|
|
|
bin_op,
|
2019-07-24 09:17:49 -05:00
|
|
|
right.assert_ptr(),
|
|
|
|
left.assert_bits(self.memory().pointer_size()),
|
2018-08-28 11:13:58 -05:00
|
|
|
left_layout.abi.is_signed(),
|
2019-08-03 03:25:55 -05:00
|
|
|
)?
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
2018-08-28 11:13:58 -05:00
|
|
|
// Nothing else works
|
2019-08-03 03:25:55 -05:00
|
|
|
_ => throw_unsup!(InvalidPointerMath),
|
|
|
|
})
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
|
|
|
|
2018-08-28 11:13:58 -05:00
|
|
|
fn ptr_eq(
|
|
|
|
&self,
|
2019-04-15 08:36:09 -05:00
|
|
|
left: Scalar<Tag>,
|
|
|
|
right: Scalar<Tag>,
|
2019-06-08 15:14:47 -05:00
|
|
|
) -> InterpResult<'tcx, bool> {
|
2019-02-08 09:27:00 -06:00
|
|
|
let size = self.pointer_size();
|
2019-07-23 14:38:53 -05:00
|
|
|
// Just compare the integers.
|
|
|
|
// TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
|
|
|
|
let left = self.force_bits(left, size)?;
|
|
|
|
let right = self.force_bits(right, size)?;
|
|
|
|
Ok(left == right)
|
2018-08-28 11:13:58 -05:00
|
|
|
}
|
|
|
|
|
2017-07-25 04:32:48 -05:00
|
|
|
fn ptr_int_arithmetic(
|
|
|
|
&self,
|
|
|
|
bin_op: mir::BinOp,
|
2019-04-15 08:36:09 -05:00
|
|
|
left: Pointer<Tag>,
|
2018-08-14 13:25:56 -05:00
|
|
|
right: u128,
|
2017-07-25 04:32:48 -05:00
|
|
|
signed: bool,
|
2019-06-08 15:14:47 -05:00
|
|
|
) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
|
2017-07-25 04:32:48 -05:00
|
|
|
use rustc::mir::BinOp::*;
|
|
|
|
|
2019-04-15 08:36:09 -05:00
|
|
|
fn map_to_primval((res, over): (Pointer<Tag>, bool)) -> (Scalar<Tag>, bool) {
|
2018-05-26 10:07:34 -05:00
|
|
|
(Scalar::Ptr(res), over)
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
Ok(match bin_op {
|
|
|
|
Sub =>
|
2019-02-15 19:29:38 -06:00
|
|
|
// The only way this can overflow is by underflowing, so signdeness of the right
|
|
|
|
// operands does not matter.
|
2018-08-14 13:25:56 -05:00
|
|
|
map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
|
2017-07-25 04:32:48 -05:00
|
|
|
Add if signed =>
|
2018-08-14 13:25:56 -05:00
|
|
|
map_to_primval(left.overflowing_signed_offset(right as i128, self)),
|
2017-07-25 04:32:48 -05:00
|
|
|
Add if !signed =>
|
2018-05-20 04:26:40 -05:00
|
|
|
map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),
|
2017-07-25 04:32:48 -05:00
|
|
|
|
|
|
|
BitAnd if !signed => {
|
2019-06-30 17:12:45 -05:00
|
|
|
let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
|
|
|
|
.expect("alloc info with MaybeDead cannot fail")
|
|
|
|
.1.bytes();
|
2018-08-14 13:25:56 -05:00
|
|
|
let base_mask = {
|
2019-02-15 19:29:38 -06:00
|
|
|
// FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
|
2018-10-19 12:51:41 -05:00
|
|
|
let shift = 128 - self.memory().pointer_size().bits();
|
2018-08-14 13:25:56 -05:00
|
|
|
let value = !(ptr_base_align as u128 - 1);
|
2019-02-15 19:29:38 -06:00
|
|
|
// Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
|
2018-08-14 13:25:56 -05:00
|
|
|
(value << shift) >> shift
|
|
|
|
};
|
2019-05-26 07:43:34 -05:00
|
|
|
let ptr_size = self.memory().pointer_size();
|
2019-02-15 19:29:38 -06:00
|
|
|
trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
|
2018-08-14 13:25:56 -05:00
|
|
|
ptr_base_align, right, base_mask);
|
2017-07-25 04:32:48 -05:00
|
|
|
if right & base_mask == base_mask {
|
2019-02-15 19:29:38 -06:00
|
|
|
// Case 1: the base address bits are all preserved, i.e., right is all-1 there.
|
2018-08-14 13:25:56 -05:00
|
|
|
let offset = (left.offset.bytes() as u128 & right) as u64;
|
2018-10-16 04:21:38 -05:00
|
|
|
(
|
|
|
|
Scalar::Ptr(Pointer::new_with_tag(
|
|
|
|
left.alloc_id,
|
|
|
|
Size::from_bytes(offset),
|
|
|
|
left.tag,
|
|
|
|
)),
|
|
|
|
false,
|
|
|
|
)
|
2017-07-25 04:32:48 -05:00
|
|
|
} else if right & base_mask == 0 {
|
2019-02-15 19:29:38 -06:00
|
|
|
// Case 2: the base address bits are all taken away, i.e., right is all-0 there.
|
2019-05-26 07:43:34 -05:00
|
|
|
let v = Scalar::from_uint((left.offset.bytes() as u128) & right, ptr_size);
|
|
|
|
(v, false)
|
2017-07-25 04:32:48 -05:00
|
|
|
} else {
|
2019-08-03 03:25:55 -05:00
|
|
|
throw_unsup!(ReadPointerAsBytes);
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-14 13:00:18 -05:00
|
|
|
Rem if !signed => {
|
2018-08-14 05:16:29 -05:00
|
|
|
// Doing modulo a divisor of the alignment is allowed.
|
2019-02-15 19:29:38 -06:00
|
|
|
// (Intuition: modulo a divisor leaks less information.)
|
2019-06-30 17:12:45 -05:00
|
|
|
let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
|
|
|
|
.expect("alloc info with MaybeDead cannot fail")
|
|
|
|
.1.bytes();
|
2018-08-14 13:00:18 -05:00
|
|
|
let right = right as u64;
|
2019-05-26 07:43:34 -05:00
|
|
|
let ptr_size = self.memory().pointer_size();
|
2018-08-14 13:00:18 -05:00
|
|
|
if right == 1 {
|
2019-02-15 19:29:38 -06:00
|
|
|
// Modulo 1 is always 0.
|
2019-05-26 07:43:34 -05:00
|
|
|
(Scalar::from_uint(0u32, ptr_size), false)
|
2018-08-14 05:16:29 -05:00
|
|
|
} else if ptr_base_align % right == 0 {
|
2019-02-15 19:29:38 -06:00
|
|
|
// The base address would be cancelled out by the modulo operation, so we can
|
|
|
|
// just take the modulo of the offset.
|
|
|
|
(
|
2019-05-26 07:43:34 -05:00
|
|
|
Scalar::from_uint((left.offset.bytes() % right) as u128, ptr_size),
|
2019-02-15 19:29:38 -06:00
|
|
|
false,
|
|
|
|
)
|
2017-07-25 04:32:48 -05:00
|
|
|
} else {
|
2019-08-03 03:25:55 -05:00
|
|
|
throw_unsup!(ReadPointerAsBytes);
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
_ => {
|
2019-02-15 19:29:38 -06:00
|
|
|
let msg = format!(
|
|
|
|
"unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
|
|
|
|
bin_op,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
if signed { "signed" } else { "unsigned" }
|
|
|
|
);
|
2019-08-03 03:25:55 -05:00
|
|
|
throw_unsup!(Unimplemented(msg));
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
2018-08-15 14:01:40 -05:00
|
|
|
|
2019-02-15 19:29:38 -06:00
|
|
|
/// Raises an error if the offset moves the pointer outside of its allocation.
|
|
|
|
/// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
|
|
|
|
/// moves in there because the size is 0). We also consider the NULL pointer its own separate
|
|
|
|
/// allocation, and all the remaining integers pointers their own allocation.
|
2018-08-15 14:01:40 -05:00
|
|
|
fn pointer_offset_inbounds(
|
|
|
|
&self,
|
2019-04-15 08:36:09 -05:00
|
|
|
ptr: Scalar<Tag>,
|
2018-08-15 14:01:40 -05:00
|
|
|
pointee_ty: Ty<'tcx>,
|
|
|
|
offset: i64,
|
2019-06-08 15:14:47 -05:00
|
|
|
) -> InterpResult<'tcx, Scalar<Tag>> {
|
2019-02-15 19:29:38 -06:00
|
|
|
// FIXME: assuming here that type size is less than `i64::max_value()`.
|
2018-08-15 14:01:40 -05:00
|
|
|
let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
|
2019-02-15 19:29:38 -06:00
|
|
|
let offset = offset
|
|
|
|
.checked_mul(pointee_size)
|
2019-08-03 03:25:55 -05:00
|
|
|
.ok_or_else(|| err_panic!(Overflow(mir::BinOp::Mul)))?;
|
2019-02-15 19:29:38 -06:00
|
|
|
// Now let's see what kind of pointer this is.
|
2019-07-03 03:54:16 -05:00
|
|
|
let ptr = if offset == 0 {
|
|
|
|
match ptr {
|
|
|
|
Scalar::Ptr(ptr) => ptr,
|
|
|
|
Scalar::Raw { .. } => {
|
|
|
|
// Offset 0 on an integer. We accept that, pretending there is
|
|
|
|
// a little zero-sized allocation here.
|
|
|
|
return Ok(ptr);
|
|
|
|
}
|
2018-10-08 03:22:26 -05:00
|
|
|
}
|
2019-07-03 03:54:16 -05:00
|
|
|
} else {
|
|
|
|
// Offset > 0. We *require* a pointer.
|
|
|
|
self.force_ptr(ptr)?
|
|
|
|
};
|
|
|
|
// Both old and new pointer must be in-bounds of a *live* allocation.
|
|
|
|
// (Of the same allocation, but that part is trivial with our representation.)
|
|
|
|
self.pointer_inbounds(ptr)?;
|
|
|
|
let ptr = ptr.signed_offset(offset, self)?;
|
|
|
|
self.pointer_inbounds(ptr)?;
|
|
|
|
Ok(Scalar::Ptr(ptr))
|
2018-08-15 14:01:40 -05:00
|
|
|
}
|
2017-07-25 04:32:48 -05:00
|
|
|
}
|