use rustc::ty::{Ty, layout::TyLayout};
use rustc::mir;

use super::*;

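/// Pointer-aware binary operations for the Miri evaluator: the cases that the
/// generic integer `binary_op` cannot handle because at least one operand is
/// (or may be) a pointer rather than plain bits.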
pub trait EvalContextExt<'tcx> {
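    /// Perform a binary operation where at least one operand is a pointer, or
    /// the operator is `Offset`. Like `binary_op`, returns the resulting value
    /// together with an overflow flag.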
    fn ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: Scalar,
        left_layout: TyLayout<'tcx>,
        right: Scalar,
        right_layout: TyLayout<'tcx>,
    ) -> EvalResult<'tcx, (Scalar, bool)>;

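    /// Perform an operation on a pointer and a pointer-sized integer, such as
    /// `ptr + 4` or `ptr & mask`.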
    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer,
        right: u128,
        signed: bool,
    ) -> EvalResult<'tcx, (Scalar, bool)>;

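    /// Test two pointer-sized scalars for equality, erroring out when the
    /// answer would depend on the absolute addresses the allocations end up at.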
    fn ptr_eq(
        &self,
        left: Scalar,
        right: Scalar,
        size: Size,
    ) -> EvalResult<'tcx, bool>;

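    /// Offset `ptr` by `offset` elements of type `pointee_ty`, erroring out if
    /// this would leave the allocation `ptr` points into.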
    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> EvalResult<'tcx, Scalar>;
}

impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
    fn ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: Scalar,
        left_layout: TyLayout<'tcx>,
        right: Scalar,
        right_layout: TyLayout<'tcx>,
    ) -> EvalResult<'tcx, (Scalar, bool)> {
        use rustc::mir::BinOp::*;

        trace!("ptr_op: {:?} {:?} {:?}", left, bin_op, right);
        debug_assert!(left.is_ptr() || right.is_ptr() || bin_op == Offset);

        match bin_op {
            Offset => {
                let pointee_ty = left_layout.ty
                    .builtin_deref(true)
                    .expect("Offset called on non-ptr type")
                    .ty;
                let ptr = self.pointer_offset_inbounds(
                    left,
                    pointee_ty,
                    right.to_isize(self)?,
                )?;
                Ok((ptr, false))
            }
            // These work on anything.
            Eq =>
                Ok((Scalar::from_bool(self.ptr_eq(left, right, left_layout.size)?), false)),
            Ne =>
                Ok((Scalar::from_bool(!self.ptr_eq(left, right, left_layout.size)?), false)),
            // These need both operands to be pointers, and fail if the pointers do not
            // point into the same allocation.
            Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
                let left = left.to_ptr().expect("we checked is_ptr");
                let right = right.to_ptr().expect("we checked is_ptr");
                if left.alloc_id == right.alloc_id {
                    let res = match bin_op {
                        Lt => left.offset < right.offset,
                        Le => left.offset <= right.offset,
                        Gt => left.offset > right.offset,
                        Ge => left.offset >= right.offset,
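                        // E.g. for two pointers into the same allocation at byte
                        // offsets 7 and 2, `Sub` computes 7 - 2 = 5 (a byte distance).
                        // If `left` sits *before* `right`, the `usize` subtraction
                        // below underflows and the returned overflow flag is set.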
                        Sub => {
                            // subtract the offsets
                            let left_offset = Scalar::from_uint(
                                left.offset.bytes(),
                                self.memory.pointer_size(),
                            );
                            let right_offset = Scalar::from_uint(
                                right.offset.bytes(),
                                self.memory.pointer_size(),
                            );
                            let layout = self.layout_of(self.tcx.types.usize)?;
                            return self.binary_op(
                                Sub,
                                left_offset, layout,
                                right_offset, layout,
                            )
                        }
                        _ => bug!("We already established it has to be one of these operators."),
                    };
                    Ok((Scalar::from_bool(res), false))
                } else {
                    // Both are pointers, but from different allocations.
                    err!(InvalidPointerMath)
                }
            }
            // These work if the left operand is a pointer, and the right an integer.
            Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
                // Cast to i128 is fine as we checked the kind to be ptr-sized.
                self.ptr_int_arithmetic(
                    bin_op,
                    left.to_ptr().expect("we checked is_ptr"),
                    right.to_bits(self.memory.pointer_size()).expect("we checked is_bits"),
                    right_layout.abi.is_signed(),
                )
            }
            // Commutative operators also work if the integer is on the left.
            Add | BitAnd if left.is_bits() && right.is_ptr() => {
                // This is a commutative operation, just swap the operands.
                self.ptr_int_arithmetic(
                    bin_op,
                    right.to_ptr().expect("we checked is_ptr"),
                    left.to_bits(self.memory.pointer_size()).expect("we checked is_bits"),
                    left_layout.abi.is_signed(),
                )
            }
            // Nothing else works.
            _ => err!(InvalidPointerMath),
        }
    }

    fn ptr_eq(
        &self,
        left: Scalar,
        right: Scalar,
        size: Size,
    ) -> EvalResult<'tcx, bool> {
        Ok(match (left, right) {
            (Scalar::Bits { .. }, Scalar::Bits { .. }) =>
                left.to_bits(size)? == right.to_bits(size)?,
            (Scalar::Ptr(left), Scalar::Ptr(right)) => {
                // Comparison is illegal if one of them is out-of-bounds, *unless* they
                // are in the same allocation.
                if left.alloc_id == right.alloc_id {
                    left.offset == right.offset
                } else {
                    // This accepts one-past-the-end pointers. So technically there is
                    // still some non-determinism that we do not fully rule out when two
                    // allocations sit right next to each other. The C/C++ standards are
                    // somewhat fuzzy about this case, so I think for now this check is
                    // "good enough".
                    self.memory.check_bounds(left, false)?;
                    self.memory.check_bounds(right, false)?;
                    // Two live in-bounds pointers, we can compare across allocations.
                    left == right
                }
            }
            // Comparing a pointer and an integer -- we allow comparing with NULL, and
            // with addresses so close to the end of the `usize` range that they cannot
            // overlap with an allocation of the given size.
            (Scalar::Ptr(ptr), Scalar::Bits { bits, size }) |
            (Scalar::Bits { bits, size }, Scalar::Ptr(ptr)) => {
                assert_eq!(size as u64, self.pointer_size().bytes());
                if bits == 0 {
                    // Nothing equals 0: a pointer into an allocation is never NULL.
                    false
                } else {
                    // Compute the highest address at which this allocation could live.
                    let alloc = self.memory.get(ptr.alloc_id)?;
                    let max_base_addr =
                        (1u128 << self.pointer_size().bits()) - alloc.bytes.len() as u128;
                    let max_addr = max_base_addr + ptr.offset.bytes() as u128;
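                    // E.g. on a 64-bit target, a 100-byte allocation can start no
                    // higher than 2^64 - 100; a pointer at byte offset 3 into it
                    // therefore has address at most (2^64 - 100) + 3 = max_addr.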
                    if bits > max_addr {
                        // The integer is too big, this cannot possibly be equal.
                        false
                    } else {
                        // The integer might or might not equal the pointer's actual
                        // address, which we do not know -- refuse to guess.
                        // TODO: We could also take alignment into account.
                        return err!(InvalidPointerMath);
                    }
                }
            }
        })
    }

    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer,
        right: u128,
        signed: bool,
    ) -> EvalResult<'tcx, (Scalar, bool)> {
        use rustc::mir::BinOp::*;

        fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool) {
            (Scalar::Ptr(res), over)
        }

        Ok(match bin_op {
            Sub =>
                // The only way this can overflow is by underflowing, so the
                // signedness of the right operand does not matter.
                map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
            Add if signed =>
                map_to_primval(left.overflowing_signed_offset(right as i128, self)),
            Add if !signed =>
                map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),

            BitAnd if !signed => {
                let ptr_base_align = self.memory.get(left.alloc_id)?.align.abi();
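                // The allocation's base address is only known to be aligned, i.e. its
                // low log2(align) bits are zero. E.g. for an 8-byte-aligned allocation,
                // base_mask below is ...11111000 -- set exactly on the bits where the
                // base address is unknown.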
                let base_mask = {
                    // FIXME: Use interpret::truncate, once that takes a Size instead of a Layout
                    let shift = 128 - self.memory.pointer_size().bits();
                    let value = !(ptr_base_align as u128 - 1);
                    // Truncate to pointer size: shift left to drop the high bits, then
                    // shift right to fill them back in with zeroes.
                    (value << shift) >> shift
                };
                let ptr_size = self.memory.pointer_size().bytes() as u8;
                trace!("Ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
                    ptr_base_align, right, base_mask);
                if right & base_mask == base_mask {
                    // Case 1: The base address bits are all preserved, i.e., right is
                    // all-1 there; the result is a pointer again.
                    let offset = (left.offset.bytes() as u128 & right) as u64;
                    (Scalar::Ptr(Pointer::new(left.alloc_id, Size::from_bytes(offset))), false)
                } else if right & base_mask == 0 {
                    // Case 2: The base address bits are all taken away, i.e., right is
                    // all-0 there; the result is an integer, computed from the offset alone.
                    (Scalar::Bits { bits: (left.offset.bytes() as u128) & right, size: ptr_size }, false)
                } else {
                    // The mask preserves some, but not all, of the unknown base address
                    // bits -- we cannot compute the result.
                    return err!(ReadPointerAsBytes);
                }
            }

            Rem if !signed => {
                // Doing modulo a divisor of the alignment is allowed.
                // (Intuition: modulo a divisor leaks less information.)
                let ptr_base_align = self.memory.get(left.alloc_id)?.align.abi();
                let right = right as u64;
                let ptr_size = self.memory.pointer_size().bytes() as u8;
                if right == 1 {
                    // Modulo 1 is always 0.
                    (Scalar::Bits { bits: 0, size: ptr_size }, false)
                } else if ptr_base_align % right == 0 {
                    // The base address would be cancelled out by the modulo operation,
                    // so we can just take the modulo of the offset.
                    (Scalar::Bits { bits: (left.offset.bytes() % right) as u128, size: ptr_size }, false)
                } else {
                    // The result would depend on the base address, which we do not know.
                    return err!(ReadPointerAsBytes);
                }
            }

            _ => {
                let msg = format!(
                    "unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
                    bin_op,
                    left,
                    right,
                    if signed { "signed" } else { "unsigned" },
                );
                return err!(Unimplemented(msg));
            }
        })
    }

    /// This function raises an error if the offset moves the pointer outside of its
    /// allocation. We consider ZSTs their own huge allocation that doesn't overlap with
    /// anything (and nothing moves in there because the size is 0). We also consider the
    /// NULL pointer its own separate allocation, and all remaining integer pointers their
    /// own allocations.
    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> EvalResult<'tcx, Scalar> {
        if ptr.is_null() {
            // NULL pointers must only be offset by 0.
            return if offset == 0 {
                Ok(ptr)
            } else {
                err!(InvalidNullPointerUsage)
            };
        }
        // FIXME: assuming here that type size is < i64::max_value()
        let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
        let offset = offset
            .checked_mul(pointee_size)
            .ok_or_else(|| EvalErrorKind::Overflow(mir::BinOp::Mul))?;
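        // E.g. offsetting a `*const u32` by 3 elements moves it by 3 * 4 = 12 bytes;
        // the element-count-to-bytes multiplication itself must not overflow `i64`.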
        // Now let's see what kind of pointer this is.
        if let Scalar::Ptr(ptr) = ptr {
            // Both old and new pointer must be in-bounds.
            // (Of the same allocation, but that part is trivial with our representation.)
            self.memory.check_bounds(ptr, false)?;
            let ptr = ptr.signed_offset(offset, self)?;
            self.memory.check_bounds(ptr, false)?;
            Ok(Scalar::Ptr(ptr))
        } else {
            // An integer pointer. They can move around freely, as long as they do not
            // overflow (which `ptr_signed_offset` checks).
            ptr.ptr_signed_offset(offset, self)
        }
    }
}