rust/src/operator.rs

use log::trace;

use rustc_middle::{mir, ty::Ty};
use rustc_target::abi::Size;

use crate::*;
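
/// Extension trait providing `binary_ptr_op`: the evaluation of binary
/// operators whose operands are pointers (comparisons, `Offset`, and the
/// integer operations that atomics may apply to pointers). Returns the result
/// value, an overflow flag, and the type of the result.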
pub trait EvalContextExt<'tcx> {
    fn binary_ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx, Provenance>,
        right: &ImmTy<'tcx, Provenance>,
    ) -> InterpResult<'tcx, (Scalar<Provenance>, bool, Ty<'tcx>)>;
}

impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
    fn binary_ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx, Provenance>,
        right: &ImmTy<'tcx, Provenance>,
    ) -> InterpResult<'tcx, (Scalar<Provenance>, bool, Ty<'tcx>)> {
        use rustc_middle::mir::BinOp::*;

        trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);

        Ok(match bin_op {
            Eq | Ne | Lt | Le | Gt | Ge => {
                assert_eq!(left.layout.abi, right.layout.abi); // types can differ, e.g. fn ptrs with different `for`
                let size = self.pointer_size();
                // Just compare the bits. ScalarPairs are compared lexicographically.
                // We thus always compare pairs and simply fill scalars up with 0.
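                // (`ScalarPair` operands arise for wide pointers such as `*const [T]` or
                // `*const dyn Trait`, whose second component is the length or vtable.)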
                let left = match **left {
                    Immediate::Scalar(l) => (l.check_init()?.to_bits(size)?, 0),
                    Immediate::ScalarPair(l1, l2) =>
                        (l1.check_init()?.to_bits(size)?, l2.check_init()?.to_bits(size)?),
                    Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
                };
                let right = match **right {
                    Immediate::Scalar(r) => (r.check_init()?.to_bits(size)?, 0),
                    Immediate::ScalarPair(r1, r2) =>
                        (r1.check_init()?.to_bits(size)?, r2.check_init()?.to_bits(size)?),
                    Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
                };
                let res = match bin_op {
                    Eq => left == right,
                    Ne => left != right,
                    Lt => left < right,
                    Le => left <= right,
                    Gt => left > right,
                    Ge => left >= right,
                    _ => bug!(),
                };
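                // Comparisons cannot overflow, so the overflow flag is always `false`.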
                (Scalar::from_bool(res), false, self.tcx.types.bool)
            }

            Offset => {
                assert!(left.layout.ty.is_unsafe_ptr());
                let ptr = left.to_scalar()?.to_pointer(self)?;
                let offset = right.to_scalar()?.to_machine_isize(self)?;

                let pointee_ty =
                    left.layout.ty.builtin_deref(true).expect("Offset called on non-ptr type").ty;
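                // `ptr_offset_inbounds` enforces the inbounds rules of `<*const T>::offset`:
                // the offset, scaled by the pointee size, must stay within the same
                // allocation, or UB is raised.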
                let ptr = self.ptr_offset_inbounds(ptr, pointee_ty, offset)?;
                (Scalar::from_maybe_pointer(ptr, self), false, left.layout.ty)
            }

            // Some more operations are possible with atomics.
            // The return value always has the provenance of the *left* operand.
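            // (These arise from atomic operations on pointer values, whose
            // read-modify-write intrinsics do integer arithmetic on the pointer's address.)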
            Add | Sub | BitOr | BitAnd | BitXor => {
                assert!(left.layout.ty.is_unsafe_ptr());
                assert!(right.layout.ty.is_unsafe_ptr());
                let ptr = left.to_scalar()?.to_pointer(self)?;
                // We do the actual operation with usize-typed scalars.
                let left = ImmTy::from_uint(ptr.addr().bytes(), self.machine.layouts.usize);
                let right = ImmTy::from_uint(
                    right.to_scalar()?.to_machine_usize(self)?,
                    self.machine.layouts.usize,
                );
                let (result, overflowing, _ty) =
                    self.overflowing_binary_op(bin_op, &left, &right)?;
                // Construct a new pointer with the provenance of `ptr` (the LHS).
                let result_ptr =
                    Pointer::new(ptr.provenance, Size::from_bytes(result.to_machine_usize(self)?));
                (Scalar::from_maybe_pointer(result_ptr, self), overflowing, left.layout.ty)
            }

            _ => span_bug!(self.cur_span(), "Invalid operator on pointers: {:?}", bin_op),
        })
    }
}