rust/src/librustc_mir/transform/const_prop.rs

//! Propagates constants for early reporting of statically known
//! assertion failures.
use rustc::hir::def::DefKind;
use rustc::mir::{
AggregateKind, Constant, Location, Place, PlaceBase, Mir, Operand, Rvalue, Local,
NullOp, UnOp, StatementKind, Statement, LocalKind, Static, StaticKind,
TerminatorKind, Terminator, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem,
SourceScope, SourceScopeLocalData, LocalDecl, Promoted,
};
use rustc::mir::visit::{
Visitor, PlaceContext, MutatingUseContext, MutVisitor, NonMutatingUseContext,
};
use rustc::mir::interpret::{InterpError, Scalar, GlobalId, EvalResult};
use rustc::ty::{self, Instance, ParamEnv, Ty, TyCtxt};
use syntax_pos::{Span, DUMMY_SP};
use rustc::ty::subst::InternalSubsts;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::ty::layout::{
LayoutOf, TyLayout, LayoutError,
HasTyCtxt, TargetDataLayout, HasDataLayout,
};
use crate::interpret::{self, InterpretCx, ScalarMaybeUndef, Immediate, OpTy, ImmTy, MemoryKind};
use crate::const_eval::{
CompileTimeInterpreter, error_to_const_error, eval_promoted, mk_eval_cx,
};
use crate::transform::{MirPass, MirSource};
pub struct ConstProp;
impl MirPass for ConstProp {
fn run_pass<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
source: MirSource<'tcx>,
mir: &mut Mir<'tcx>) {
// will be evaluated by miri and produce its errors there
if source.promoted.is_some() {
return;
}
use rustc::hir::map::blocks::FnLikeNode;
let hir_id = tcx.hir().as_local_hir_id(source.def_id())
.expect("Non-local call to local provider is_const_fn");
let is_fn_like = FnLikeNode::from_node(tcx.hir().get_by_hir_id(hir_id)).is_some();
let is_assoc_const = match tcx.def_kind(source.def_id()) {
Some(DefKind::AssociatedConst) => true,
_ => false,
};
// Only run const prop on functions, methods, closures and associated constants
if !is_fn_like && !is_assoc_const {
// skip anon_const/statics/consts because they'll be evaluated by miri anyway
trace!("ConstProp skipped for {:?}", source.def_id());
return
}
trace!("ConstProp starting for {:?}", source.def_id());
// FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold
// constants, instead of just checking for const-folding succeeding.
// That would require a uniform one-def no-mutation analysis
// and RPO (or recursing when needing the value of a local).
let mut optimization_finder = ConstPropagator::new(mir, tcx, source);
optimization_finder.visit_mir(mir);
// put back the data we stole from `mir`
std::mem::replace(
&mut mir.source_scope_local_data,
optimization_finder.source_scope_local_data
);
std::mem::replace(
&mut mir.promoted,
optimization_finder.promoted
);
trace!("ConstProp done for {:?}", source.def_id());
}
}
type Const<'tcx> = OpTy<'tcx>;
/// Finds optimization opportunities on the MIR.
struct ConstPropagator<'a, 'mir, 'tcx:'a+'mir> {
ecx: InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
source: MirSource<'tcx>,
places: IndexVec<Local, Option<Const<'tcx>>>,
can_const_prop: IndexVec<Local, bool>,
param_env: ParamEnv<'tcx>,
source_scope_local_data: ClearCrossCrate<IndexVec<SourceScope, SourceScopeLocalData>>,
local_decls: IndexVec<Local, LocalDecl<'tcx>>,
promoted: IndexVec<Promoted, Mir<'tcx>>,
}
impl<'a, 'b, 'tcx> LayoutOf for ConstPropagator<'a, 'b, 'tcx> {
type Ty = Ty<'tcx>;
type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
self.tcx.layout_of(self.param_env.and(ty))
}
}
impl<'a, 'b, 'tcx> HasDataLayout for ConstPropagator<'a, 'b, 'tcx> {
#[inline]
fn data_layout(&self) -> &TargetDataLayout {
&self.tcx.data_layout
}
}
impl<'a, 'b, 'tcx> HasTyCtxt<'tcx> for ConstPropagator<'a, 'b, 'tcx> {
#[inline]
fn tcx<'c>(&'c self) -> TyCtxt<'c, 'tcx, 'tcx> {
self.tcx
}
}
impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> {
fn new(
mir: &mut Mir<'tcx>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
source: MirSource<'tcx>,
) -> ConstPropagator<'a, 'mir, 'tcx> {
let param_env = tcx.param_env(source.def_id());
let ecx = mk_eval_cx(tcx, tcx.def_span(source.def_id()), param_env);
let can_const_prop = CanConstProp::check(mir);
let source_scope_local_data = std::mem::replace(
&mut mir.source_scope_local_data,
ClearCrossCrate::Clear
);
let promoted = std::mem::replace(
&mut mir.promoted,
IndexVec::new()
);
ConstPropagator {
ecx,
tcx,
source,
param_env,
can_const_prop,
places: IndexVec::from_elem(None, &mir.local_decls),
source_scope_local_data,
//FIXME(wesleywiser) we can't steal this because `Visitor::super_visit_mir()` needs it
local_decls: mir.local_decls.clone(),
promoted,
}
}
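/// Runs `f` with the interpreter, converting any interpreter error into `None`.
/// Errors corresponding to code that will panic at runtime (overflow, division
/// by zero, bounds checks, ...) are reported as lints at the statement's lint root.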
fn use_ecx<F, T>(
&mut self,
source_info: SourceInfo,
f: F
) -> Option<T>
where
F: FnOnce(&mut Self) -> EvalResult<'tcx, T>,
{
self.ecx.tcx.span = source_info.span;
let lint_root = match self.source_scope_local_data {
ClearCrossCrate::Set(ref ivs) => {
//FIXME(#51314): remove this check
if source_info.scope.index() >= ivs.len() {
return None;
}
ivs[source_info.scope].lint_root
},
ClearCrossCrate::Clear => return None,
};
let r = match f(self) {
Ok(val) => Some(val),
Err(error) => {
let diagnostic = error_to_const_error(&self.ecx, error);
use rustc::mir::interpret::InterpError::*;
match diagnostic.error {
// don't report these, they make no sense in a const prop context
| MachineError(_)
| Exit(_)
// at runtime these transformations might make sense
// FIXME: figure out the rules and start linting
| FunctionAbiMismatch(..)
| FunctionArgMismatch(..)
| FunctionRetMismatch(..)
| FunctionArgCountMismatch
// fine at runtime, might be a register address or something similar
| ReadBytesAsPointer
// fine at runtime
| ReadForeignStatic
| Unimplemented(_)
// don't report const evaluator limits
| StackFrameLimitReached
| NoMirFor(..)
| InlineAsm
=> {},
| InvalidMemoryAccess
| DanglingPointerDeref
| DoubleFree
| InvalidFunctionPointer
| InvalidBool
| InvalidDiscriminant(..)
| PointerOutOfBounds { .. }
| InvalidNullPointerUsage
| ValidationFailure(..)
| InvalidPointerMath
| ReadUndefBytes(_)
| DeadLocal
| InvalidBoolOp(_)
| DerefFunctionPointer
| ExecuteMemory
| Intrinsic(..)
| InvalidChar(..)
| AbiViolation(_)
| AlignmentCheckFailed{..}
| CalledClosureAsFunction
| VtableForArgumentlessMethod
| ModifiedConstantMemory
| ModifiedStatic
| AssumptionNotHeld
// FIXME: should probably be removed and turned into a bug! call
| TypeNotPrimitive(_)
| ReallocatedWrongMemoryKind(_, _)
| DeallocatedWrongMemoryKind(_, _)
| ReallocateNonBasePtr
| DeallocateNonBasePtr
| IncorrectAllocationInformation(..)
| UnterminatedCString(_)
| HeapAllocZeroBytes
| HeapAllocNonPowerOfTwoAlignment(_)
| Unreachable
| ReadFromReturnPointer
| GeneratorResumedAfterReturn
| GeneratorResumedAfterPanic
| ReferencedConstant
| InfiniteLoop
=> {
// FIXME: report UB here
},
| OutOfTls
| TlsOutOfBounds
| PathNotFound(_)
=> bug!("these should not be in rustc, but in miri's machine errors"),
| Layout(_)
| UnimplementedTraitSelection
| TypeckError
| TooGeneric
// these are just noise
=> {},
// non-deterministic
| ReadPointerAsBytes
// FIXME: implement
=> {},
| Panic { .. }
| BoundsCheck{..}
| Overflow(_)
| OverflowNeg
| DivisionByZero
| RemainderByZero
=> {
diagnostic.report_as_lint(
self.ecx.tcx,
"this expression will panic at runtime",
lint_root,
None,
);
}
}
None
},
};
self.ecx.tcx.span = DUMMY_SP;
r
}
fn eval_constant(
&mut self,
c: &Constant<'tcx>,
) -> Option<Const<'tcx>> {
self.ecx.tcx.span = c.span;
match self.ecx.eval_const_to_op(*c.literal, None) {
Ok(op) => {
Some(op)
},
Err(error) => {
let err = error_to_const_error(&self.ecx, error);
err.report_as_error(self.ecx.tcx, "erroneous constant used");
None
},
}
}
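/// Returns the known value of `place`, if any: plain locals are looked up in
/// `self.places`, field projections recurse on their base, and promoted statics
/// are evaluated on demand via `eval_promoted`. Everything else yields `None`.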
fn eval_place(&mut self, place: &Place<'tcx>, source_info: SourceInfo) -> Option<Const<'tcx>> {
match *place {
Place::Base(PlaceBase::Local(loc)) => self.places[loc].clone(),
Place::Projection(ref proj) => match proj.elem {
ProjectionElem::Field(field, _) => {
trace!("field proj on {:?}", proj.base);
let base = self.eval_place(&proj.base, source_info)?;
let res = self.use_ecx(source_info, |this| {
this.ecx.operand_field(base, field.index() as u64)
})?;
Some(res)
},
// We could get more projections by using e.g., `operand_projection`,
// but we do not even have the stack frame set up properly so
// an `Index` projection would throw us off-track.
_ => None,
},
Place::Base(
PlaceBase::Static(box Static {kind: StaticKind::Promoted(promoted), ..})
) => {
let generics = self.tcx.generics_of(self.source.def_id());
if generics.requires_monomorphization(self.tcx) {
// FIXME: can't handle code with generics
return None;
}
let substs = InternalSubsts::identity_for_item(self.tcx, self.source.def_id());
let instance = Instance::new(self.source.def_id(), substs);
let cid = GlobalId {
instance,
promoted: Some(promoted),
};
// cannot use `const_eval` here, because that would require having the MIR
// for the current function available, but we're producing said MIR right now
let res = self.use_ecx(source_info, |this| {
let mir = &this.promoted[promoted];
eval_promoted(this.tcx, cid, mir, this.param_env)
})?;
trace!("evaluated promoted {:?} to {:?}", promoted, res);
Some(res.into())
},
_ => None,
}
}
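/// Evaluates an operand: constants go through `eval_constant`,
/// copies and moves go through `eval_place`.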
fn eval_operand(&mut self, op: &Operand<'tcx>, source_info: SourceInfo) -> Option<Const<'tcx>> {
match *op {
Operand::Constant(ref c) => self.eval_constant(c),
| Operand::Move(ref place)
| Operand::Copy(ref place) => self.eval_place(place, source_info),
}
}
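/// Tries to const-evaluate `rvalue`. Returns `None` for rvalue kinds that are
/// not handled (references, aggregates, `box`, discriminants, lengths, ...).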
fn const_prop(
&mut self,
rvalue: &Rvalue<'tcx>,
place_layout: TyLayout<'tcx>,
source_info: SourceInfo,
) -> Option<Const<'tcx>> {
let span = source_info.span;
match *rvalue {
Rvalue::Use(ref op) => {
self.eval_operand(op, source_info)
},
Rvalue::Repeat(..) |
Rvalue::Ref(..) |
Rvalue::Aggregate(..) |
Rvalue::NullaryOp(NullOp::Box, _) |
Rvalue::Discriminant(..) => None,
Rvalue::Cast(kind, ref operand, _) => {
let op = self.eval_operand(operand, source_info)?;
self.use_ecx(source_info, |this| {
let dest = this.ecx.allocate(place_layout, MemoryKind::Stack);
this.ecx.cast(op, kind, dest.into())?;
Ok(dest.into())
})
}
// FIXME(oli-obk): evaluate static/constant slice lengths
Rvalue::Len(_) => None,
Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some(
ImmTy {
imm: Immediate::Scalar(
Scalar::Bits {
bits: n as u128,
size: self.tcx.data_layout.pointer_size.bytes() as u8,
}.into()
),
layout: self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
}.into()
))
}
Rvalue::UnaryOp(op, ref arg) => {
let def_id = if self.tcx.is_closure(self.source.def_id()) {
self.tcx.closure_base_def_id(self.source.def_id())
} else {
self.source.def_id()
};
let generics = self.tcx.generics_of(def_id);
if generics.requires_monomorphization(self.tcx) {
// FIXME: can't handle code with generics
return None;
}
let arg = self.eval_operand(arg, source_info)?;
let val = self.use_ecx(source_info, |this| {
let prim = this.ecx.read_immediate(arg)?;
match op {
UnOp::Neg => {
// Need to do overflow check here: For actual CTFE, MIR
// generation emits code that does this before calling the op.
if prim.to_bits()? == (1 << (prim.layout.size.bits() - 1)) {
return err!(OverflowNeg);
}
}
UnOp::Not => {
// Cannot overflow
}
}
// Now run the actual operation.
this.ecx.unary_op(op, prim)
})?;
let res = ImmTy {
imm: Immediate::Scalar(val.into()),
layout: place_layout,
};
Some(res.into())
}
Rvalue::CheckedBinaryOp(op, ref left, ref right) |
Rvalue::BinaryOp(op, ref left, ref right) => {
trace!("rvalue binop {:?} for {:?} and {:?}", op, left, right);
let right = self.eval_operand(right, source_info)?;
let def_id = if self.tcx.is_closure(self.source.def_id()) {
self.tcx.closure_base_def_id(self.source.def_id())
} else {
self.source.def_id()
};
let generics = self.tcx.generics_of(def_id);
if generics.requires_monomorphization(self.tcx) {
// FIXME: can't handle code with generics
return None;
}
let r = self.use_ecx(source_info, |this| {
this.ecx.read_immediate(right)
})?;
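// Lint shifts whose shift amount is statically known to be at least the
// bit width of the left-hand operand's type.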
if op == BinOp::Shr || op == BinOp::Shl {
let left_ty = left.ty(&self.local_decls, self.tcx);
let left_bits = self
.tcx
.layout_of(self.param_env.and(left_ty))
.unwrap()
.size
.bits();
let right_size = right.layout.size;
let r_bits = r.to_scalar().and_then(|r| r.to_bits(right_size));
if r_bits.ok().map_or(false, |b| b >= left_bits as u128) {
let source_scope_local_data = match self.source_scope_local_data {
ClearCrossCrate::Set(ref data) => data,
ClearCrossCrate::Clear => return None,
};
let dir = if op == BinOp::Shr {
"right"
} else {
"left"
};
let hir_id = source_scope_local_data[source_info.scope].lint_root;
self.tcx.lint_hir(
::rustc::lint::builtin::EXCEEDING_BITSHIFTS,
hir_id,
span,
&format!("attempt to shift {} with overflow", dir));
return None;
}
}
let left = self.eval_operand(left, source_info)?;
let l = self.use_ecx(source_info, |this| {
this.ecx.read_immediate(left)
})?;
trace!("const evaluating {:?} for {:?} and {:?}", op, left, right);
let (val, overflow) = self.use_ecx(source_info, |this| {
this.ecx.binary_op(op, l, r)
})?;
let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue {
Immediate::ScalarPair(
val.into(),
Scalar::from_bool(overflow).into(),
)
} else {
if overflow {
let err = InterpError::Overflow(op).into();
let _: Option<()> = self.use_ecx(source_info, |_| Err(err));
return None;
}
Immediate::Scalar(val.into())
};
let res = ImmTy {
imm: val,
layout: place_layout,
};
Some(res.into())
},
}
}
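/// Wraps `scalar` in a `Constant` operand of type `ty` so it can be written
/// back into the MIR.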
fn operand_from_scalar(&self, scalar: Scalar, ty: Ty<'tcx>, span: Span) -> Operand<'tcx> {
Operand::Constant(Box::new(
Constant {
span,
ty,
user_ty: None,
literal: self.tcx.mk_const(ty::Const::from_scalar(
scalar,
ty,
))
}
))
}
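/// Rewrites `rval` to a literal use of the propagated value: a single scalar
/// becomes a constant operand, and a scalar pair of tuple type becomes a
/// two-element tuple aggregate of constants.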
fn replace_with_const(&self, rval: &mut Rvalue<'tcx>, value: Const<'tcx>, span: Span) {
self.ecx.validate_operand(
value,
vec![],
None,
true,
).expect("value should already be a valid const");
if let interpret::Operand::Immediate(im) = *value {
match im {
interpret::Immediate::Scalar(ScalarMaybeUndef::Scalar(scalar)) => {
*rval = Rvalue::Use(self.operand_from_scalar(scalar, value.layout.ty, span));
},
Immediate::ScalarPair(
ScalarMaybeUndef::Scalar(one),
ScalarMaybeUndef::Scalar(two)
) => {
let ty = &value.layout.ty.sty;
if let ty::Tuple(substs) = ty {
*rval = Rvalue::Aggregate(
Box::new(AggregateKind::Tuple),
vec![
self.operand_from_scalar(one, substs[0].expect_ty(), span),
self.operand_from_scalar(two, substs[1].expect_ty(), span),
],
);
}
},
_ => { }
}
}
}
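/// Values are only written back into the MIR at `-Z mir-opt-level=2` or
/// higher; below that the pass only evaluates and lints.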
fn should_const_prop(&self) -> bool {
self.tcx.sess.opts.debugging_opts.mir_opt_level >= 2
}
}
fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>) -> Option<u64> {
tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}
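/// Per-local analysis deciding which locals are candidates for const
/// propagation: only temporaries with at most one assignment qualify.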
struct CanConstProp {
can_const_prop: IndexVec<Local, bool>,
// `false` at the beginning; once set, no further assignments to that local are allowed
found_assignment: IndexVec<Local, bool>,
}
impl CanConstProp {
/// Returns, for each local, whether that local can be const-propagated.
fn check(mir: &Mir<'_>) -> IndexVec<Local, bool> {
let mut cpv = CanConstProp {
can_const_prop: IndexVec::from_elem(true, &mir.local_decls),
found_assignment: IndexVec::from_elem(false, &mir.local_decls),
};
for (local, val) in cpv.can_const_prop.iter_enumerated_mut() {
// cannot use args at all
// cannot use locals because if x < y { y - x } else { x - y } would
// lint for x != y
// FIXME(oli-obk): lint variables until they are used in a condition
// FIXME(oli-obk): lint if return value is constant
*val = mir.local_kind(local) == LocalKind::Temp;
}
cpv.visit_mir(mir);
cpv.can_const_prop
}
}
impl<'tcx> Visitor<'tcx> for CanConstProp {
fn visit_local(
&mut self,
&local: &Local,
context: PlaceContext,
_: Location,
) {
use rustc::mir::visit::PlaceContext::*;
match context {
// Constants must have at most one write
// FIXME(oli-obk): we could be more powerful here, if the multiple writes
// only occur in independent execution paths
MutatingUse(MutatingUseContext::Store) => if self.found_assignment[local] {
self.can_const_prop[local] = false;
} else {
self.found_assignment[local] = true
},
// Reading constants is allowed an arbitrary number of times
NonMutatingUse(NonMutatingUseContext::Copy) |
NonMutatingUse(NonMutatingUseContext::Move) |
NonMutatingUse(NonMutatingUseContext::Inspect) |
NonMutatingUse(NonMutatingUseContext::Projection) |
MutatingUse(MutatingUseContext::Projection) |
NonUse(_) => {},
_ => self.can_const_prop[local] = false,
}
}
}
impl<'b, 'a, 'tcx> MutVisitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
fn visit_constant(
&mut self,
constant: &mut Constant<'tcx>,
location: Location,
) {
trace!("visit_constant: {:?}", constant);
self.super_constant(constant, location);
self.eval_constant(constant);
}
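// For assignments, try to const-evaluate the rvalue; if the destination is a
// local that qualifies, remember the value and (at high enough opt levels)
// replace the rvalue with the computed constant.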
fn visit_statement(
&mut self,
statement: &mut Statement<'tcx>,
location: Location,
) {
trace!("visit_statement: {:?}", statement);
if let StatementKind::Assign(ref place, ref mut rval) = statement.kind {
let place_ty: Ty<'tcx> = place
.ty(&self.local_decls, self.tcx)
.ty;
if let Ok(place_layout) = self.tcx.layout_of(self.param_env.and(place_ty)) {
if let Some(value) = self.const_prop(rval, place_layout, statement.source_info) {
if let Place::Base(PlaceBase::Local(local)) = *place {
trace!("checking whether {:?} can be stored to {:?}", value, local);
if self.can_const_prop[local] {
trace!("storing {:?} to {:?}", value, local);
assert!(self.places[local].is_none());
self.places[local] = Some(value);
if self.should_const_prop() {
self.replace_with_const(rval, value, statement.source_info.span);
}
}
}
}
}
}
self.super_statement(statement, location);
}
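// For `Assert` terminators, evaluate the condition: if it is statically known
// to fail, emit a `const_err` lint with a precise message; otherwise optionally
// replace the condition with its known value. For `SwitchInt`, propagate a
// known discriminant value.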
fn visit_terminator(
&mut self,
terminator: &mut Terminator<'tcx>,
location: Location,
) {
self.super_terminator(terminator, location);
let source_info = terminator.source_info;
match &mut terminator.kind {
TerminatorKind::Assert { expected, msg, ref mut cond, .. } => {
if let Some(value) = self.eval_operand(&cond, source_info) {
trace!("assertion on {:?} should be {:?}", value, expected);
let expected = ScalarMaybeUndef::from(Scalar::from_bool(*expected));
let value_const = self.ecx.read_scalar(value).unwrap();
if expected != value_const {
// poison all places this operand references so that further code
// doesn't use the invalid value
match cond {
Operand::Move(ref place) | Operand::Copy(ref place) => {
let mut place = place;
while let Place::Projection(ref proj) = *place {
place = &proj.base;
}
if let Place::Base(PlaceBase::Local(local)) = *place {
self.places[local] = None;
}
},
Operand::Constant(_) => {}
}
let span = terminator.source_info.span;
let hir_id = self
.tcx
.hir()
.as_local_hir_id(self.source.def_id())
.expect("some part of a failing const eval must be local");
use rustc::mir::interpret::InterpError::*;
let msg = match msg {
Overflow(_) |
OverflowNeg |
DivisionByZero |
RemainderByZero => msg.description().to_owned(),
BoundsCheck { ref len, ref index } => {
let len = self
.eval_operand(len, source_info)
.expect("len must be const");
let len = match self.ecx.read_scalar(len) {
Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
other => bug!("const len not primitive: {:?}", other),
};
let index = self
.eval_operand(index, source_info)
.expect("index must be const");
let index = match self.ecx.read_scalar(index) {
Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
other => bug!("const index not primitive: {:?}", other),
};
format!(
"index out of bounds: \
the len is {} but the index is {}",
len,
index,
)
},
// Need proper const propagator for these
_ => return,
};
self.tcx.lint_hir(
::rustc::lint::builtin::CONST_ERR,
hir_id,
span,
&msg,
);
} else {
if self.should_const_prop() {
if let ScalarMaybeUndef::Scalar(scalar) = value_const {
*cond = self.operand_from_scalar(
scalar,
self.tcx.types.bool,
source_info.span,
);
}
}
}
}
},
TerminatorKind::SwitchInt { ref mut discr, switch_ty, .. } => {
if self.should_const_prop() {
if let Some(value) = self.eval_operand(&discr, source_info) {
if let ScalarMaybeUndef::Scalar(scalar) =
self.ecx.read_scalar(value).unwrap() {
*discr = self.operand_from_scalar(scalar, switch_ty, source_info.span);
}
}
}
},
// none of these have Operands to const-propagate
TerminatorKind::Goto { .. } |
TerminatorKind::Resume |
TerminatorKind::Abort |
TerminatorKind::Return |
TerminatorKind::Unreachable |
TerminatorKind::Drop { .. } |
TerminatorKind::DropAndReplace { .. } |
TerminatorKind::Yield { .. } |
TerminatorKind::GeneratorDrop |
TerminatorKind::FalseEdges { .. } |
TerminatorKind::FalseUnwind { .. } => { }
//FIXME(wesleywiser) Call does have Operands that could be const-propagated
TerminatorKind::Call { .. } => { }
}
}
}