cleanup op_to_const a bit; rename ConstValue::ByRef → Indirect
parent 551f481ffb
commit 0f8908da27
@@ -116,7 +116,7 @@ pub(crate) fn codegen_const_value<'tcx>(
     }
 
     match const_val {
-        ConstValue::ZeroSized => unreachable!(), // we already handles ZST above
+        ConstValue::ZeroSized => unreachable!(), // we already handled ZST above
         ConstValue::Scalar(x) => match x {
             Scalar::Int(int) => {
                 if fx.clif_type(layout.ty).is_some() {
@@ -200,7 +200,7 @@ pub(crate) fn codegen_const_value<'tcx>(
                 CValue::by_val(val, layout)
             }
         },
-        ConstValue::ByRef { alloc_id, offset } => {
+        ConstValue::Indirect { alloc_id, offset } => {
            let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
            // FIXME: avoid creating multiple allocations for the same AllocId?
            CValue::by_ref(
@@ -172,7 +172,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
                 .expect("simd_shuffle idx not const");
 
             let idx_bytes = match idx_const {
-                ConstValue::ByRef { alloc_id, offset } => {
+                ConstValue::Indirect { alloc_id, offset } => {
                     let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
                     let size = Size::from_bytes(
                         4 * ret_lane_count, /* size_of([u32; ret_lane_count]) */
@@ -116,7 +116,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                 let b_llval = bx.const_usize((end - start) as u64);
                 OperandValue::Pair(a_llval, b_llval)
             }
-            ConstValue::ByRef { alloc_id, offset } => {
+            ConstValue::Indirect { alloc_id, offset } => {
                 let alloc = bx.tcx().global_alloc(alloc_id).unwrap_memory();
                 // FIXME: should we attempt to avoid building the same AllocId multiple times?
                 return Self::from_const_alloc(bx, layout, alloc, offset);
@@ -112,13 +112,13 @@ pub(super) fn op_to_const<'tcx>(
     ecx: &CompileTimeEvalContext<'_, 'tcx>,
     op: &OpTy<'tcx>,
 ) -> ConstValue<'tcx> {
+    // Handle ZST consistently and early.
+    if op.layout.is_zst() {
+        return ConstValue::ZeroSized;
+    }
+
     // We do not have value optimizations for everything.
     // Only scalars and slices, since they are very common.
-    // Note that further down we turn scalars of uninitialized bits back to `ByRef`. These can result
-    // from scalar unions that are initialized with one of their zero sized variants. We could
-    // instead allow `ConstValue::Scalar` to store `ScalarMaybeUninit`, but that would affect all
-    // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
-    // `Undef` situation.
     let try_as_immediate = match op.layout.abi {
         Abi::Scalar(abi::Scalar::Initialized { .. }) => true,
         Abi::ScalarPair(..) => match op.layout.ty.kind() {
@@ -134,7 +134,7 @@ pub(super) fn op_to_const<'tcx>(
     let immediate = if try_as_immediate {
         Right(ecx.read_immediate(op).expect("normalization works on validated constants"))
     } else {
-        // It is guaranteed that any non-slice scalar pair is actually ByRef here.
+        // It is guaranteed that any non-slice scalar pair is actually `Indirect` here.
         // When we come back from raw const eval, we are always by-ref. The only way our op here is
         // by-val is if we are in destructure_mir_constant, i.e., if this is (a field of) something that we
         // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
@@ -144,28 +144,15 @@ pub(super) fn op_to_const<'tcx>(
 
     debug!(?immediate);
 
-    // We know `offset` is relative to the allocation, so we can use `into_parts`.
-    let to_const_value = |mplace: &MPlaceTy<'_>| {
-        debug!("to_const_value(mplace: {:?})", mplace);
-        match mplace.ptr().into_parts() {
-            (Some(alloc_id), offset) => ConstValue::ByRef { alloc_id, offset },
-            (None, offset) => {
-                assert!(mplace.layout.is_zst());
-                assert_eq!(
-                    offset.bytes() % mplace.layout.align.abi.bytes(),
-                    0,
-                    "this MPlaceTy must come from a validated constant, thus we can assume the \
-                    alignment is correct",
-                );
-                ConstValue::ZeroSized
-            }
-        }
-    };
     match immediate {
-        Left(ref mplace) => to_const_value(mplace),
+        Left(ref mplace) => {
+            // We know `offset` is relative to the allocation, so we can use `into_parts`.
+            let (alloc_id, offset) = mplace.ptr().into_parts();
+            let alloc_id = alloc_id.expect("cannot have `fake` place for non-ZST type");
+            ConstValue::Indirect { alloc_id, offset }
+        }
         // see comment on `let try_as_immediate` above
         Right(imm) => match *imm {
-            _ if imm.layout.is_zst() => ConstValue::ZeroSized,
             Immediate::Scalar(x) => ConstValue::Scalar(x),
             Immediate::ScalarPair(a, b) => {
                 debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
@@ -186,7 +173,7 @@ pub(super) fn op_to_const<'tcx>(
                 let len: usize = len.try_into().unwrap();
                 ConstValue::Slice { data, start, end: start + len }
             }
-            Immediate::Uninit => to_const_value(&op.assert_mem_place()),
+            Immediate::Uninit => bug!("`Uninit` is not a valid value for {}", op.layout.ty),
         },
     }
 }
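
The two op_to_const hunks above boil down to a simpler control flow: ZSTs are handled once at the top, a memory-backed operand is assumed to have a real allocation behind it and becomes Indirect, and an uninitialized immediate is treated as a bug. The following is a minimal standalone sketch of that flow, not the real rustc API; all types and names here (Op, AllocId, Size, op_to_const_sketch) are invented placeholders for illustration.

#[derive(Debug, Clone, Copy, PartialEq)]
struct AllocId(u32);

#[derive(Debug, Clone, Copy, PartialEq)]
struct Size(u64);

#[derive(Debug, PartialEq)]
enum ConstValue {
    Scalar(u128),
    ZeroSized,
    Indirect { alloc_id: AllocId, offset: Size },
}

// A drastically simplified "operand": either backed by memory or already an
// immediate scalar. Stand-in for the Left/Right (mplace/immediate) split above.
enum Op {
    MemPlace { alloc_id: Option<AllocId>, offset: Size, is_zst: bool },
    Immediate { value: u128, is_zst: bool },
}

impl Op {
    fn is_zst(&self) -> bool {
        match self {
            Op::MemPlace { is_zst, .. } | Op::Immediate { is_zst, .. } => *is_zst,
        }
    }
}

fn op_to_const_sketch(op: &Op) -> ConstValue {
    // Handle ZST consistently and early (mirrors the new early return).
    if op.is_zst() {
        return ConstValue::ZeroSized;
    }
    match op {
        // A memory-backed operand: a non-ZST must have a real allocation.
        Op::MemPlace { alloc_id, offset, .. } => {
            let alloc_id = alloc_id.expect("cannot have `fake` place for non-ZST type");
            ConstValue::Indirect { alloc_id, offset: *offset }
        }
        // An immediate becomes a plain scalar; an uninit immediate would be a bug.
        Op::Immediate { value, .. } => ConstValue::Scalar(*value),
    }
}

fn main() {
    let zst = Op::Immediate { value: 0, is_zst: true };
    assert_eq!(op_to_const_sketch(&zst), ConstValue::ZeroSized);

    let by_mem = Op::MemPlace { alloc_id: Some(AllocId(1)), offset: Size(0), is_zst: false };
    assert_eq!(
        op_to_const_sketch(&by_mem),
        ConstValue::Indirect { alloc_id: AllocId(1), offset: Size(0) }
    );
}
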
@@ -756,7 +756,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         };
         let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?;
         let op = match val_val {
-            ConstValue::ByRef { alloc_id, offset } => {
+            ConstValue::Indirect { alloc_id, offset } => {
                 // We rely on mutability being set correctly in that allocation to prevent writes
                 // where none should happen.
                 let ptr = self.global_base_pointer(Pointer::new(alloc_id, offset))?;
@@ -30,19 +30,21 @@ pub struct ConstAlloc<'tcx> {
 #[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
 #[derive(HashStable, Lift)]
 pub enum ConstValue<'tcx> {
-    /// Used only for types with `layout::abi::Scalar` ABI.
+    /// Used for types with `layout::abi::Scalar` ABI.
     ///
     /// Not using the enum `Value` to encode that this must not be `Uninit`.
     Scalar(Scalar),
 
-    /// Only used for ZSTs.
+    /// Only for ZSTs.
     ZeroSized,
 
-    /// Used only for `&[u8]` and `&str`
+    /// Used for `&[u8]` and `&str`.
+    ///
+    /// This is worth the optimization since Rust has literals of that type.
     Slice { data: ConstAllocation<'tcx>, start: usize, end: usize },
 
-    /// A value not represented/representable by `Scalar` or `Slice`
-    ByRef {
+    /// A value not representable by the other variants; needs to be stored in-memory.
+    Indirect {
         /// The backing memory of the value. May contain more memory than needed for just the value
         /// if this points into some other larger ConstValue.
         ///
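
As a rough mental model for the renamed variant, the sketch below pairs an Indirect-style value with a toy allocation table: the value itself stores only an AllocId plus an offset, and readers resolve the id through a side table, mirroring the tcx.global_alloc(alloc_id).unwrap_memory() calls in the hunks above. These are invented placeholder types, not the real ConstValue or TyCtxt; GlobalAllocs, intern, and read_bytes are hypothetical names for this example only.

use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct AllocId(u32);

#[allow(dead_code)] // other variants kept for shape, unused in this demo
#[derive(Debug)]
enum ConstValue {
    Scalar(u128),
    ZeroSized,
    // The value itself is just an id + offset; the bytes live elsewhere.
    Indirect { alloc_id: AllocId, offset: usize },
}

// Toy stand-in for the interner behind `tcx.global_alloc(..).unwrap_memory()`.
#[derive(Default)]
struct GlobalAllocs {
    allocs: HashMap<AllocId, Vec<u8>>,
}

impl GlobalAllocs {
    fn intern(&mut self, id: AllocId, bytes: Vec<u8>) {
        self.allocs.insert(id, bytes);
    }

    // Read `len` bytes of an indirectly stored value; the offset lets several
    // values share one backing allocation that is larger than any of them.
    fn read_bytes(&self, val: &ConstValue, len: usize) -> Option<&[u8]> {
        match val {
            ConstValue::Indirect { alloc_id, offset } => {
                let alloc = self.allocs.get(alloc_id)?;
                alloc.get(*offset..*offset + len)
            }
            _ => None,
        }
    }
}

fn main() {
    let mut globals = GlobalAllocs::default();
    // One 8-byte allocation backing two 4-byte values at different offsets.
    globals.intern(AllocId(0), vec![1, 2, 3, 4, 5, 6, 7, 8]);

    let first = ConstValue::Indirect { alloc_id: AllocId(0), offset: 0 };
    let second = ConstValue::Indirect { alloc_id: AllocId(0), offset: 4 };

    assert_eq!(globals.read_bytes(&first, 4), Some(&[1, 2, 3, 4][..]));
    assert_eq!(globals.read_bytes(&second, 4), Some(&[5, 6, 7, 8][..]));
}
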
@@ -62,7 +64,7 @@ impl<'tcx> ConstValue<'tcx> {
     #[inline]
     pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> {
         match *self {
-            ConstValue::ByRef { .. } | ConstValue::Slice { .. } | ConstValue::ZeroSized => None,
+            ConstValue::Indirect { .. } | ConstValue::Slice { .. } | ConstValue::ZeroSized => None,
             ConstValue::Scalar(val) => Some(val),
         }
     }
@@ -2914,7 +2914,7 @@ fn pretty_print_const_value<'tcx>(
                     _ => {}
                 }
             }
-            (ConstValue::ByRef { alloc_id, offset }, ty::Array(t, n)) if *t == u8_type => {
+            (ConstValue::Indirect { alloc_id, offset }, ty::Array(t, n)) if *t == u8_type => {
                 let n = n.try_to_target_usize(tcx).unwrap();
                 let alloc = tcx.global_alloc(alloc_id).unwrap_memory();
                 // cast is ok because we already checked for pointer size (32 or 64 bit) above
@@ -460,7 +460,7 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> {
                     ConstValue::ZeroSized => "<ZST>".to_string(),
                     ConstValue::Scalar(s) => format!("Scalar({s:?})"),
                     ConstValue::Slice { .. } => "Slice(..)".to_string(),
-                    ConstValue::ByRef { .. } => "ByRef(..)".to_string(),
+                    ConstValue::Indirect { .. } => "ByRef(..)".to_string(),
                 };
 
                 let fmt_valtree = |valtree: &ty::ValTree<'tcx>| match valtree {
@@ -709,7 +709,11 @@ pub fn write_allocations<'tcx>(
                 // `u8`/`str` slices, shouldn't contain pointers that we want to print.
                 Either::Right(std::iter::empty())
             }
-            ConstValue::ByRef { alloc_id, .. } => Either::Left(std::iter::once(alloc_id)),
+            ConstValue::Indirect { alloc_id, .. } => {
+                // FIXME: we don't actually want to print all of these, since some are printed nicely directly as values inline in MIR.
+                // Really we'd want `pretty_print_const_value` to decide which allocations to print, instead of having a separate visitor.
+                Either::Left(std::iter::once(alloc_id))
+            }
         }
     }
     struct CollectAllocIds(BTreeSet<AllocId>);
@@ -143,7 +143,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
     #[inline(always)]
     fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment {
         // We do not check for alignment to avoid having to carry an `Align`
-        // in `ConstValue::ByRef`.
+        // in `ConstValue::Indirect`.
         CheckAlignment::No
     }
 
@@ -552,7 +552,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
                     .ok()?;
 
                 Some(ConstantKind::Val(
-                    ConstValue::ByRef { alloc_id, offset: Size::ZERO },
+                    ConstValue::Indirect { alloc_id, offset: Size::ZERO },
                     value.layout.ty,
                 ))
             }
@@ -153,7 +153,7 @@ impl EnumSizeOpt {
                     span,
                     user_ty: None,
                     literal: ConstantKind::Val(
-                        interpret::ConstValue::ByRef { alloc_id, offset: Size::ZERO },
+                        interpret::ConstValue::Indirect { alloc_id, offset: Size::ZERO },
                         tmp_ty,
                     ),
                 };
@@ -1470,7 +1470,7 @@ fn collect_const_value<'tcx>(
 ) {
     match value {
         ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_alloc(tcx, ptr.provenance, output),
-        ConstValue::ByRef { alloc_id, .. } => collect_alloc(tcx, alloc_id, output),
+        ConstValue::Indirect { alloc_id, .. } => collect_alloc(tcx, alloc_id, output),
        ConstValue::Slice { data, start: _, end: _ } => {
            for &id in data.inner().provenance().ptrs().values() {
                collect_alloc(tcx, id, output);
@@ -72,7 +72,7 @@ pub fn new_allocation<'tcx>(
                 .unwrap();
             allocation.stable(tables)
         }
-        ConstValue::ByRef { alloc_id, offset } => {
+        ConstValue::Indirect { alloc_id, offset } => {
             let alloc = tables.tcx.global_alloc(alloc_id).unwrap_memory();
             let ty_size = tables
                 .tcx
@@ -232,7 +232,7 @@ fn path_to_matched_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Ve
             cx.tcx.type_of(def_id).instantiate_identity(),
         ),
         Res::Def(DefKind::Const, def_id) => match cx.tcx.const_eval_poly(def_id).ok()? {
-            ConstValue::ByRef { alloc, offset } if offset.bytes() == 0 => {
+            ConstValue::Indirect { alloc, offset } if offset.bytes() == 0 => {
                 read_mir_alloc_def_path(cx, alloc.inner(), cx.tcx.type_of(def_id).instantiate_identity())
             },
             _ => None,
@@ -684,7 +684,7 @@ pub fn miri_to_const<'tcx>(lcx: &LateContext<'tcx>, result: mir::ConstantKind<'t
             },
             _ => None,
         },
-        mir::ConstantKind::Val(ConstValue::ByRef { alloc_id, offset: _ }, _) => {
+        mir::ConstantKind::Val(ConstValue::Indirect { alloc_id, offset: _ }, _) => {
            let alloc = lcx.tcx.global_alloc(alloc_id).unwrap_memory();
            match result.ty().kind() {
                ty::Adt(adt_def, _) if adt_def.is_struct() => Some(Constant::Adt(result)),