Implement packed statics

Oliver Schneider 2017-08-08 15:53:07 +02:00
parent 181851fc6b
commit 8ab1eeef51
GPG Key ID: A69F8D225B3AD7D9
12 changed files with 158 additions and 91 deletions
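
For orientation, this is the situation the change targets: a static whose type is #[repr(packed)], so its fields may sit at unaligned addresses inside the static's allocation. A minimal sketch of such a program — a variation on the test added at the bottom of this commit; the extra `a` field is hypothetical and only serves to push `i` to an odd offset:

#[repr(packed)]
struct Foo {
    a: u8,
    i: i32,
}

static FOO: Foo = Foo { a: 1, i: 42 };

fn main() {
    // Braces copy the fields out by value instead of taking references to
    // unaligned locations; the interpreter still has to do unaligned reads
    // from the static's allocation to produce those copies.
    assert_eq!({ FOO.a }, 1);
    assert_eq!({ FOO.i }, 42);
}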


@ -329,8 +329,8 @@ impl<'a, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'tcx, super::Evaluator>
if let Ok(instance) = self.resolve_path(path) {
let cid = GlobalId { instance, promoted: None };
// compute global if not cached
let val = match self.globals.get(&cid).map(|&ptr| ptr) {
Some(ptr) => self.value_to_primval(Value::by_ref(ptr.into()), usize)?.to_u64()?,
let val = match self.globals.get(&cid).cloned() {
Some(ptr) => self.value_to_primval(Value::ByRef(ptr), usize)?.to_u64()?,
None => eval_body_as_primval(self.tcx, instance)?.0.to_u64()?,
};
if val == name {


@ -8,7 +8,7 @@ use rustc_miri::interpret::{
Lvalue, LvalueExtra,
PrimVal, PrimValKind, Value, Pointer,
HasMemory,
EvalContext,
EvalContext, PtrAndAlign,
};
use helpers::EvalContextExt as HelperEvalContextExt;
@ -266,10 +266,10 @@ impl<'a, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'tcx, super::Evaluator>
let size = self.type_size(dest_ty)?.expect("cannot zero unsized value");
let init = |this: &mut Self, val: Value| {
let zero_val = match val {
Value::ByRef { ptr, aligned } => {
Value::ByRef(PtrAndAlign { ptr, .. }) => {
// These writes have no alignment restriction anyway.
this.memory.write_repeat(ptr, 0, size)?;
Value::ByRef { ptr, aligned }
val
},
// TODO(solson): Revisit this, it's fishy to check for Undef here.
Value::ByVal(PrimVal::Undef) => match this.ty_to_primval_kind(dest_ty) {
@ -289,7 +289,7 @@ impl<'a, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'tcx, super::Evaluator>
};
match dest {
Lvalue::Local { frame, local } => self.modify_local(frame, local, init)?,
Lvalue::Ptr { ptr, extra: LvalueExtra::None, aligned: true } => self.memory.write_repeat(ptr, 0, size)?,
Lvalue::Ptr { ptr: PtrAndAlign { ptr, aligned: true }, extra: LvalueExtra::None } => self.memory.write_repeat(ptr, 0, size)?,
Lvalue::Ptr { .. } => bug!("init intrinsic tried to write to fat or unaligned ptr target"),
}
}
@ -456,16 +456,16 @@ impl<'a, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'tcx, super::Evaluator>
let size = dest_layout.size(&self.tcx.data_layout).bytes();
let uninit = |this: &mut Self, val: Value| {
match val {
Value::ByRef { ptr, aligned } => {
Value::ByRef(PtrAndAlign { ptr, .. }) => {
this.memory.mark_definedness(ptr, size, false)?;
Ok(Value::ByRef { ptr, aligned })
Ok(val)
},
_ => Ok(Value::ByVal(PrimVal::Undef)),
}
};
match dest {
Lvalue::Local { frame, local } => self.modify_local(frame, local, uninit)?,
Lvalue::Ptr { ptr, extra: LvalueExtra::None, aligned: true } =>
Lvalue::Ptr { ptr: PtrAndAlign { ptr, aligned: true }, extra: LvalueExtra::None } =>
self.memory.mark_definedness(ptr, size, false)?,
Lvalue::Ptr { .. } => bug!("uninit intrinsic tried to write to fat or unaligned ptr target"),
}


@ -9,7 +9,7 @@ use super::{
EvalResult, EvalError, EvalErrorKind,
GlobalId, Lvalue, Value,
PrimVal,
EvalContext, StackPopCleanup,
EvalContext, StackPopCleanup, PtrAndAlign,
Kind,
};
@ -34,7 +34,8 @@ pub fn eval_body_as_primval<'a, 'tcx>(
let size = ecx.type_size_with_substs(mir.return_ty, instance.substs)?.expect("unsized global");
let align = ecx.type_align_with_substs(mir.return_ty, instance.substs)?;
let ptr = ecx.memory.allocate(size, align, Kind::UninitializedStatic)?;
ecx.globals.insert(cid, ptr);
let aligned = !ecx.is_packed(mir.return_ty)?;
ecx.globals.insert(cid, PtrAndAlign { ptr: ptr.into(), aligned });
let mutable = !mir.return_ty.is_freeze(
ecx.tcx,
ty::ParamEnv::empty(Reveal::All),
@ -57,7 +58,7 @@ pub fn eval_body_as_primval<'a, 'tcx>(
while ecx.step()? {}
}
let value = Value::by_ref(ecx.globals.get(&cid).expect("global not cached").into());
let value = Value::ByRef(*ecx.globals.get(&cid).expect("global not cached"));
Ok((ecx.value_to_primval(value, mir.return_ty)?, mir.return_ty))
}


@ -7,7 +7,7 @@ use rustc::middle::const_val::ConstVal;
use rustc::middle::region::CodeExtent;
use rustc::mir;
use rustc::traits::Reveal;
use rustc::ty::layout::{self, Layout, Size, Align};
use rustc::ty::layout::{self, Layout, Size, Align, HasDataLayout};
use rustc::ty::subst::{Subst, Substs, Kind};
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Binder};
use rustc::traits;
@ -41,7 +41,7 @@ pub struct EvalContext<'a, 'tcx: 'a, M: Machine<'tcx>> {
pub(crate) suspended: HashMap<DynamicLifetime, Vec<ValidationQuery<'tcx>>>,
/// Precomputed statics, constants and promoteds.
pub globals: HashMap<GlobalId<'tcx>, MemoryPointer>,
pub globals: HashMap<GlobalId<'tcx>, PtrAndAlign>,
/// The virtual call stack.
pub(crate) stack: Vec<Frame<'tcx>>,
@ -143,6 +143,25 @@ pub struct TyAndPacked<'tcx> {
pub packed: bool,
}
#[derive(Copy, Clone, Debug)]
pub struct PtrAndAlign {
pub ptr: Pointer,
/// Remember whether this lvalue is *supposed* to be aligned.
pub aligned: bool,
}
impl PtrAndAlign {
pub fn to_ptr<'tcx>(self) -> EvalResult<'tcx, MemoryPointer> {
self.ptr.to_ptr()
}
pub fn offset<'tcx, C: HasDataLayout>(self, i: u64, cx: C) -> EvalResult<'tcx, Self> {
Ok(PtrAndAlign {
ptr: self.ptr.offset(i, cx)?,
aligned: self.aligned,
})
}
}
impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
pub fn new(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
@ -503,7 +522,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
}
pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
if let Some(Value::ByRef { ptr, aligned: _ }) = local {
if let Some(Value::ByRef(ptr)) = local {
trace!("deallocating local");
let ptr = ptr.to_ptr()?;
self.memory.dump_alloc(ptr.alloc_id);
@ -536,9 +555,11 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
self.memory.write_uint(discr_dest, discr_val, discr_size)?;
let dest = Lvalue::Ptr {
ptr: dest_ptr.into(),
ptr: PtrAndAlign {
ptr: dest_ptr.into(),
aligned: true,
},
extra: LvalueExtra::DowncastVariant(variant_idx),
aligned: true,
};
self.assign_fields(dest, dest_ty, operands)
@ -617,7 +638,13 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
self.inc_step_counter_and_check_limit(operands.len() as u64)?;
use rustc::ty::layout::Layout::*;
match *dest_layout {
Univariant { .. } | Array { .. } => {
Univariant { ref variant, .. } => {
self.write_maybe_aligned_mut(!variant.packed, |ecx| {
ecx.assign_fields(dest, dest_ty, operands)
})?;
}
Array { .. } => {
self.assign_fields(dest, dest_ty, operands)?;
}
@ -664,10 +691,12 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
}
}
StructWrappedNullablePointer { nndiscr, ref discrfield_source, .. } => {
StructWrappedNullablePointer { nndiscr, ref discrfield_source, ref nonnull, .. } => {
if let mir::AggregateKind::Adt(_, variant, _, _) = **kind {
if nndiscr == variant as u64 {
self.assign_fields(dest, dest_ty, operands)?;
self.write_maybe_aligned_mut(!nonnull.packed, |ecx| {
ecx.assign_fields(dest, dest_ty, operands)
})?;
} else {
for operand in operands {
let operand_ty = self.operand_ty(operand);
@ -682,7 +711,9 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
let dest = dest.offset(offset.bytes(), &self)?;
let dest_size = self.type_size(ty)?
.expect("bad StructWrappedNullablePointer discrfield");
self.memory.write_int(dest, 0, dest_size)?;
self.memory.write_maybe_aligned_mut(!nonnull.packed, |mem| {
mem.write_int(dest, 0, dest_size)
})?;
}
} else {
bug!("tried to assign {:?} to Layout::RawNullablePointer", kind);
@ -707,12 +738,14 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
self.assign_fields(dest, dest_ty, operands)?;
}
UntaggedUnion { .. } => {
UntaggedUnion { ref variants } => {
assert_eq!(operands.len(), 1);
let operand = &operands[0];
let value = self.eval_operand(operand)?;
let value_ty = self.operand_ty(operand);
self.write_value(value, dest, value_ty)?;
self.write_maybe_aligned_mut(!variants.packed, |ecx| {
ecx.write_value(value, dest, value_ty)
})?;
}
_ => {
@ -756,12 +789,12 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
let src = self.eval_lvalue(lvalue)?;
// We ignore the alignment of the lvalue here -- special handling for packed structs ends
// at the `&` operator.
let (ptr, extra, _aligned) = self.force_allocation(src)?.to_ptr_extra_aligned();
let (ptr, extra) = self.force_allocation(src)?.to_ptr_extra_aligned();
let val = match extra {
LvalueExtra::None => ptr.to_value(),
LvalueExtra::Length(len) => ptr.to_value_with_len(len),
LvalueExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
LvalueExtra::None => ptr.ptr.to_value(),
LvalueExtra::Length(len) => ptr.ptr.to_value_with_len(len),
LvalueExtra::Vtable(vtable) => ptr.ptr.to_value_with_vtable(vtable),
LvalueExtra::DowncastVariant(..) =>
bug!("attempted to take a reference to an enum downcast lvalue"),
};
@ -1024,7 +1057,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
Literal::Item { def_id, substs } => {
let instance = self.resolve_associated_const(def_id, substs);
let cid = GlobalId { instance, promoted: None };
Value::by_ref(self.globals.get(&cid).expect("static/const not cached").into())
Value::ByRef(*self.globals.get(&cid).expect("static/const not cached"))
}
Literal::Promoted { index } => {
@ -1032,7 +1065,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
instance: self.frame().instance,
promoted: Some(index),
};
Value::by_ref(self.globals.get(&cid).expect("promoted not cached").into())
Value::ByRef(*self.globals.get(&cid).expect("promoted not cached"))
}
};
@ -1041,6 +1074,10 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
}
}
pub fn read_global_as_value(&self, gid: GlobalId) -> Value {
Value::ByRef(*self.globals.get(&gid).expect("global not cached"))
}
pub fn operand_ty(&self, operand: &mir::Operand<'tcx>) -> Ty<'tcx> {
self.monomorphize(operand.ty(self.mir(), self.tcx), self.substs())
}
@ -1052,6 +1089,21 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
Ok(())
}
pub fn is_packed(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, bool> {
let layout = self.type_layout(ty)?;
use rustc::ty::layout::Layout::*;
Ok(match *layout {
Univariant { ref variant, .. } => variant.packed,
StructWrappedNullablePointer { ref nonnull, .. } => nonnull.packed,
UntaggedUnion { ref variants } => variants.packed,
// can only apply #[repr(packed)] to struct and union
_ => false,
})
}
pub fn force_allocation(
&mut self,
lvalue: Lvalue,
@ -1061,8 +1113,8 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
// -1 since we don't store the return value
match self.stack[frame].locals[local.index() - 1] {
None => return err!(DeadLocal),
Some(Value::ByRef { ptr, aligned }) => {
Lvalue::Ptr { ptr, aligned, extra: LvalueExtra::None }
Some(Value::ByRef(ptr)) => {
Lvalue::Ptr { ptr, extra: LvalueExtra::None }
},
Some(val) => {
let ty = self.stack[frame].mir.local_decls[local].ty;
@ -1083,7 +1135,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
/// ensures this Value is not a ByRef
pub(super) fn follow_by_ref_value(&mut self, value: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
match value {
Value::ByRef { ptr, aligned } => {
Value::ByRef(PtrAndAlign { ptr, aligned }) => {
self.read_maybe_aligned(aligned, |ectx| ectx.read_value(ptr, ty))
}
other => Ok(other),
@ -1141,7 +1193,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
// correct if we never look at this data with the wrong type.
match dest {
Lvalue::Ptr { ptr, extra, aligned } => {
Lvalue::Ptr { ptr: PtrAndAlign { ptr, aligned }, extra } => {
assert_eq!(extra, LvalueExtra::None);
self.write_maybe_aligned_mut(aligned,
|ectx| ectx.write_value_to_ptr(src_val, ptr, dest_ty))
@ -1167,7 +1219,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
old_dest_val: Value,
dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
if let Value::ByRef { ptr: dest_ptr, aligned } = old_dest_val {
if let Value::ByRef(PtrAndAlign { ptr: dest_ptr, aligned }) = old_dest_val {
// If the value is already `ByRef` (that is, backed by an `Allocation`),
// then we must write the new value into this allocation, because there may be
// other pointers into the allocation. These other pointers are logically
@ -1178,7 +1230,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
self.write_maybe_aligned_mut(aligned,
|ectx| ectx.write_value_to_ptr(src_val, dest_ptr, dest_ty))?;
} else if let Value::ByRef { ptr: src_ptr, aligned } = src_val {
} else if let Value::ByRef(PtrAndAlign { ptr: src_ptr, aligned }) = src_val {
// If the value is not `ByRef`, then we know there are no pointers to it
// and we can simply overwrite the `Value` in the locals array directly.
//
@ -1216,7 +1268,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
match value {
Value::ByRef { ptr, aligned } => {
Value::ByRef(PtrAndAlign { ptr, aligned }) => {
self.read_maybe_aligned_mut(aligned, |ectx| ectx.copy(ptr, dest, dest_ty))
},
Value::ByVal(primval) => {
@ -1551,7 +1603,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
//let src = adt::MaybeSizedValue::sized(src);
//let dst = adt::MaybeSizedValue::sized(dst);
let src_ptr = match src {
Value::ByRef { ptr, aligned: true } => ptr,
Value::ByRef(PtrAndAlign { ptr, aligned: true }) => ptr,
// TODO: Is it possible for unaligned pointers to occur here?
_ => bug!("expected aligned pointer, got {:?}", src),
};
@ -1598,7 +1650,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
Err(err) => {
panic!("Failed to access local: {:?}", err);
}
Ok(Value::ByRef { ptr, aligned }) => match ptr.into_inner_primval() {
Ok(Value::ByRef(PtrAndAlign{ ptr, aligned })) => match ptr.into_inner_primval() {
PrimVal::Ptr(ptr) => {
write!(msg, " by {}ref:", if aligned { "" } else { "unaligned " }).unwrap();
allocs.push(ptr.alloc_id);
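
The core of the patch, condensed into a standalone sketch. The type names, the string-keyed `GlobalId`, the `Layout` enum and the addresses below are simplified stand-ins, not the real miri/rustc definitions: the point is that a cached global's pointer now carries an `aligned` flag, derived from whether the global's type layout is packed.

use std::collections::HashMap;

// Simplified stand-ins for the interpreter's types; the real Pointer,
// GlobalId and layout types live in rustc_miri and rustc.
type Pointer = usize;
type GlobalId = &'static str;

// Mirrors the new `PtrAndAlign`: a pointer plus whether accesses through it
// may assume the pointee's declared alignment.
#[derive(Copy, Clone, Debug)]
struct PtrAndAlign {
    ptr: Pointer,
    aligned: bool,
}

// Just enough layout information to mirror `EvalContext::is_packed`.
#[allow(dead_code)]
enum Layout {
    Univariant { packed: bool },
    UntaggedUnion { packed: bool },
    Other,
}

// #[repr(packed)] can only be applied to structs and unions, so every other
// layout counts as not packed.
fn is_packed(layout: &Layout) -> bool {
    match *layout {
        Layout::Univariant { packed } | Layout::UntaggedUnion { packed } => packed,
        Layout::Other => false,
    }
}

fn main() {
    let mut globals: HashMap<GlobalId, PtrAndAlign> = HashMap::new();

    // Caching a global now also records whether its type is packed, like
    // `globals.insert(cid, PtrAndAlign { ptr: ptr.into(), aligned })` above.
    let foo_layout = Layout::Univariant { packed: true };
    let aligned = !is_packed(&foo_layout);
    globals.insert("FOO", PtrAndAlign { ptr: 0x1000, aligned });

    // Later reads of the cached static see the flag and must fall back to
    // unaligned memory accesses when it is false.
    assert_eq!(globals["FOO"].ptr, 0x1000);
    assert!(!globals["FOO"].aligned);
}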


@ -9,6 +9,7 @@ use super::{
MemoryPointer,
PrimVal, Value, Pointer,
Machine,
PtrAndAlign,
};
#[derive(Copy, Clone, Debug)]
@ -18,10 +19,8 @@ pub enum Lvalue {
/// An lvalue may have an invalid (integral or undef) pointer,
/// since it might be turned back into a reference
/// before ever being dereferenced.
ptr: Pointer,
ptr: PtrAndAlign,
extra: LvalueExtra,
/// Remember whether this lvalue is *supposed* to be aligned.
aligned: bool,
},
/// An lvalue referring to a value on the stack. Represented by a stack frame index paired with
@ -58,23 +57,23 @@ impl<'tcx> Lvalue {
}
pub fn from_primval_ptr(ptr: Pointer) -> Self {
Lvalue::Ptr { ptr, extra: LvalueExtra::None, aligned: true }
Lvalue::Ptr { ptr: PtrAndAlign { ptr, aligned: true }, extra: LvalueExtra::None }
}
pub fn from_ptr(ptr: MemoryPointer) -> Self {
Self::from_primval_ptr(ptr.into())
}
pub(super) fn to_ptr_extra_aligned(self) -> (Pointer, LvalueExtra, bool) {
pub(super) fn to_ptr_extra_aligned(self) -> (PtrAndAlign, LvalueExtra) {
match self {
Lvalue::Ptr { ptr, extra, aligned } => (ptr, extra, aligned),
Lvalue::Ptr { ptr, extra } => (ptr, extra),
_ => bug!("to_ptr_and_extra: expected Lvalue::Ptr, got {:?}", self),
}
}
pub fn to_ptr(self) -> EvalResult<'tcx, MemoryPointer> {
let (ptr, extra, _aligned) = self.to_ptr_extra_aligned();
let (ptr, extra) = self.to_ptr_extra_aligned();
// At this point, we forget about the alignment information -- the lvalue has been turned into a reference,
// and no matter where it came from, it now must be aligned.
assert_eq!(extra, LvalueExtra::None);
@ -111,7 +110,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
Static(ref static_) => {
let instance = ty::Instance::mono(self.tcx, static_.def_id);
let cid = GlobalId { instance, promoted: None };
Ok(Some(Value::by_ref(self.globals.get(&cid).expect("global not cached").into())))
Ok(Some(Value::ByRef(*self.globals.get(&cid).expect("global not cached"))))
},
Projection(ref proj) => self.try_read_lvalue_projection(proj),
}
@ -161,9 +160,9 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
pub fn read_lvalue(&self, lvalue: Lvalue) -> EvalResult<'tcx, Value> {
match lvalue {
Lvalue::Ptr { ptr, extra, aligned } => {
Lvalue::Ptr { ptr, extra } => {
assert_eq!(extra, LvalueExtra::None);
Ok(Value::ByRef { ptr, aligned })
Ok(Value::ByRef(ptr))
}
Lvalue::Local { frame, local } => {
self.stack[frame].get_local(local)
@ -180,7 +179,10 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
Static(ref static_) => {
let instance = ty::Instance::mono(self.tcx, static_.def_id);
let gid = GlobalId { instance, promoted: None };
Lvalue::from_ptr(*self.globals.get(&gid).expect("uncached global"))
Lvalue::Ptr {
ptr: *self.globals.get(&gid).expect("uncached global"),
extra: LvalueExtra::None,
}
}
Projection(ref proj) => {
@ -212,10 +214,11 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
},
General { ref variants, .. } => {
let (_, base_extra, _) = base.to_ptr_extra_aligned();
let (_, base_extra) = base.to_ptr_extra_aligned();
if let LvalueExtra::DowncastVariant(variant_idx) = base_extra {
// +1 for the discriminant, which is field 0
(variants[variant_idx].offsets[field_index + 1], variants[variant_idx].packed)
assert!(!variants[variant_idx].packed);
(variants[variant_idx].offsets[field_index + 1], false)
} else {
bug!("field access on enum had no variant index");
}
@ -262,8 +265,8 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
};
// Do not allocate in trivial cases
let (base_ptr, base_extra, aligned) = match base {
Lvalue::Ptr { ptr, extra, aligned } => (ptr, extra, aligned),
let (base_ptr, base_extra) = match base {
Lvalue::Ptr { ptr, extra } => (ptr, extra),
Lvalue::Local { frame, local } => match self.stack[frame].get_local(local)? {
// in case the type has a single field, just return the value
Value::ByVal(_) if self.get_field_count(base_ty).map(|c| c == 1).unwrap_or(false) => {
@ -278,13 +281,16 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
let offset = match base_extra {
LvalueExtra::Vtable(tab) => {
let (_, align) = self.size_and_align_of_dst(base_ty, base_ptr.to_value_with_vtable(tab))?;
let (_, align) = self.size_and_align_of_dst(base_ty, base_ptr.ptr.to_value_with_vtable(tab))?;
offset.abi_align(Align::from_bytes(align, align).unwrap()).bytes()
}
_ => offset.bytes(),
};
let ptr = base_ptr.offset(offset, &self)?;
let mut ptr = base_ptr.offset(offset, &self)?;
// if we were unaligned, stay unaligned
// no matter what we were, if we are packed, we must not be aligned anymore
ptr.aligned &= !packed;
let field_ty = self.monomorphize(field_ty, self.substs());
@ -301,33 +307,33 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
base_extra
};
Ok(Lvalue::Ptr { ptr, extra, aligned: aligned && !packed })
Ok(Lvalue::Ptr { ptr, extra } )
}
pub(super) fn val_to_lvalue(&self, val: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Lvalue> {
Ok(match self.tcx.struct_tail(ty).sty {
ty::TyDynamic(..) => {
let (ptr, vtable) = val.into_ptr_vtable_pair(&self.memory)?;
Lvalue::Ptr { ptr, extra: LvalueExtra::Vtable(vtable), aligned: true }
Lvalue::Ptr { ptr: PtrAndAlign { ptr, aligned: true }, extra: LvalueExtra::Vtable(vtable) }
},
ty::TyStr | ty::TySlice(_) => {
let (ptr, len) = val.into_slice(&self.memory)?;
Lvalue::Ptr { ptr, extra: LvalueExtra::Length(len), aligned: true }
Lvalue::Ptr { ptr: PtrAndAlign { ptr, aligned: true }, extra: LvalueExtra::Length(len) }
},
_ => Lvalue::Ptr { ptr: val.into_ptr(&self.memory)?, extra: LvalueExtra::None, aligned: true },
_ => Lvalue::from_primval_ptr(val.into_ptr(&self.memory)?),
})
}
pub(super) fn lvalue_index(&mut self, base: Lvalue, outer_ty: Ty<'tcx>, n: u64) -> EvalResult<'tcx, Lvalue> {
// Taking the outer type here may seem odd; it's needed because for array types, the outer type gives away the length.
let base = self.force_allocation(base)?;
let (base_ptr, _, aligned) = base.to_ptr_extra_aligned();
let (base_ptr, _) = base.to_ptr_extra_aligned();
let (elem_ty, len) = base.elem_ty_and_len(outer_ty);
let elem_size = self.type_size(elem_ty)?.expect("slice element must be sized");
assert!(n < len, "Tried to access element {} of array/slice with length {}", n, len);
let ptr = base_ptr.offset(n * elem_size, self.memory.layout)?;
Ok(Lvalue::Ptr { ptr, extra: LvalueExtra::None, aligned })
Ok(Lvalue::Ptr { ptr, extra: LvalueExtra::None })
}
pub(super) fn eval_lvalue_projection(
@ -337,7 +343,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
proj_elem: &mir::ProjectionElem<'tcx, mir::Operand<'tcx>, Ty<'tcx>>,
) -> EvalResult<'tcx, Lvalue> {
use rustc::mir::ProjectionElem::*;
let (ptr, extra, aligned) = match *proj_elem {
let (ptr, extra) = match *proj_elem {
Field(field, field_ty) => {
return self.lvalue_field(base, field.index(), base_ty, field_ty);
}
@ -346,7 +352,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
let base_layout = self.type_layout(base_ty)?;
// FIXME(solson)
let base = self.force_allocation(base)?;
let (base_ptr, base_extra, aligned) = base.to_ptr_extra_aligned();
let (base_ptr, base_extra) = base.to_ptr_extra_aligned();
use rustc::ty::layout::Layout::*;
let extra = match *base_layout {
@ -354,7 +360,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => base_extra,
_ => bug!("variant downcast on non-aggregate: {:?}", base_layout),
};
(base_ptr, extra, aligned)
(base_ptr, extra)
}
Deref => {
@ -383,7 +389,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
ConstantIndex { offset, min_length, from_end } => {
// FIXME(solson)
let base = self.force_allocation(base)?;
let (base_ptr, _, aligned) = base.to_ptr_extra_aligned();
let (base_ptr, _) = base.to_ptr_extra_aligned();
let (elem_ty, n) = base.elem_ty_and_len(base_ty);
let elem_size = self.type_size(elem_ty)?.expect("sequence element must be sized");
@ -396,24 +402,24 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
};
let ptr = base_ptr.offset(index * elem_size, &self)?;
(ptr, LvalueExtra::None, aligned)
(ptr, LvalueExtra::None)
}
Subslice { from, to } => {
// FIXME(solson)
let base = self.force_allocation(base)?;
let (base_ptr, _, aligned) = base.to_ptr_extra_aligned();
let (base_ptr, _) = base.to_ptr_extra_aligned();
let (elem_ty, n) = base.elem_ty_and_len(base_ty);
let elem_size = self.type_size(elem_ty)?.expect("slice element must be sized");
assert!(u64::from(from) <= n - u64::from(to));
let ptr = base_ptr.offset(u64::from(from) * elem_size, &self)?;
let extra = LvalueExtra::Length(n - u64::from(to) - u64::from(from));
(ptr, extra, aligned)
(ptr, extra)
}
};
Ok(Lvalue::Ptr { ptr, extra, aligned })
Ok(Lvalue::Ptr { ptr, extra })
}
pub(super) fn lvalue_ty(&self, lvalue: &mir::Lvalue<'tcx>) -> Ty<'tcx> {
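
One rule from the lvalue changes worth spelling out: projecting through a field of a packed layout clears the `aligned` flag, and the flag never flips back to true on later projections. A hedged sketch of that rule with a simplified `PtrAndAlign` and a made-up `project_field` helper (not the real miri code):

// Simplified stand-in for the interpreter's `PtrAndAlign`.
#[derive(Copy, Clone, Debug, PartialEq)]
struct PtrAndAlign {
    ptr: usize,
    aligned: bool,
}

// Mirrors the projection logic above: offset to the field, and clear the
// `aligned` flag when the enclosing layout is packed. The flag only ever
// goes from true to false ("if we were unaligned, stay unaligned").
fn project_field(base: PtrAndAlign, field_offset: usize, packed: bool) -> PtrAndAlign {
    PtrAndAlign {
        ptr: base.ptr + field_offset,
        aligned: base.aligned && !packed,
    }
}

fn main() {
    let base = PtrAndAlign { ptr: 0x1000, aligned: true };

    // Projecting into a packed struct drops the alignment guarantee...
    let inner = project_field(base, 1, true);
    assert_eq!(inner, PtrAndAlign { ptr: 0x1001, aligned: false });

    // ...and later projections through non-packed layouts do not restore it.
    let deeper = project_field(inner, 4, false);
    assert!(!deeper.aligned);
}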


@ -33,6 +33,7 @@ pub use self::eval_context::{
StackPopCleanup,
DynamicLifetime,
TyAndPacked,
PtrAndAlign,
};
pub use self::lvalue::{


@ -13,7 +13,7 @@ use rustc::ty::subst::Substs;
use super::{
EvalResult,
EvalContext, StackPopCleanup, TyAndPacked,
EvalContext, StackPopCleanup, TyAndPacked, PtrAndAlign,
GlobalId, Lvalue,
HasMemory, Kind,
Machine,
@ -182,14 +182,15 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
let ptr = self.memory.allocate(ptr_size, ptr_size, Kind::UninitializedStatic)?;
self.memory.write_usize(ptr, 0)?;
self.memory.mark_static_initalized(ptr.alloc_id, mutability)?;
self.globals.insert(cid, ptr);
self.globals.insert(cid, PtrAndAlign { ptr: ptr.into(), aligned: true });
return Ok(false);
}
let mir = self.load_mir(instance.def)?;
let size = self.type_size_with_substs(mir.return_ty, substs)?.expect("unsized global");
let align = self.type_align_with_substs(mir.return_ty, substs)?;
let ptr = self.memory.allocate(size, align, Kind::UninitializedStatic)?;
self.globals.insert(cid, ptr);
let aligned = !self.is_packed(mir.return_ty)?;
self.globals.insert(cid, PtrAndAlign { ptr: ptr.into(), aligned });
let internally_mutable = !mir.return_ty.is_freeze(
self.tcx,
ty::ParamEnv::empty(Reveal::All),
@ -265,7 +266,8 @@ impl<'a, 'b, 'tcx, M: Machine<'tcx>> Visitor<'tcx> for ConstantExtractor<'a, 'b,
let size = this.ecx.type_size_with_substs(mir.return_ty, this.instance.substs)?.expect("unsized global");
let align = this.ecx.type_align_with_substs(mir.return_ty, this.instance.substs)?;
let ptr = this.ecx.memory.allocate(size, align, Kind::UninitializedStatic)?;
this.ecx.globals.insert(cid, ptr);
let aligned = !this.ecx.is_packed(mir.return_ty)?;
this.ecx.globals.insert(cid, PtrAndAlign { ptr: ptr.into(), aligned });
trace!("pushing stack frame for {:?}", index);
this.ecx.push_stack_frame(this.instance,
constant.span,


@ -17,9 +17,9 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
// However, unaligned accesses will probably make the actual drop implementation fail -- a problem shared
// by rustc.
let val = match self.force_allocation(lval)? {
Lvalue::Ptr { ptr, extra: LvalueExtra::Vtable(vtable), aligned: _ } => ptr.to_value_with_vtable(vtable),
Lvalue::Ptr { ptr, extra: LvalueExtra::Length(len), aligned: _ } => ptr.to_value_with_len(len),
Lvalue::Ptr { ptr, extra: LvalueExtra::None, aligned: _ } => ptr.to_value(),
Lvalue::Ptr { ptr, extra: LvalueExtra::Vtable(vtable) } => ptr.ptr.to_value_with_vtable(vtable),
Lvalue::Ptr { ptr, extra: LvalueExtra::Length(len) } => ptr.ptr.to_value_with_len(len),
Lvalue::Ptr { ptr, extra: LvalueExtra::None } => ptr.ptr.to_value(),
_ => bug!("force_allocation broken"),
};
self.drop(val, instance, ty, span)


@ -6,7 +6,7 @@ use syntax::abi::Abi;
use super::{
EvalError, EvalResult, EvalErrorKind,
EvalContext, eval_context, TyAndPacked,
EvalContext, eval_context, TyAndPacked, PtrAndAlign,
Lvalue,
MemoryPointer,
PrimVal, Value,
@ -311,10 +311,10 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
if self.frame().mir.args_iter().count() == fields.len() + 1 {
let offsets = variant.offsets.iter().map(|s| s.bytes());
match arg_val {
Value::ByRef { ptr, aligned } => {
Value::ByRef(PtrAndAlign { ptr, aligned }) => {
assert!(aligned, "Unaligned ByRef-values cannot occur as function arguments");
for ((offset, ty), arg_local) in offsets.zip(fields).zip(arg_locals) {
let arg = Value::ByRef { ptr: ptr.offset(offset, &self)?, aligned: true};
let arg = Value::by_ref(ptr.offset(offset, &self)?);
let dest = self.eval_lvalue(&mir::Lvalue::Local(arg_local))?;
trace!("writing arg {:?} to {:?} (type: {})", arg, dest, ty);
self.write_value(arg, dest, ty)?;


@ -213,7 +213,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
};
if is_owning {
match query.lval {
Lvalue::Ptr { ptr, extra, aligned: _ } => {
Lvalue::Ptr { ptr, extra } => {
// Determine the size
// FIXME: Can we reuse size_and_align_of_dst for Lvalues?
let len = match self.type_size(query.ty)? {


@ -7,6 +7,7 @@ use super::{
EvalResult,
Memory, MemoryPointer, HasMemory, PointerArithmetic,
Machine,
PtrAndAlign,
};
pub(super) fn bytes_to_f32(bytes: u128) -> f32 {
@ -36,7 +37,7 @@ pub(super) fn f64_to_bytes(f: f64) -> u128 {
/// operations and fat pointers. This idea was taken from rustc's trans.
#[derive(Clone, Copy, Debug)]
pub enum Value {
ByRef { ptr: Pointer, aligned: bool},
ByRef(PtrAndAlign),
ByVal(PrimVal),
ByValPair(PrimVal, PrimVal),
}
@ -133,12 +134,6 @@ impl ::std::convert::From<MemoryPointer> for Pointer {
}
}
impl<'a> ::std::convert::From<&'a MemoryPointer> for Pointer {
fn from(ptr: &'a MemoryPointer) -> Self {
PrimVal::Ptr(*ptr).into()
}
}
/// A `PrimVal` represents an immediate, primitive value existing outside of a
/// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 8 bytes in
/// size. Like a range of bytes in an `Allocation`, a `PrimVal` can either represent the raw bytes
@ -172,7 +167,7 @@ pub enum PrimValKind {
impl<'a, 'tcx: 'a> Value {
#[inline]
pub fn by_ref(ptr: Pointer) -> Self {
Value::ByRef { ptr, aligned: true }
Value::ByRef(PtrAndAlign { ptr, aligned: true })
}
/// Convert the value into a pointer (or a pointer-sized integer). If the value is a ByRef,
@ -180,7 +175,7 @@ impl<'a, 'tcx: 'a> Value {
pub fn into_ptr<M: Machine<'tcx>>(&self, mem: &Memory<'a, 'tcx, M>) -> EvalResult<'tcx, Pointer> {
use self::Value::*;
match *self {
ByRef { ptr, aligned } => {
ByRef(PtrAndAlign { ptr, aligned }) => {
mem.read_maybe_aligned(aligned, |mem| mem.read_ptr(ptr.to_ptr()?) )
},
ByVal(ptr) | ByValPair(ptr, _) => Ok(ptr.into()),
@ -193,7 +188,7 @@ impl<'a, 'tcx: 'a> Value {
) -> EvalResult<'tcx, (Pointer, MemoryPointer)> {
use self::Value::*;
match *self {
ByRef { ptr: ref_ptr, aligned } => {
ByRef(PtrAndAlign { ptr: ref_ptr, aligned }) => {
mem.read_maybe_aligned(aligned, |mem| {
let ptr = mem.read_ptr(ref_ptr.to_ptr()?)?;
let vtable = mem.read_ptr(ref_ptr.offset(mem.pointer_size(), mem.layout)?.to_ptr()?)?;
@ -211,7 +206,7 @@ impl<'a, 'tcx: 'a> Value {
pub(super) fn into_slice<M: Machine<'tcx>>(&self, mem: &Memory<'a, 'tcx, M>) -> EvalResult<'tcx, (Pointer, u64)> {
use self::Value::*;
match *self {
ByRef { ptr: ref_ptr, aligned } => {
ByRef(PtrAndAlign { ptr: ref_ptr, aligned } ) => {
mem.read_maybe_aligned(aligned, |mem| {
let ptr = mem.read_ptr(ref_ptr.to_ptr()?)?;
let len = mem.read_usize(ref_ptr.offset(mem.pointer_size(), mem.layout)?.to_ptr()?)?;
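
The Value change above replaces the `ByRef { ptr, aligned }` struct variant with a tuple variant wrapping `PtrAndAlign`, keeping `Value::by_ref` as the aligned-by-default constructor. A minimal model of the new shape, with simplified stand-in types rather than the real miri ones:

// Simplified stand-ins; the real Pointer and PrimVal live in rustc_miri.
type Pointer = usize;
#[allow(dead_code)]
#[derive(Copy, Clone, Debug)]
struct PrimVal;

#[derive(Copy, Clone, Debug)]
struct PtrAndAlign {
    ptr: Pointer,
    aligned: bool,
}

// After this commit the by-ref case carries its alignment flag inside a
// single `PtrAndAlign` instead of a separate struct-variant field.
#[allow(dead_code)]
#[derive(Copy, Clone, Debug)]
enum Value {
    ByRef(PtrAndAlign),
    ByVal(PrimVal),
    ByValPair(PrimVal, PrimVal),
}

impl Value {
    // Mirrors the updated `Value::by_ref`: an aligned-by-default constructor
    // for callers that know their pointer is properly aligned.
    fn by_ref(ptr: Pointer) -> Self {
        Value::ByRef(PtrAndAlign { ptr, aligned: true })
    }
}

fn main() {
    // Call sites now destructure the wrapper, e.g.
    // `Value::ByRef(PtrAndAlign { ptr, aligned }) => ...` as in the diff.
    if let Value::ByRef(PtrAndAlign { ptr, aligned }) = Value::by_ref(0x2000) {
        assert_eq!(ptr, 0x2000);
        assert!(aligned);
    }
}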


@ -0,0 +1,10 @@
#[repr(packed)]
struct Foo {
i: i32
}
fn main() {
assert_eq!({FOO.i}, 42);
}
static FOO: Foo = Foo { i: 42 };