Hide the RefCell inside InterpretInterner

It was too easy to get this wrong

parent 37d8b9a86c
commit d26ebec81c
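The commit message is terse, so here is a hedged sketch of the failure mode it alludes to. The types below are invented for illustration and are not the rustc definitions; the point is that when the RefCell is part of the public field type, every caller picks borrow() or borrow_mut() itself, and holding a read guard while another path takes a write borrow compiles cleanly but panics at runtime.

use std::cell::RefCell;
use std::collections::HashMap;

// Stand-ins for TyCtxt and InterpretInterner, heavily simplified.
#[derive(Default)]
struct Interner {
    map: HashMap<u64, String>,
    next: u64,
}

impl Interner {
    fn reserve(&mut self) -> u64 {
        let id = self.next;
        self.next += 1;
        id
    }
}

#[derive(Default)]
struct Ctxt {
    // Old layout: the RefCell is exposed, so borrow discipline is the
    // caller's problem.
    interner: RefCell<Interner>,
}

fn main() {
    let ctxt = Ctxt::default();
    // Easy to get wrong: the read guard is still alive here...
    let guard = ctxt.interner.borrow();
    let _known = guard.map.get(&0);
    // ...so this write borrow is rejected dynamically and the program
    // panics with "already borrowed: BorrowMutError".
    let _id = ctxt.interner.borrow_mut().reserve();
}

Hiding the RefCell behind &self methods, as this commit does, keeps every borrow scoped to a single method body, so callers cannot set up that conflict by accident.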
@@ -376,18 +376,15 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::AllocId {
     ) {
         ty::tls::with_opt(|tcx| {
             let tcx = tcx.expect("can't hash AllocIds during hir lowering");
-            let interner = tcx
-                .interpret_interner
-                .borrow();
-            if let Some(def_id) = interner.get_corresponding_static_def_id(*self) {
+            if let Some(def_id) = tcx.interpret_interner.get_corresponding_static_def_id(*self) {
                 0.hash_stable(hcx, hasher);
                 // statics are unique via their DefId
                 def_id.hash_stable(hcx, hasher);
-            } else if let Some(alloc) = interner.get_alloc(*self) {
+            } else if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
                 // not a static, can't be recursive, hash the allocation
                 1.hash_stable(hcx, hasher);
                 alloc.hash_stable(hcx, hasher);
-            } else if let Some(inst) = interner.get_fn(*self) {
+            } else if let Some(inst) = tcx.interpret_interner.get_fn(*self) {
                 2.hash_stable(hcx, hasher);
                 inst.hash_stable(hcx, hasher);
             } else {
@@ -1897,7 +1897,6 @@ pub fn print_miri_value<W: Write>(value: Value, ty: Ty, f: &mut W) -> fmt::Resul
             ty::tls::with(|tcx| {
                 let alloc = tcx
                     .interpret_interner
-                    .borrow()
                     .get_alloc(ptr.alloc_id);
                 if let Some(alloc) = alloc {
                     assert_eq!(len as usize as u128, len);
@@ -869,7 +869,7 @@ pub struct GlobalCtxt<'tcx> {

     stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,

-    pub interpret_interner: RefCell<InterpretInterner<'tcx>>,
+    pub interpret_interner: InterpretInterner<'tcx>,

     layout_interner: RefCell<FxHashSet<&'tcx LayoutDetails>>,

@@ -893,6 +893,11 @@ pub struct GlobalCtxt<'tcx> {
 /// Everything needed to efficiently work with interned allocations
 #[derive(Debug, Default)]
 pub struct InterpretInterner<'tcx> {
+    inner: RefCell<InterpretInternerInner<'tcx>>,
+}
+
+#[derive(Debug, Default)]
+struct InterpretInternerInner<'tcx> {
     /// Stores the value of constants (and deduplicates the actual memory)
     allocs: FxHashSet<&'tcx interpret::Allocation>,

@@ -925,14 +930,15 @@ pub struct InterpretInterner<'tcx> {
 }

 impl<'tcx> InterpretInterner<'tcx> {
-    pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> interpret::AllocId {
-        if let Some(&alloc_id) = self.function_cache.get(&instance) {
+    pub fn create_fn_alloc(&self, instance: Instance<'tcx>) -> interpret::AllocId {
+        if let Some(&alloc_id) = self.inner.borrow().function_cache.get(&instance) {
             return alloc_id;
         }
         let id = self.reserve();
         debug!("creating fn ptr: {}", id);
-        self.functions.insert(id, instance);
-        self.function_cache.insert(instance, id);
+        let mut inner = self.inner.borrow_mut();
+        inner.functions.insert(id, instance);
+        inner.function_cache.insert(instance, id);
         id
     }

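The two hunks above are the heart of the change: InterpretInterner now owns a private InterpretInternerInner behind a single RefCell, and every public method takes &self and borrows internally. A reduced, self-contained sketch of that pattern (names and types are simplified stand-ins, not the actual rustc code):

use std::cell::RefCell;
use std::collections::HashMap;

#[derive(Debug, Default)]
pub struct Interner {
    // The RefCell is an implementation detail; no caller ever sees it.
    inner: RefCell<InternerInner>,
}

#[derive(Debug, Default)]
struct InternerInner {
    functions: HashMap<u64, String>,
    function_cache: HashMap<String, u64>,
    next_id: u64,
}

impl Interner {
    pub fn create_fn_alloc(&self, instance: String) -> u64 {
        // The Ref from this borrow() is dropped at the end of the `if let`,
        // before reserve() takes its own borrow_mut().
        if let Some(&id) = self.inner.borrow().function_cache.get(&instance) {
            return id;
        }
        let id = self.reserve();
        let mut inner = self.inner.borrow_mut();
        inner.functions.insert(id, instance.clone());
        inner.function_cache.insert(instance, id);
        id
    }

    pub fn get_fn(&self, id: u64) -> Option<String> {
        self.inner.borrow().functions.get(&id).cloned()
    }

    pub fn reserve(&self) -> u64 {
        let mut inner = self.inner.borrow_mut();
        let next = inner.next_id;
        inner.next_id += 1;
        next
    }
}

fn main() {
    let interner = Interner::default();
    let id = interner.create_fn_alloc("my_fn".to_string());
    assert_eq!(interner.create_fn_alloc("my_fn".to_string()), id);
    assert_eq!(interner.get_fn(id).as_deref(), Some("my_fn"));
}

Each borrow now lives exactly as long as one method body, which is what makes the &mut self to &self change in create_fn_alloc (and in the methods in the following hunks) safe.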
@@ -940,30 +946,31 @@ impl<'tcx> InterpretInterner<'tcx> {
         &self,
         id: interpret::AllocId,
     ) -> Option<Instance<'tcx>> {
-        self.functions.get(&id).cloned()
+        self.inner.borrow().functions.get(&id).cloned()
     }

     pub fn get_alloc(
         &self,
         id: interpret::AllocId,
     ) -> Option<&'tcx interpret::Allocation> {
-        self.alloc_by_id.get(&id).cloned()
+        self.inner.borrow().alloc_by_id.get(&id).cloned()
     }

     pub fn get_cached(
         &self,
         static_id: DefId,
     ) -> Option<interpret::AllocId> {
-        self.alloc_cache.get(&static_id).cloned()
+        self.inner.borrow().alloc_cache.get(&static_id).cloned()
     }

     pub fn cache(
-        &mut self,
+        &self,
         static_id: DefId,
         alloc_id: interpret::AllocId,
     ) {
-        self.global_cache.insert(alloc_id, static_id);
-        if let Some(old) = self.alloc_cache.insert(static_id, alloc_id) {
+        let mut inner = self.inner.borrow_mut();
+        inner.global_cache.insert(alloc_id, static_id);
+        if let Some(old) = inner.alloc_cache.insert(static_id, alloc_id) {
             bug!("tried to cache {:?}, but was already existing as {:#?}", static_id, old);
         }
     }
@@ -972,15 +979,15 @@ impl<'tcx> InterpretInterner<'tcx> {
         &self,
         ptr: interpret::AllocId,
     ) -> Option<DefId> {
-        self.global_cache.get(&ptr).cloned()
+        self.inner.borrow().global_cache.get(&ptr).cloned()
     }

     pub fn intern_at_reserved(
-        &mut self,
+        &self,
         id: interpret::AllocId,
         alloc: &'tcx interpret::Allocation,
     ) {
-        if let Some(old) = self.alloc_by_id.insert(id, alloc) {
+        if let Some(old) = self.inner.borrow_mut().alloc_by_id.insert(id, alloc) {
             bug!("tried to intern allocation at {}, but was already existing as {:#?}", id, old);
         }
     }
@@ -988,10 +995,11 @@ impl<'tcx> InterpretInterner<'tcx> {
     /// obtains a new allocation ID that can be referenced but does not
     /// yet have an allocation backing it.
     pub fn reserve(
-        &mut self,
+        &self,
     ) -> interpret::AllocId {
-        let next = self.next_id;
-        self.next_id.0 = self.next_id.0
+        let mut inner = self.inner.borrow_mut();
+        let next = inner.next_id;
+        inner.next_id.0 = inner.next_id.0
             .checked_add(1)
             .expect("You overflowed a u64 by incrementing by 1... \
                      You've just earned yourself a free drink if we ever meet. \
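A side note on the design, not something this commit does: if only the ID counter needed interior mutability, a Cell would suffice, but the interner also hands out values from hash maps and sets, so everything stays behind the one RefCell and reserve() goes through borrow_mut() like the other methods. A hypothetical counter-only variant for comparison:

use std::cell::Cell;

// Illustration only: a Cell-based allocator works when the shared state is
// a single Copy value, which is not the case for the interner's maps.
#[derive(Debug, Default)]
struct IdAllocator {
    next_id: Cell<u64>,
}

impl IdAllocator {
    fn reserve(&self) -> u64 {
        let next = self.next_id.get();
        let incremented = next
            .checked_add(1)
            .expect("ran out of 64-bit allocation ids");
        self.next_id.set(incremented);
        next
    }
}

fn main() {
    let ids = IdAllocator::default();
    assert_eq!(ids.reserve(), 0);
    assert_eq!(ids.reserve(), 1);
}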
@@ -1071,12 +1079,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
         self,
         alloc: interpret::Allocation,
     ) -> &'gcx interpret::Allocation {
-        if let Some(alloc) = self.interpret_interner.borrow().allocs.get(&alloc) {
+        if let Some(alloc) = self.interpret_interner.inner.borrow().allocs.get(&alloc) {
             return alloc;
         }

         let interned = self.global_arenas.const_allocs.alloc(alloc);
-        if let Some(prev) = self.interpret_interner.borrow_mut().allocs.replace(interned) {
+        if let Some(prev) = self.interpret_interner.inner.borrow_mut().allocs.replace(interned) {
             bug!("Tried to overwrite interned Allocation: {:#?}", prev)
         }
         interned
@@ -1085,20 +1093,20 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     /// Allocates a byte or string literal for `mir::interpret`
     pub fn allocate_cached(self, bytes: &[u8]) -> interpret::AllocId {
         // check whether we already allocated this literal or a constant with the same memory
-        if let Some(&alloc_id) = self.interpret_interner.borrow().literal_alloc_cache.get(bytes) {
+        if let Some(&alloc_id) = self.interpret_interner.inner.borrow()
+            .literal_alloc_cache.get(bytes) {
             return alloc_id;
         }
         // create an allocation that just contains these bytes
         let alloc = interpret::Allocation::from_bytes(bytes);
         let alloc = self.intern_const_alloc(alloc);

-        let mut int = self.interpret_interner.borrow_mut();
         // the next unique id
-        let id = int.reserve();
+        let id = self.interpret_interner.reserve();
         // make the allocation identifiable
-        int.alloc_by_id.insert(id, alloc);
+        self.interpret_interner.inner.borrow_mut().alloc_by_id.insert(id, alloc);
         // cache it for the future
-        int.literal_alloc_cache.insert(bytes.to_owned(), id);
+        self.interpret_interner.inner.borrow_mut().literal_alloc_cache.insert(bytes.to_owned(), id);
         id
     }

@@ -1776,7 +1784,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
         println!("Substs interner: #{}", self.interners.substs.borrow().len());
         println!("Region interner: #{}", self.interners.region.borrow().len());
         println!("Stability interner: #{}", self.stability_interner.borrow().len());
-        println!("Interpret interner: #{}", self.interpret_interner.borrow().allocs.len());
+        println!("Interpret interner: #{}", self.interpret_interner.inner.borrow().allocs.len());
         println!("Layout interner: #{}", self.layout_interner.borrow().len());
     }
 }
@@ -548,11 +548,10 @@ impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, '
     fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
         const MAX1: usize = usize::max_value() - 1;
         let tcx = self.tcx;
-        let interpret_interner = || tcx.interpret_interner.borrow_mut();
         let pos = TyDecoder::position(self);
         match usize::decode(self)? {
             ::std::usize::MAX => {
-                let alloc_id = interpret_interner().reserve();
+                let alloc_id = tcx.interpret_interner.reserve();
                 trace!("creating alloc id {:?} at {}", alloc_id, pos);
                 // insert early to allow recursive allocs
                 self.interpret_alloc_cache.insert(pos, alloc_id);
@@ -560,10 +559,10 @@ impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, '
                 let allocation = interpret::Allocation::decode(self)?;
                 trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
                 let allocation = self.tcx.intern_const_alloc(allocation);
-                interpret_interner().intern_at_reserved(alloc_id, allocation);
+                tcx.interpret_interner.intern_at_reserved(alloc_id, allocation);

                 if let Some(glob) = Option::<DefId>::decode(self)? {
-                    interpret_interner().cache(glob, alloc_id);
+                    tcx.interpret_interner.cache(glob, alloc_id);
                 }

                 Ok(alloc_id)
@@ -572,7 +571,7 @@ impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, '
                 trace!("creating fn alloc id at {}", pos);
                 let instance = ty::Instance::decode(self)?;
                 trace!("decoded fn alloc instance: {:?}", instance);
-                let id = interpret_interner().create_fn_alloc(instance);
+                let id = tcx.interpret_interner.create_fn_alloc(instance);
                 trace!("created fn alloc id: {:?}", id);
                 self.interpret_alloc_cache.insert(pos, id);
                 Ok(id)
@@ -796,14 +795,14 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<
         // cache the allocation shorthand now, because the allocation itself might recursively
         // point to itself.
         self.interpret_alloc_shorthands.insert(*alloc_id, start);
-        if let Some(alloc) = self.tcx.interpret_interner.borrow().get_alloc(*alloc_id) {
+        if let Some(alloc) = self.tcx.interpret_interner.get_alloc(*alloc_id) {
             trace!("encoding {:?} with {:#?}", alloc_id, alloc);
             usize::max_value().encode(self)?;
             alloc.encode(self)?;
-            self.tcx.interpret_interner.borrow()
+            self.tcx.interpret_interner
                 .get_corresponding_static_def_id(*alloc_id)
                 .encode(self)?;
-        } else if let Some(fn_instance) = self.tcx.interpret_interner.borrow().get_fn(*alloc_id) {
+        } else if let Some(fn_instance) = self.tcx.interpret_interner.get_fn(*alloc_id) {
             trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
             (usize::max_value() - 1).encode(self)?;
             fn_instance.encode(self)?;
@@ -283,12 +283,11 @@ impl<'a, 'tcx> SpecializedDecoder<LocalDefId> for DecodeContext<'a, 'tcx> {
 impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
         const MAX1: usize = usize::max_value() - 1;
-        let tcx = self.tcx;
-        let interpret_interner = || tcx.unwrap().interpret_interner.borrow_mut();
+        let tcx = self.tcx.unwrap();
         let pos = self.position();
         match usize::decode(self)? {
             ::std::usize::MAX => {
-                let alloc_id = interpret_interner().reserve();
+                let alloc_id = tcx.interpret_interner.reserve();
                 trace!("creating alloc id {:?} at {}", alloc_id, pos);
                 // insert early to allow recursive allocs
                 self.interpret_alloc_cache.insert(pos, alloc_id);
@@ -296,10 +295,10 @@ impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx
                 let allocation = interpret::Allocation::decode(self)?;
                 trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
                 let allocation = self.tcx.unwrap().intern_const_alloc(allocation);
-                interpret_interner().intern_at_reserved(alloc_id, allocation);
+                tcx.interpret_interner.intern_at_reserved(alloc_id, allocation);

                 if let Some(glob) = Option::<DefId>::decode(self)? {
-                    interpret_interner().cache(glob, alloc_id);
+                    tcx.interpret_interner.cache(glob, alloc_id);
                 }

                 Ok(alloc_id)
@@ -308,7 +307,7 @@ impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx
                 trace!("creating fn alloc id at {}", pos);
                 let instance = ty::Instance::decode(self)?;
                 trace!("decoded fn alloc instance: {:?}", instance);
-                let id = interpret_interner().create_fn_alloc(instance);
+                let id = tcx.interpret_interner.create_fn_alloc(instance);
                 trace!("created fn alloc id: {:?}", id);
                 self.interpret_alloc_cache.insert(pos, id);
                 Ok(id)
@@ -205,14 +205,14 @@ impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx
         // cache the allocation shorthand now, because the allocation itself might recursively
         // point to itself.
         self.interpret_alloc_shorthands.insert(*alloc_id, start);
-        if let Some(alloc) = self.tcx.interpret_interner.borrow().get_alloc(*alloc_id) {
+        if let Some(alloc) = self.tcx.interpret_interner.get_alloc(*alloc_id) {
             trace!("encoding {:?} with {:#?}", alloc_id, alloc);
             usize::max_value().encode(self)?;
             alloc.encode(self)?;
-            self.tcx.interpret_interner.borrow()
+            self.tcx.interpret_interner
                 .get_corresponding_static_def_id(*alloc_id)
                 .encode(self)?;
-        } else if let Some(fn_instance) = self.tcx.interpret_interner.borrow().get_fn(*alloc_id) {
+        } else if let Some(fn_instance) = self.tcx.interpret_interner.get_fn(*alloc_id) {
             trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
             (usize::max_value() - 1).encode(self)?;
             fn_instance.encode(self)?;
@@ -263,7 +263,6 @@ impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> {
             ConstVal::Value(Value::ByVal(PrimVal::Ptr(p))) => {
                 self.tcx()
                     .interpret_interner
-                    .borrow()
                     .get_fn(p.alloc_id)
                     .map(|instance| instance.def_id())
             },
@@ -1044,7 +1043,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
                 ..
             }) => match val {
                 ConstVal::Value(Value::ByVal(PrimVal::Ptr(p))) => {
-                    let inst = self.tcx().interpret_interner.borrow().get_fn(p.alloc_id);
+                    let inst = self.tcx().interpret_interner.get_fn(p.alloc_id);
                     inst.map_or(false, |inst| {
                         Some(inst.def_id()) == self.tcx().lang_items().box_free_fn()
                     })
@@ -192,7 +192,6 @@ impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
                 assert!(is_array_ptr);
                 let alloc = tcx
                     .interpret_interner
-                    .borrow()
                     .get_alloc(ptr.alloc_id)
                     .unwrap();
                 assert_eq!(ptr.offset, 0);
@@ -568,7 +567,6 @@ fn max_slice_length<'p, 'a: 'p, 'tcx: 'a, I>(
                 if is_array_ptr {
                     let alloc = cx.tcx
                         .interpret_interner
-                        .borrow()
                         .get_alloc(ptr.alloc_id)
                         .unwrap();
                     max_fixed_len = cmp::max(max_fixed_len, alloc.bytes.len() as u64);
@@ -958,7 +956,6 @@ fn slice_pat_covered_by_constructor(tcx: TyCtxt, _span: Span,
             assert!(is_array_ptr);
             tcx
                 .interpret_interner
-                .borrow()
                 .get_alloc(ptr.alloc_id)
                 .unwrap()
                 .bytes
@@ -1099,7 +1096,6 @@ fn specialize<'p, 'a: 'p, 'tcx: 'a>(
             assert!(is_array_ptr);
             let data_len = cx.tcx
                 .interpret_interner
-                .borrow()
                 .get_alloc(ptr.alloc_id)
                 .unwrap()
                 .bytes
@@ -106,7 +106,7 @@ fn eval_body_and_ecx<'a, 'mir, 'tcx>(
         mir = &mir.promoted[index];
     }
     let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
-    let alloc = tcx.interpret_interner.borrow().get_cached(cid.instance.def_id());
+    let alloc = tcx.interpret_interner.get_cached(cid.instance.def_id());
     let alloc = match alloc {
         Some(alloc) => {
             assert!(cid.promoted.is_none());
@@ -121,7 +121,7 @@ fn eval_body_and_ecx<'a, 'mir, 'tcx>(
                 None,
             )?;
            if tcx.is_static(cid.instance.def_id()).is_some() {
-                tcx.interpret_interner.borrow_mut().cache(cid.instance.def_id(), ptr.alloc_id);
+                tcx.interpret_interner.cache(cid.instance.def_id(), ptr.alloc_id);
             }
             let span = tcx.def_span(cid.instance.def_id());
             let internally_mutable = !layout.ty.is_freeze(tcx, param_env, span);
@@ -343,7 +343,6 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
         Ok(ecx
             .tcx
             .interpret_interner
-            .borrow()
             .get_cached(cid.instance.def_id())
             .expect("uncached static"))
     }
@@ -457,13 +456,13 @@ pub fn const_eval_provider<'a, 'tcx>(
     let span = tcx.def_span(def_id);

     if tcx.is_foreign_item(def_id) {
-        let id = tcx.interpret_interner.borrow().get_cached(def_id);
+        let id = tcx.interpret_interner.get_cached(def_id);
         let id = match id {
             // FIXME: due to caches this shouldn't happen, add some assertions
             Some(id) => id,
             None => {
-                let id = tcx.interpret_interner.borrow_mut().reserve();
-                tcx.interpret_interner.borrow_mut().cache(def_id, id);
+                let id = tcx.interpret_interner.reserve();
+                tcx.interpret_interner.cache(def_id, id);
                 id
             },
         };
@@ -945,7 +945,6 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
         let cached = self
             .tcx
             .interpret_interner
-            .borrow()
             .get_cached(gid.instance.def_id());
         if let Some(alloc_id) = cached {
             let layout = self.layout_of(ty)?;
@@ -75,7 +75,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
     }

     pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> MemoryPointer {
-        let id = self.tcx.interpret_interner.borrow_mut().create_fn_alloc(instance);
+        let id = self.tcx.interpret_interner.create_fn_alloc(instance);
         MemoryPointer::new(id, 0)
     }

@@ -107,7 +107,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             align,
             mutable: false,
         };
-        let id = self.tcx.interpret_interner.borrow_mut().reserve();
+        let id = self.tcx.interpret_interner.reserve();
         M::add_lock(self, id);
         match kind {
             Some(kind @ MemoryKind::Stack) |
@@ -186,12 +186,12 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
                 "uninitializedstatic".to_string(),
                 format!("{:?}", kind),
             ))
-        } else if self.tcx.interpret_interner.borrow().get_fn(ptr.alloc_id).is_some() {
+        } else if self.tcx.interpret_interner.get_fn(ptr.alloc_id).is_some() {
             return err!(DeallocatedWrongMemoryKind(
                 "function".to_string(),
                 format!("{:?}", kind),
             ))
-        } else if self.tcx.interpret_interner.borrow().get_alloc(ptr.alloc_id).is_some() {
+        } else if self.tcx.interpret_interner.get_alloc(ptr.alloc_id).is_some() {
             return err!(DeallocatedWrongMemoryKind(
                 "static".to_string(),
                 format!("{:?}", kind),
@@ -295,11 +295,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             None => match self.uninitialized_statics.get(&id) {
                 Some(alloc) => Ok(alloc),
                 None => {
-                    let int = self.tcx.interpret_interner.borrow();
                     // static alloc?
-                    int.get_alloc(id)
+                    self.tcx.interpret_interner.get_alloc(id)
                         // no alloc? produce an error
-                        .ok_or_else(|| if int.get_fn(id).is_some() {
+                        .ok_or_else(|| if self.tcx.interpret_interner.get_fn(id).is_some() {
                             EvalErrorKind::DerefFunctionPointer.into()
                         } else {
                             EvalErrorKind::DanglingPointerDeref.into()
@@ -320,11 +319,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             None => match self.uninitialized_statics.get_mut(&id) {
                 Some(alloc) => Ok(alloc),
                 None => {
-                    let int = self.tcx.interpret_interner.borrow();
                     // no alloc or immutable alloc? produce an error
-                    if int.get_alloc(id).is_some() {
+                    if self.tcx.interpret_interner.get_alloc(id).is_some() {
                         err!(ModifiedConstantMemory)
-                    } else if int.get_fn(id).is_some() {
+                    } else if self.tcx.interpret_interner.get_fn(id).is_some() {
                         err!(DerefFunctionPointer)
                     } else {
                         err!(DanglingPointerDeref)
@@ -341,7 +339,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         debug!("reading fn ptr: {}", ptr.alloc_id);
         self.tcx
             .interpret_interner
-            .borrow()
             .get_fn(ptr.alloc_id)
             .ok_or(EvalErrorKind::ExecuteMemory.into())
     }
@@ -376,9 +373,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
                 Some(a) => (a, " (static in the process of initialization)".to_owned()),
                 None => {
                     // static alloc?
-                    match self.tcx.interpret_interner.borrow().get_alloc(id) {
+                    match self.tcx.interpret_interner.get_alloc(id) {
                         Some(a) => (a, "(immutable)".to_owned()),
-                        None => if let Some(func) = self.tcx.interpret_interner.borrow().get_fn(id) {
+                        None => if let Some(func) = self.tcx.interpret_interner.get_fn(id) {
                             trace!("{} {}", msg, func);
                             continue;
                         } else {
@@ -549,7 +546,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         // ensure llvm knows not to put this into immutable memroy
         alloc.mutable = mutability == Mutability::Mutable;
         let alloc = self.tcx.intern_const_alloc(alloc);
-        self.tcx.interpret_interner.borrow_mut().intern_at_reserved(alloc_id, alloc);
+        self.tcx.interpret_interner.intern_at_reserved(alloc_id, alloc);
         // recurse into inner allocations
         for &alloc in alloc.relocations.values() {
             self.mark_inner_allocation_initialized(alloc, mutability)?;
@@ -197,7 +197,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
                 let alloc = self
                     .tcx
                     .interpret_interner
-                    .borrow()
                     .get_cached(static_.def_id);
                 let layout = self.layout_of(self.place_ty(mir_place))?;
                 if let Some(alloc) = alloc {
@@ -1117,12 +1117,12 @@ fn collect_miri<'a, 'tcx>(
     alloc_id: AllocId,
     output: &mut Vec<MonoItem<'tcx>>,
 ) {
-    if let Some(alloc) = tcx.interpret_interner.borrow().get_alloc(alloc_id) {
+    if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
         trace!("collecting {:?} with {:#?}", alloc_id, alloc);
         for &inner in alloc.relocations.values() {
             collect_miri(tcx, inner, output);
         }
-    } else if let Some(fn_instance) = tcx.interpret_interner.borrow().get_fn(alloc_id) {
+    } else if let Some(fn_instance) = tcx.interpret_interner.get_fn(alloc_id) {
         if should_monomorphize_locally(tcx, &fn_instance) {
             trace!("collecting {:?} with {:#?}", alloc_id, fn_instance);
             output.push(create_fn_mono_item(fn_instance));
@@ -151,18 +151,17 @@ pub fn primval_to_llvm(cx: &CodegenCx,
             }
         },
         PrimVal::Ptr(ptr) => {
-            if let Some(fn_instance) = cx.tcx.interpret_interner.borrow().get_fn(ptr.alloc_id) {
+            if let Some(fn_instance) = cx.tcx.interpret_interner.get_fn(ptr.alloc_id) {
                 callee::get_fn(cx, fn_instance)
             } else {
                 let static_ = cx
                     .tcx
                     .interpret_interner
-                    .borrow()
                     .get_corresponding_static_def_id(ptr.alloc_id);
                 let base_addr = if let Some(def_id) = static_ {
                     assert!(cx.tcx.is_static(def_id).is_some());
                     consts::get_static(cx, def_id)
-                } else if let Some(alloc) = cx.tcx.interpret_interner.borrow()
+                } else if let Some(alloc) = cx.tcx.interpret_interner
                     .get_alloc(ptr.alloc_id) {
                     let init = global_initializer(cx, alloc);
                     if alloc.mutable {
@@ -239,14 +238,12 @@ pub fn trans_static_initializer<'a, 'tcx>(
     let alloc_id = cx
         .tcx
         .interpret_interner
-        .borrow()
         .get_cached(def_id)
         .expect("global not cached");

     let alloc = cx
         .tcx
         .interpret_interner
-        .borrow()
         .get_alloc(alloc_id)
         .expect("miri allocation never successfully created");
     Ok(global_initializer(cx, alloc))