Rollup merge of #87089 - RalfJung:ctfe-memory-cleanup, r=oli-obk

CTFE engine: small cleanups

I noticed these while preparing a large PR, and figured I'd better send them ahead to not muddy the diff unnecessarily.

- remove the remaining uses of `Pointer` in the `Allocation` API (I missed those in https://github.com/rust-lang/rust/pull/85472); the `AllocRange` type that replaces them is sketched below
- remove the `deallocate_local` hack (this logic no longer seems necessary)
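
For context, `AllocRange` (introduced in #85472) looks roughly like the following; this is a paraphrase for readers of the diff, not the verbatim rustc definition:

```rust
use rustc_target::abi::Size;

// Sketch of the AllocRange type from rustc's interpret module (paraphrased).
// A byte range within an allocation, replacing the old (Pointer, Size) pairs.
#[derive(Copy, Clone)]
pub struct AllocRange {
    pub start: Size, // byte offset within the allocation
    pub size: Size,  // length of the range in bytes
}

// Shorthand constructor, as used by `alloc_range(src.offset, size)` in the
// diff below.
#[inline(always)]
pub fn alloc_range(start: Size, size: Size) -> AllocRange {
    AllocRange { start, size }
}
```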

r? `@oli-obk`
Yuki Okushi 2021-07-13 08:54:39 +09:00 committed by GitHub
commit e46b790b9d
3 changed files with 18 additions and 26 deletions

File 1 of 3:

```diff
@@ -512,7 +512,7 @@ pub fn no_bytes_init(&self) -> bool {
 /// Transferring the initialization mask to other allocations.
 impl<Tag, Extra> Allocation<Tag, Extra> {
     /// Creates a run-length encoding of the initialization mask.
-    pub fn compress_uninit_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
+    pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed {
         // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
         // a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
         // the source and write it to the destination. Even if we optimized the memory accesses,
@@ -526,13 +526,13 @@ pub fn compress_uninit_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
         // where each element toggles the state.

         let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
-        let initial = self.init_mask.get(src.offset);
+        let initial = self.init_mask.get(range.start);
         let mut cur_len = 1;
         let mut cur = initial;

-        for i in 1..size.bytes() {
+        for i in 1..range.size.bytes() {
             // FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
-            if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
+            if self.init_mask.get(range.start + Size::from_bytes(i)) == cur {
                 cur_len += 1;
             } else {
                 ranges.push(cur_len);
```
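The encoding built here stores the state of the first byte plus a list of run lengths, with the state toggling between consecutive runs. A self-contained sketch of the same scheme over a plain `&[bool]` (hypothetical names; the real code operates on `InitMask` and `Size` instead):

```rust
/// Run-length encoding of a boolean mask: `initial` is the state of the
/// first element, and each entry in `runs` is the length of a run, with
/// the state toggling between consecutive runs.
struct CompressedMask {
    initial: bool,
    runs: Vec<u64>,
}

/// Compress a non-empty mask (mirrors the `1..size.bytes()` loop above,
/// which likewise assumes at least one byte).
fn compress(mask: &[bool]) -> CompressedMask {
    let initial = mask[0];
    let mut runs = Vec::new();
    let mut cur = initial;
    let mut cur_len = 1u64;
    for &bit in &mask[1..] {
        if bit == cur {
            cur_len += 1;
        } else {
            // State toggled: record the finished run and start a new one.
            runs.push(cur_len);
            cur = bit;
            cur_len = 1;
        }
    }
    runs.push(cur_len);
    CompressedMask { initial, runs }
}

fn main() {
    // [true, true, false, false, false, true] encodes as
    // initial = true, runs = [2, 3, 1].
    let c = compress(&[true, true, false, false, false, true]);
    assert!(c.initial);
    assert_eq!(c.runs, vec![2, 3, 1]);
}
```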
```diff
@@ -550,24 +550,23 @@ pub fn compress_uninit_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
     pub fn mark_compressed_init_range(
         &mut self,
         defined: &InitMaskCompressed,
-        dest: Pointer<Tag>,
-        size: Size,
+        range: AllocRange,
         repeat: u64,
     ) {
         // An optimization where we can just overwrite an entire range of initialization
         // bits if they are going to be uniformly `1` or `0`.
         if defined.ranges.len() <= 1 {
             self.init_mask.set_range_inbounds(
-                dest.offset,
-                dest.offset + size * repeat, // `Size` operations
+                range.start,
+                range.start + range.size * repeat, // `Size` operations
                 defined.initial,
             );
             return;
         }

         for mut j in 0..repeat {
-            j *= size.bytes();
-            j += dest.offset.bytes();
+            j *= range.size.bytes();
+            j += range.start.bytes();
             let mut cur = defined.initial;
             for range in &defined.ranges {
                 let old_j = j;
```
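The inverse direction, replaying a compressed mask into `repeat` back-to-back copies, mirrors the toggle loop above. A standalone sketch reusing the `CompressedMask` type from the previous snippet (the real `mark_compressed_init_range` writes into the destination's `InitMask` in place rather than materializing a `Vec`):

```rust
/// Expand a compressed mask into `repeat` concatenated copies,
/// mirroring the toggle-replay loop in `mark_compressed_init_range`.
fn decompress(c: &CompressedMask, repeat: u64) -> Vec<bool> {
    let mut out = Vec::new();
    for _ in 0..repeat {
        let mut cur = c.initial;
        for &run in &c.runs {
            // Write `run` elements in the current state, then toggle.
            out.extend(std::iter::repeat(cur).take(run as usize));
            cur = !cur;
        }
    }
    out
}
// E.g. decompress(&compress(&mask), 2) yields `mask` twice in a row.
```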

File 2 of 3:

```diff
@@ -18,8 +18,8 @@
 use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};

 use super::{
-    Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, Operand, Place, PlaceTy,
-    ScalarMaybeUninit, StackPopJump,
+    Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, MemoryKind, Operand, Place,
+    PlaceTy, ScalarMaybeUninit, StackPopJump,
 };
 use crate::transform::validate::equal_up_to_regions;
 use crate::util::storage::AlwaysLiveLocals;
@@ -900,7 +900,7 @@ fn deallocate_local(&mut self, local: LocalValue<M::PointerTag>) -> InterpResult<'tcx> {
             // due to the local having ZST type.
             let ptr = ptr.assert_ptr();
             trace!("deallocating local: {:?}", self.memory.dump_alloc(ptr.alloc_id));
-            self.memory.deallocate_local(ptr)?;
+            self.memory.deallocate(ptr, None, MemoryKind::Stack)?;
         };
         Ok(())
     }
```

File 3 of 3:

```diff
@@ -276,17 +276,6 @@ pub fn reallocate(
         Ok(new_ptr)
     }

-    /// Deallocate a local, or do nothing if that local has been made into a global.
-    pub fn deallocate_local(&mut self, ptr: Pointer<M::PointerTag>) -> InterpResult<'tcx> {
-        // The allocation might be already removed by global interning.
-        // This can only really happen in the CTFE instance, not in miri.
-        if self.alloc_map.contains_key(&ptr.alloc_id) {
-            self.deallocate(ptr, None, MemoryKind::Stack)
-        } else {
-            Ok(())
-        }
-    }
-
     pub fn deallocate(
         &mut self,
         ptr: Pointer<M::PointerTag>,
```
```diff
@@ -1049,7 +1038,7 @@ pub fn copy_repeatedly(
             num_copies,
         );
         // Prepare a copy of the initialization mask.
-        let compressed = src_alloc.compress_uninit_range(src, size);
+        let compressed = src_alloc.compress_uninit_range(alloc_range(src.offset, size));
         // This checks relocation edges on the src.
         let src_bytes = src_alloc
             .get_bytes_with_uninit_and_ptr(&tcx, alloc_range(src.offset, size))
@@ -1110,7 +1099,11 @@ pub fn copy_repeatedly(
         }

         // now fill in all the "init" data
-        dest_alloc.mark_compressed_init_range(&compressed, dest, size, num_copies);
+        dest_alloc.mark_compressed_init_range(
+            &compressed,
+            alloc_range(dest.offset, size),
+            num_copies,
+        );
         // copy the relocations to the destination
         dest_alloc.mark_relocation_range(relocations);
```