diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index c2645a09140..75cbb55239c 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -512,7 +512,7 @@ pub fn no_bytes_init(&self) -> bool {
 /// Transferring the initialization mask to other allocations.
 impl Allocation {
     /// Creates a run-length encoding of the initialization mask.
-    pub fn compress_uninit_range(&self, src: Pointer, size: Size) -> InitMaskCompressed {
+    pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed {
         // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
         // a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
         // the source and write it to the destination. Even if we optimized the memory accesses,
@@ -526,13 +526,13 @@ pub fn compress_uninit_range(&self, src: Pointer, size: Size) -> InitMaskCo
         // where each element toggles the state.
 
         let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
-        let initial = self.init_mask.get(src.offset);
+        let initial = self.init_mask.get(range.start);
         let mut cur_len = 1;
         let mut cur = initial;
 
-        for i in 1..size.bytes() {
+        for i in 1..range.size.bytes() {
             // FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
-            if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
+            if self.init_mask.get(range.start + Size::from_bytes(i)) == cur {
                 cur_len += 1;
             } else {
                 ranges.push(cur_len);
@@ -550,24 +550,23 @@ pub fn compress_uninit_range(&self, src: Pointer, size: Size) -> InitMaskCo
     pub fn mark_compressed_init_range(
         &mut self,
         defined: &InitMaskCompressed,
-        dest: Pointer,
-        size: Size,
+        range: AllocRange,
         repeat: u64,
     ) {
         // An optimization where we can just overwrite an entire range of initialization
         // bits if they are going to be uniformly `1` or `0`.
         if defined.ranges.len() <= 1 {
             self.init_mask.set_range_inbounds(
-                dest.offset,
-                dest.offset + size * repeat, // `Size` operations
+                range.start,
+                range.start + range.size * repeat, // `Size` operations
                 defined.initial,
             );
             return;
         }
 
         for mut j in 0..repeat {
-            j *= size.bytes();
-            j += dest.offset.bytes();
+            j *= range.size.bytes();
+            j += range.start.bytes();
             let mut cur = defined.initial;
             for range in &defined.ranges {
                 let old_j = j;
diff --git a/compiler/rustc_mir/src/interpret/eval_context.rs b/compiler/rustc_mir/src/interpret/eval_context.rs
index 648a7abfdc7..8cd459265df 100644
--- a/compiler/rustc_mir/src/interpret/eval_context.rs
+++ b/compiler/rustc_mir/src/interpret/eval_context.rs
@@ -18,8 +18,8 @@
 use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};
 
 use super::{
-    Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, Operand, Place, PlaceTy,
-    ScalarMaybeUninit, StackPopJump,
+    Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, MemoryKind, Operand, Place,
+    PlaceTy, ScalarMaybeUninit, StackPopJump,
 };
 use crate::transform::validate::equal_up_to_regions;
 use crate::util::storage::AlwaysLiveLocals;
@@ -900,7 +900,7 @@ fn deallocate_local(&mut self, local: LocalValue) -> InterpResult
             // due to the local having ZST type.
             let ptr = ptr.assert_ptr();
             trace!("deallocating local: {:?}", self.memory.dump_alloc(ptr.alloc_id));
-            self.memory.deallocate_local(ptr)?;
+            self.memory.deallocate(ptr, None, MemoryKind::Stack)?;
         };
         Ok(())
     }
diff --git a/compiler/rustc_mir/src/interpret/memory.rs b/compiler/rustc_mir/src/interpret/memory.rs
index cb929c21850..5f719cc1607 100644
--- a/compiler/rustc_mir/src/interpret/memory.rs
+++ b/compiler/rustc_mir/src/interpret/memory.rs
@@ -276,17 +276,6 @@ pub fn reallocate(
         Ok(new_ptr)
     }
 
-    /// Deallocate a local, or do nothing if that local has been made into a global.
-    pub fn deallocate_local(&mut self, ptr: Pointer) -> InterpResult<'tcx> {
-        // The allocation might be already removed by global interning.
-        // This can only really happen in the CTFE instance, not in miri.
-        if self.alloc_map.contains_key(&ptr.alloc_id) {
-            self.deallocate(ptr, None, MemoryKind::Stack)
-        } else {
-            Ok(())
-        }
-    }
-
     pub fn deallocate(
         &mut self,
         ptr: Pointer,
@@ -1049,7 +1038,7 @@ pub fn copy_repeatedly(
             num_copies,
         );
         // Prepare a copy of the initialization mask.
-        let compressed = src_alloc.compress_uninit_range(src, size);
+        let compressed = src_alloc.compress_uninit_range(alloc_range(src.offset, size));
         // This checks relocation edges on the src.
         let src_bytes = src_alloc
             .get_bytes_with_uninit_and_ptr(&tcx, alloc_range(src.offset, size))
@@ -1110,7 +1099,11 @@ pub fn copy_repeatedly(
         }
 
         // now fill in all the "init" data
-        dest_alloc.mark_compressed_init_range(&compressed, dest, size, num_copies);
+        dest_alloc.mark_compressed_init_range(
+            &compressed,
+            alloc_range(dest.offset, size),
+            num_copies,
+        );
 
         // copy the relocations to the destination
        dest_alloc.mark_relocation_range(relocations);
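
For reference, the run-length encoding that `compress_uninit_range` builds can be sketched outside the compiler. The `AllocRange`, `InitMask`, and `InitMaskCompressed` types below are simplified stand-ins for the real `rustc_middle` types (plain `usize` offsets, one `bool` per byte), not the actual API; only the encoding loop mirrors the code in the diff.

```rust
// Minimal sketch of the run-length encoding in `compress_uninit_range`,
// using simplified stand-in types (assumption: not the real rustc API).

#[derive(Clone, Copy)]
struct AllocRange {
    start: usize, // byte offset into the allocation
    size: usize,  // length in bytes
}

struct InitMask {
    bits: Vec<bool>, // one "is initialized" flag per byte
}

impl InitMask {
    fn get(&self, i: usize) -> bool {
        self.bits[i]
    }
}

/// Run-length encoded view of a slice of the init mask:
/// `initial` is the state of the first byte, and each entry in
/// `ranges` is the length of a run before the state toggles.
struct InitMaskCompressed {
    initial: bool,
    ranges: Vec<u64>,
}

fn compress_uninit_range(mask: &InitMask, range: AllocRange) -> InitMaskCompressed {
    let mut ranges = Vec::new();
    let initial = mask.get(range.start);
    let mut cur_len = 1u64;
    let mut cur = initial;

    for i in 1..range.size {
        if mask.get(range.start + i) == cur {
            cur_len += 1;
        } else {
            // State toggled: record the finished run and start a new one.
            ranges.push(cur_len);
            cur_len = 1;
            cur = !cur;
        }
    }
    ranges.push(cur_len);

    InitMaskCompressed { initial, ranges }
}

fn main() {
    // 3 initialized bytes, 2 uninitialized, 1 initialized.
    let mask = InitMask { bits: vec![true, true, true, false, false, true] };
    let compressed = compress_uninit_range(&mask, AllocRange { start: 0, size: 6 });
    assert_eq!(compressed.initial, true);
    assert_eq!(compressed.ranges, vec![3, 2, 1]);
    println!("initial = {}, runs = {:?}", compressed.initial, compressed.ranges);
}
```

As the comments in the diff note, the point of the encoding is that `copy_repeatedly` can then stamp the destination mask once per run per copy in `mark_compressed_init_range`, instead of re-reading the source mask byte by byte for each of the `repeat` copies.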