Move relocation range copies into allocation

This commit is contained in:
Andreas Molzer 2019-08-30 04:17:18 +02:00
parent 6fe31fefd8
commit bee2d3748e
2 changed files with 53 additions and 27 deletions

View File

@ -693,6 +693,56 @@ fn deref_mut(&mut self) -> &mut Self::Target {
}
}
/// A partial, owned list of relocations to transfer into another allocation.
///
/// Produced by `Allocation::prepare_relocation_copy`; the stored offsets have
/// already been shifted so they are relative to the *destination* allocation
/// (one copy per requested repetition). Applied via `mark_relocation_range`.
pub struct AllocationRelocations<Tag> {
// Destination-relative offset paired with the tagged allocation id it points to.
relative_relocations: Vec<(Size, (Tag, AllocId))>,
}
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
    /// Gathers the relocations overlapping `src..src+size` and re-bases their
    /// offsets onto the destination allocation, once per repetition, so they
    /// can later be applied with `mark_relocation_range`.
    pub fn prepare_relocation_copy(
        &self,
        cx: &impl HasDataLayout,
        src: Pointer<Tag>,
        size: Size,
        dest: Pointer<Tag>,
        length: u64,
    ) -> AllocationRelocations<Tag> {
        let src_relocations = self.get_relocations(cx, src, size);
        if src_relocations.is_empty() {
            // Nothing overlaps the source range; skip the repetition loop entirely.
            return AllocationRelocations { relative_relocations: Vec::new() };
        }

        // One copy of every source relocation per repetition.
        let mut relative_relocations =
            Vec::with_capacity(src_relocations.len() * (length as usize));
        for rep in 0..length {
            // Base offset of this repetition inside the destination allocation.
            // (Invariant across the inner iteration, so computed once here.)
            let rep_base = dest.offset + (rep * size);
            relative_relocations.extend(src_relocations.iter().map(|&(offset, reloc)| {
                // Shift the offset from the source allocation into the destination.
                (offset + rep_base - src.offset, reloc)
            }));
        }

        AllocationRelocations { relative_relocations }
    }

    /// Applies a previously prepared relocation copy to this (destination)
    /// allocation. The entries are already destination-relative and presorted.
    pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations<Tag>) {
        self.relocations.insert_presorted(relocations.relative_relocations);
    }
}
////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
////////////////////////////////////////////////////////////////////////////////

View File

@ -808,32 +808,8 @@ pub fn copy_repeatedly(
// since we don't want to keep any relocations at the target.
// (`get_bytes_with_undef_and_ptr` below checks that there are no
// relocations overlapping the edges; those would not be handled correctly).
let relocations = {
let relocations = self.get(src.alloc_id)?.get_relocations(self, src, size);
if relocations.is_empty() {
// nothing to copy, ignore even the `length` loop
Vec::new()
} else {
let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
for i in 0..length {
new_relocations.extend(
relocations
.iter()
.map(|&(offset, reloc)| {
// compute offset for current repetition
let dest_offset = dest.offset + (i * size);
(
// shift offsets from source allocation to destination allocation
offset + dest_offset - src.offset,
reloc,
)
})
);
}
new_relocations
}
};
let relocations = self.get(src.alloc_id)?
.prepare_relocation_copy(self, src, size, dest, length);
let tcx = self.tcx.tcx;
@ -880,7 +856,7 @@ pub fn copy_repeatedly(
// copy definedness to the destination
self.copy_undef_mask(src, dest, size, length)?;
// copy the relocations to the destination
self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
self.get_mut(dest.alloc_id)?.mark_relocation_range(relocations);
Ok(())
}