Move logic out of machine.rs

parent a7c832b04a
commit ceb173d647
@@ -135,20 +135,22 @@ impl StoreBufferAlloc {
     /// When a non-atomic access happens on a location that has been atomically accessed
     /// before without data race, we can determine that the non-atomic access fully happens
-    /// before all the prior atomic accesses so the location no longer needs to exhibit
+    /// after all the prior atomic accesses so the location no longer needs to exhibit
     /// any weak memory behaviours until further atomic accesses.
-    pub fn destroy_atomicity<'tcx>(&self, range: AllocRange) {
-        let mut buffers = self.store_buffers.borrow_mut();
-        let access_type = buffers.access_type(range);
-        match access_type {
-            AccessType::PerfectlyOverlapping(pos) => {
-                buffers.remove_from_pos(pos);
-            }
-            AccessType::ImperfectlyOverlapping(pos_range) => {
-                buffers.remove_pos_range(pos_range);
-            }
-            AccessType::Empty(_) => {
-                // Do nothing
-            }
-        }
-    }
+    pub fn memory_accessed<'tcx>(&self, range: AllocRange, global: &GlobalState) {
+        if !global.ongoing_atomic_access() {
+            let mut buffers = self.store_buffers.borrow_mut();
+            let access_type = buffers.access_type(range);
+            match access_type {
+                AccessType::PerfectlyOverlapping(pos) => {
+                    buffers.remove_from_pos(pos);
+                }
+                AccessType::ImperfectlyOverlapping(pos_range) => {
+                    buffers.remove_pos_range(pos_range);
+                }
+                AccessType::Empty(_) => {
+                    // The range had no weak behaviours attached, do nothing
+                }
+            }
+        }
+    }
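For illustration only, a minimal self-contained sketch of the pattern this hunk introduces. GlobalState, StoreBufferAlloc, ongoing_atomic_access, and memory_accessed are names taken from the diff, but the Vec-of-ranges buffer representation and the overlap test below are simplified stand-ins, not Miri's actual types or logic. The point it demonstrates is that the "is an atomic access ongoing?" check now lives inside memory_accessed, so callers can invoke it unconditionally.

// Toy sketch (not Miri's real code): the non-atomic-access guard moves into the callee.
use std::cell::{Cell, RefCell};
use std::ops::Range;

/// Simplified stand-in for the global data-race state.
struct GlobalState {
    ongoing_atomic: Cell<bool>,
}

impl GlobalState {
    /// True while an atomic access is being performed.
    fn ongoing_atomic_access(&self) -> bool {
        self.ongoing_atomic.get()
    }
}

/// Simplified stand-in for the per-allocation store buffers; each range marks
/// bytes that currently carry weak-memory (store-buffer) history.
struct StoreBufferAlloc {
    store_buffers: RefCell<Vec<Range<u64>>>,
}

impl StoreBufferAlloc {
    /// Called on every memory access. Only a non-atomic access (no atomic
    /// access ongoing) wipes the weak-memory history overlapping `range`;
    /// the caller no longer performs that check itself.
    fn memory_accessed(&self, range: Range<u64>, global: &GlobalState) {
        if !global.ongoing_atomic_access() {
            self.store_buffers
                .borrow_mut()
                .retain(|buf| buf.end <= range.start || buf.start >= range.end);
        }
    }
}

fn main() {
    let global = GlobalState { ongoing_atomic: Cell::new(false) };
    let alloc = StoreBufferAlloc { store_buffers: RefCell::new(vec![0..4, 8..12]) };

    // The "machine hook" side is now a single unconditional call.
    alloc.memory_accessed(0..4, &global);
    assert_eq!(*alloc.store_buffers.borrow(), vec![8..12]);

    // While an atomic access is in flight, the same call leaves the buffers alone.
    global.ongoing_atomic.set(true);
    alloc.memory_accessed(8..12, &global);
    assert_eq!(*alloc.store_buffers.borrow(), vec![8..12]);
}

In the real change, this is what lets the two Machine hooks in the hunks below shrink to a single memory_accessed call.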
@@ -741,12 +741,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
             )?;
         }
         if let Some(weak_memory) = &alloc_extra.weak_memory {
-            if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
-                // This is a non-atomic access. And if we are accessing a previously atomically
-                // accessed location without racing with them, then the location no longer
-                // exhibits weak-memory behaviors until a fresh atomic access happens.
-                weak_memory.destroy_atomicity(range);
-            }
+            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
         }
         Ok(())
     }
@@ -772,9 +767,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
             )?;
         }
         if let Some(weak_memory) = &alloc_extra.weak_memory {
-            if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
-                weak_memory.destroy_atomicity(range);
-            }
+            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
         }
         Ok(())
     }