diff --git a/src/concurrency/weak_memory.rs b/src/concurrency/weak_memory.rs
index 888f9edceb3..3c692783d14 100644
--- a/src/concurrency/weak_memory.rs
+++ b/src/concurrency/weak_memory.rs
@@ -135,20 +135,22 @@ impl StoreBufferAlloc {
 
     /// When a non-atomic access happens on a location that has been atomically accessed
     /// before without data race, we can determine that the non-atomic access fully happens
-    /// before all the prior atomic accesses so the location no longer needs to exhibit
+    /// after all the prior atomic accesses so the location no longer needs to exhibit
     /// any weak memory behaviours until further atomic accesses.
-    pub fn destroy_atomicity<'tcx>(&self, range: AllocRange) {
-        let mut buffers = self.store_buffers.borrow_mut();
-        let access_type = buffers.access_type(range);
-        match access_type {
-            AccessType::PerfectlyOverlapping(pos) => {
-                buffers.remove_from_pos(pos);
-            }
-            AccessType::ImperfectlyOverlapping(pos_range) => {
-                buffers.remove_pos_range(pos_range);
-            }
-            AccessType::Empty(_) => {
-                // Do nothing
+    pub fn memory_accessed<'tcx>(&self, range: AllocRange, global: &GlobalState) {
+        if !global.ongoing_atomic_access() {
+            let mut buffers = self.store_buffers.borrow_mut();
+            let access_type = buffers.access_type(range);
+            match access_type {
+                AccessType::PerfectlyOverlapping(pos) => {
+                    buffers.remove_from_pos(pos);
+                }
+                AccessType::ImperfectlyOverlapping(pos_range) => {
+                    buffers.remove_pos_range(pos_range);
+                }
+                AccessType::Empty(_) => {
+                    // The range had no weak behaviours attached, do nothing
+                }
             }
         }
     }
diff --git a/src/machine.rs b/src/machine.rs
index b8cb8908700..5ee2d9a9abd 100644
--- a/src/machine.rs
+++ b/src/machine.rs
@@ -741,12 +741,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
             )?;
         }
         if let Some(weak_memory) = &alloc_extra.weak_memory {
-            if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
-                // This is a non-atomic access. And if we are accessing a previously atomically
-                // accessed location without racing with them, then the location no longer
-                // exhibits weak-memory behaviors until a fresh atomic access happens.
-                weak_memory.destroy_atomicity(range);
-            }
+            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
         }
         Ok(())
     }
@@ -772,9 +767,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
             )?;
         }
         if let Some(weak_memory) = &alloc_extra.weak_memory {
-            if !machine.data_race.as_ref().unwrap().ongoing_atomic_access() {
-                weak_memory.destroy_atomicity(range);
-            }
+            weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap());
         }
         Ok(())
     }
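
For reference, below is a minimal, self-contained sketch of the pattern this change adopts. Every type here is a hypothetical stand-in, not Miri's real API: `GlobalState` is reduced to a single flag, `StoreBufferAlloc` keys its buffers by plain `(offset, size)` tuples instead of `AllocRange`, and the buffer contents are placeholder values. The point it illustrates is that the "only a non-atomic access destroys weak-memory state" guard now lives inside the store-buffer hook itself, so both call sites in `machine.rs` simply forward the global data-race state:

```rust
use std::cell::RefCell;
use std::collections::BTreeMap;

/// Stand-in for the data-race detector's global state.
struct GlobalState {
    ongoing_atomic_access: bool,
}

impl GlobalState {
    fn ongoing_atomic_access(&self) -> bool {
        self.ongoing_atomic_access
    }
}

/// Stand-in for `StoreBufferAlloc`: per-range store buffers that model
/// weak-memory behaviours for atomically accessed locations.
struct StoreBufferAlloc {
    store_buffers: RefCell<BTreeMap<(u64, u64), Vec<u64>>>,
}

impl StoreBufferAlloc {
    /// Called on every memory access to this allocation. Only a non-atomic
    /// access (one made while no atomic access is ongoing) destroys the
    /// weak-memory state of the overlapping ranges.
    fn memory_accessed(&self, range: (u64, u64), global: &GlobalState) {
        if !global.ongoing_atomic_access() {
            let (start, size) = range;
            // Simplified overlap handling: drop every buffer whose range
            // intersects the accessed range.
            self.store_buffers
                .borrow_mut()
                .retain(|&(s, sz), _| s + sz <= start || start + size <= s);
        }
    }
}

fn main() {
    let alloc = StoreBufferAlloc { store_buffers: RefCell::new(BTreeMap::new()) };
    alloc.store_buffers.borrow_mut().insert((0, 4), vec![1, 2, 3]);

    // An atomic access leaves the buffered values intact...
    alloc.memory_accessed((0, 4), &GlobalState { ongoing_atomic_access: true });
    assert_eq!(alloc.store_buffers.borrow().len(), 1);

    // ...while a non-atomic access to an overlapping range destroys them.
    alloc.memory_accessed((2, 4), &GlobalState { ongoing_atomic_access: false });
    assert!(alloc.store_buffers.borrow().is_empty());
}
```

Centralising the check also removes a duplication hazard: before this change the guard had to be repeated, and kept in sync, at each call site, and as the second `machine.rs` hunk shows, one copy had already lost its explanatory comment.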