//@ignore-windows: Concurrency on Windows is not supported yet.
//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0
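
// A Relaxed store by another thread breaks the release sequence headed by
// thread 1's Release store, so thread 3's Acquire load of the Relaxed value
// does not synchronize with thread 1, and thread 3's unsynchronized read of
// `*c.0` races with thread 1's write.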
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread::spawn;
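// Wrapper that unsafely implements `Send` and `Sync` so the raw pointer can
// be moved into the spawned threads.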
#[derive(Copy, Clone)]
struct EvilSend<T>(pub T);
unsafe impl<T> Send for EvilSend<T> {}
unsafe impl<T> Sync for EvilSend<T> {}
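// Atomic flag the threads use to (attempt to) synchronize.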
static SYNC: AtomicUsize = AtomicUsize::new(0);
pub fn main() {
    let mut a = 0u32;
    let b = &mut a as *mut u32;
    let c = EvilSend(b);

    // Note: this is scheduler-dependent; the operations need to occur in
    // this order:
    //  1. store release : 1
    //  2. load acquire  : 1
    //  3. store relaxed : 2
    //  4. load acquire  : 2
    unsafe {
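        // Thread 1: write the data, then publish it with a Release store.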
        let j1 = spawn(move || {
            *c.0 = 1;
            SYNC.store(1, Ordering::Release);
        });

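        // Thread 2: observe the flag with Acquire, then forward it with a
        // Relaxed store, which does not re-release the preceding write.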
        let j2 = spawn(move || {
            if SYNC.load(Ordering::Acquire) == 1 {
                SYNC.store(2, Ordering::Relaxed);
            }
        });

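        // Thread 3: reading 2 does not synchronize with thread 1, since the
        // Relaxed store is not part of thread 1's release sequence, so this
        // read races with thread 1's write.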
        let j3 = spawn(move || {
            if SYNC.load(Ordering::Acquire) == 2 {
                *c.0 //~ ERROR: Data race detected between Read on thread `<unnamed>` and Write on thread `<unnamed>`
            } else {
                0
            }
        });

        j1.join().unwrap();
        j2.join().unwrap();
        j3.join().unwrap();
    }
}