//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0
|
use std::sync::atomic::{fence, AtomicUsize, Ordering};
use std::thread::spawn;
/// Wrapper that lets a raw pointer be captured by `move` closures and sent
/// across threads. Deliberately "evil": the unsafe `Send`/`Sync` impls below
/// exist so these tests can create cross-thread accesses for Miri to check.
#[derive(Copy, Clone)]
struct EvilSend<T>(pub T);
unsafe impl<T> Send for EvilSend<T> {}
|
|
|
|
unsafe impl<T> Sync for EvilSend<T> {}
|
|
|
|
|
|
|
|
fn test_fence_sync() {
|
2022-08-13 08:03:30 -05:00
|
|
|
static SYNC: AtomicUsize = AtomicUsize::new(0);
|
|
|
|
|
2020-11-01 19:46:42 -06:00
|
|
|
let mut var = 0u32;
|
|
|
|
let ptr = &mut var as *mut u32;
|
|
|
|
let evil_ptr = EvilSend(ptr);
|
2022-04-30 12:40:35 -05:00
|
|
|
|
2020-11-01 19:46:42 -06:00
|
|
|
let j1 = spawn(move || {
|
2022-06-25 22:30:29 -05:00
|
|
|
unsafe { *evil_ptr.0 = 1 };
|
2020-11-01 19:46:42 -06:00
|
|
|
fence(Ordering::Release);
|
2022-04-30 12:40:35 -05:00
|
|
|
SYNC.store(1, Ordering::Relaxed)
|
2020-11-01 19:46:42 -06:00
|
|
|
});
|
|
|
|
|
|
|
|
let j2 = spawn(move || {
|
|
|
|
if SYNC.load(Ordering::Relaxed) == 1 {
|
|
|
|
fence(Ordering::Acquire);
|
|
|
|
unsafe { *evil_ptr.0 }
|
2020-11-22 11:28:12 -06:00
|
|
|
} else {
|
2022-08-13 08:03:30 -05:00
|
|
|
panic!(); // relies on thread 2 going last
|
2020-11-01 19:46:42 -06:00
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
j1.join().unwrap();
|
|
|
|
j2.join().unwrap();
|
|
|
|
}
|
|
|
|
|
|
|
|
fn test_multiple_reads() {
|
|
|
|
let mut var = 42u32;
|
|
|
|
let ptr = &mut var as *mut u32;
|
|
|
|
let evil_ptr = EvilSend(ptr);
|
|
|
|
|
2022-06-20 17:30:34 -05:00
|
|
|
let j1 = spawn(move || unsafe { *evil_ptr.0 });
|
|
|
|
let j2 = spawn(move || unsafe { *evil_ptr.0 });
|
|
|
|
let j3 = spawn(move || unsafe { *evil_ptr.0 });
|
|
|
|
let j4 = spawn(move || unsafe { *evil_ptr.0 });
|
2020-11-01 19:46:42 -06:00
|
|
|
|
|
|
|
assert_eq!(j1.join().unwrap(), 42);
|
|
|
|
assert_eq!(j2.join().unwrap(), 42);
|
|
|
|
assert_eq!(j3.join().unwrap(), 42);
|
|
|
|
assert_eq!(j4.join().unwrap(), 42);
|
|
|
|
|
|
|
|
var = 10;
|
|
|
|
assert_eq!(var, 10);
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn test_rmw_no_block() {
|
2022-08-13 08:03:30 -05:00
|
|
|
static SYNC: AtomicUsize = AtomicUsize::new(0);
|
|
|
|
|
2020-11-01 19:46:42 -06:00
|
|
|
let mut a = 0u32;
|
|
|
|
let b = &mut a as *mut u32;
|
|
|
|
let c = EvilSend(b);
|
|
|
|
|
|
|
|
unsafe {
|
|
|
|
let j1 = spawn(move || {
|
|
|
|
*c.0 = 1;
|
|
|
|
SYNC.store(1, Ordering::Release);
|
|
|
|
});
|
|
|
|
|
|
|
|
let j2 = spawn(move || {
|
|
|
|
if SYNC.swap(2, Ordering::Relaxed) == 1 {
|
|
|
|
//No op, blocking store removed
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
2022-06-20 17:30:34 -05:00
|
|
|
let j3 = spawn(move || if SYNC.load(Ordering::Acquire) == 2 { *c.0 } else { 0 });
|
2020-11-01 19:46:42 -06:00
|
|
|
|
|
|
|
j1.join().unwrap();
|
|
|
|
j2.join().unwrap();
|
|
|
|
let v = j3.join().unwrap();
|
2022-08-13 08:03:30 -05:00
|
|
|
assert!(v == 1 || v == 2); // relies on thread 3 going last
|
2020-11-01 19:46:42 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-12-06 10:58:32 -06:00
|
|
|
pub fn test_simple_release() {
|
2022-08-13 08:03:30 -05:00
|
|
|
static SYNC: AtomicUsize = AtomicUsize::new(0);
|
|
|
|
|
2020-11-01 19:46:42 -06:00
|
|
|
let mut a = 0u32;
|
|
|
|
let b = &mut a as *mut u32;
|
|
|
|
let c = EvilSend(b);
|
|
|
|
|
|
|
|
unsafe {
|
|
|
|
let j1 = spawn(move || {
|
|
|
|
*c.0 = 1;
|
|
|
|
SYNC.store(1, Ordering::Release);
|
|
|
|
});
|
|
|
|
|
2022-06-20 17:30:34 -05:00
|
|
|
let j2 = spawn(move || if SYNC.load(Ordering::Acquire) == 1 { *c.0 } else { 0 });
|
2020-11-01 19:46:42 -06:00
|
|
|
|
|
|
|
j1.join().unwrap();
|
2022-08-13 08:03:30 -05:00
|
|
|
assert_eq!(j2.join().unwrap(), 1); // relies on thread 2 going last
|
2020-11-01 19:46:42 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn main() {
|
|
|
|
test_fence_sync();
|
|
|
|
test_multiple_reads();
|
|
|
|
test_rmw_no_block();
|
2020-12-06 10:58:32 -06:00
|
|
|
test_simple_release();
|
2020-11-02 07:08:09 -06:00
|
|
|
}
|