//@ignore-target-windows: Concurrency on Windows is not supported yet.
//@compile-flags: -Zmiri-ignore-leaks

// Tests operations not performable through C++'s atomic API
// but doable in unsafe Rust, which we think *should* be fine.
// Nonetheless, these operations may be deemed inconsistent with the
// memory model in the future.

#![feature(atomic_from_mut)]

use std::sync::atomic::AtomicU32;
use std::sync::atomic::Ordering::*;
use std::thread::spawn;
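
// Create a `&'static AtomicU32` by deliberately leaking a `Box`; the
// leak is why this test passes `-Zmiri-ignore-leaks` above.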
fn static_atomic(val: u32) -> &'static AtomicU32 {
    let ret = Box::leak(Box::new(AtomicU32::new(val)));
    ret
}

// We allow perfectly overlapping non-atomic and atomic reads to race
fn racing_mixed_atomicity_read() {
    let x = static_atomic(0);
    x.store(42, Relaxed);
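
    // An atomic load of `x`, racing with the non-atomic read below.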
    let j1 = spawn(move || x.load(Relaxed));
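
    // A perfectly overlapping non-atomic read of the same location through
    // a raw pointer; C++'s atomic API cannot express this.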
    let j2 = spawn(move || {
        let x_ptr = x as *const AtomicU32 as *const u32;
        unsafe { x_ptr.read() }
    });

    let r1 = j1.join().unwrap();
    let r2 = j2.join().unwrap();

    assert_eq!(r1, 42);
    assert_eq!(r2, 42);
}
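
// A minimal sketch (not exercised by `main`) of why `atomic_from_mut` is
// enabled above: `AtomicU32::from_mut` gives an atomic view of a plain
// `u32`, another mixed-atomicity operation that C++'s atomic API cannot
// express. The function name is illustrative only.
#[allow(dead_code)]
fn mixed_atomicity_via_from_mut() {
    let mut v: u32 = 41;
    {
        // Atomic access to a non-atomic location.
        let a = AtomicU32::from_mut(&mut v);
        a.fetch_add(1, Relaxed);
    }
    // The non-atomic view observes the atomic update.
    assert_eq!(v, 42);
}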

pub fn main() {
    racing_mixed_atomicity_read();
}