//@compile-flags: -Zmiri-strict-provenance
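// This test exercises the atomic API (loads, stores, RMWs, fences, and
// `AtomicPtr` arithmetic) under Miri's strict-provenance checking.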
#![feature(strict_provenance, strict_provenance_atomic_ptr)]
use std::sync::atomic::{
    compiler_fence, fence, AtomicBool, AtomicIsize, AtomicPtr, AtomicU64, Ordering::*,
};

fn main() {
    atomic_bool();
    atomic_all_ops();
    atomic_u64();
    atomic_fences();
    atomic_ptr();
    weak_sometimes_fails();
}
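
/// Check the basic `AtomicBool` read-modify-write operations; each store/fetch
/// is followed by an assertion on the value it must have produced.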
fn atomic_bool() {
    static mut ATOMIC: AtomicBool = AtomicBool::new(false);

    unsafe {
        assert_eq!(*ATOMIC.get_mut(), false);
        ATOMIC.store(true, SeqCst);
        assert_eq!(*ATOMIC.get_mut(), true);
        ATOMIC.fetch_or(false, SeqCst);
        assert_eq!(*ATOMIC.get_mut(), true);
        ATOMIC.fetch_and(false, SeqCst);
        assert_eq!(*ATOMIC.get_mut(), false);
        ATOMIC.fetch_nand(true, SeqCst);
        assert_eq!(*ATOMIC.get_mut(), true);
        ATOMIC.fetch_xor(true, SeqCst);
        assert_eq!(*ATOMIC.get_mut(), false);
    }
}

// There isn't a trait to use to make this generic, so just use a macro
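// It retries `compare_exchange_weak` until it succeeds, asserting on every
// attempt (including spurious failures) that the observed value is `$from`.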
macro_rules! compare_exchange_weak_loop {
    ($atom:expr, $from:expr, $to:expr, $succ_order:expr, $fail_order:expr) => {
        loop {
            match $atom.compare_exchange_weak($from, $to, $succ_order, $fail_order) {
                Ok(n) => {
                    assert_eq!(n, $from);
                    break;
                }
                Err(n) => assert_eq!(n, $from),
            }
        }
    };
}

/// Make sure we can handle all the intrinsics
fn atomic_all_ops() {
    static ATOMIC: AtomicIsize = AtomicIsize::new(0);
    static ATOMIC_UNSIGNED: AtomicU64 = AtomicU64::new(0);

    let load_orders = [Relaxed, Acquire, SeqCst];
    let stored_orders = [Relaxed, Release, SeqCst];
    let rmw_orders = [Relaxed, Release, Acquire, AcqRel, SeqCst];

    // loads
    for o in load_orders {
        ATOMIC.load(o);
    }

    // stores
    for o in stored_orders {
        ATOMIC.store(1, o);
    }

    // most RMWs
    for o in rmw_orders {
        ATOMIC.swap(0, o);
        ATOMIC.fetch_or(0, o);
        ATOMIC.fetch_xor(0, o);
        ATOMIC.fetch_and(0, o);
        ATOMIC.fetch_nand(0, o);
        ATOMIC.fetch_add(0, o);
        ATOMIC.fetch_sub(0, o);
        ATOMIC.fetch_min(0, o);
        ATOMIC.fetch_max(0, o);
        ATOMIC_UNSIGNED.fetch_min(0, o);
        ATOMIC_UNSIGNED.fetch_max(0, o);
    }

    // RMWs with separate failure ordering
    for o1 in rmw_orders {
        for o2 in load_orders {
            let _res = ATOMIC.compare_exchange(0, 0, o1, o2);
            let _res = ATOMIC.compare_exchange_weak(0, 0, o1, o2);
        }
    }
}
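
/// Check `compare_exchange`/`compare_exchange_weak` success and failure results,
/// and that `fetch_max`/`fetch_min` return the previous value while storing the
/// max/min of that value and the operand.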
fn atomic_u64() {
    static ATOMIC: AtomicU64 = AtomicU64::new(0);

    ATOMIC.store(1, SeqCst);
    assert_eq!(ATOMIC.compare_exchange(0, 0x100, AcqRel, Acquire), Err(1));
    assert_eq!(ATOMIC.compare_exchange(0, 1, Release, Relaxed), Err(1));
    assert_eq!(ATOMIC.compare_exchange(1, 0, AcqRel, Relaxed), Ok(1));
    assert_eq!(ATOMIC.compare_exchange(0, 1, Relaxed, Relaxed), Ok(0));
    compare_exchange_weak_loop!(ATOMIC, 1, 0x100, AcqRel, Acquire);
    assert_eq!(ATOMIC.compare_exchange_weak(0, 2, Acquire, Relaxed), Err(0x100));
    assert_eq!(ATOMIC.compare_exchange_weak(0, 1, Release, Relaxed), Err(0x100));
    assert_eq!(ATOMIC.load(Relaxed), 0x100);

    assert_eq!(ATOMIC.fetch_max(0x10, SeqCst), 0x100);
    assert_eq!(ATOMIC.fetch_max(0x100, SeqCst), 0x100);
    assert_eq!(ATOMIC.fetch_max(0x1000, SeqCst), 0x100);
    assert_eq!(ATOMIC.fetch_max(0x1000, SeqCst), 0x1000);
    assert_eq!(ATOMIC.fetch_max(0x2000, SeqCst), 0x1000);
    assert_eq!(ATOMIC.fetch_max(0x2000, SeqCst), 0x2000);

    assert_eq!(ATOMIC.fetch_min(0x2000, SeqCst), 0x2000);
    assert_eq!(ATOMIC.fetch_min(0x2000, SeqCst), 0x2000);
    assert_eq!(ATOMIC.fetch_min(0x1000, SeqCst), 0x2000);
    assert_eq!(ATOMIC.fetch_min(0x1000, SeqCst), 0x1000);
    assert_eq!(ATOMIC.fetch_min(0x100, SeqCst), 0x1000);
    assert_eq!(ATOMIC.fetch_min(0x10, SeqCst), 0x100);
}
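
/// Exercise `fence` and `compiler_fence` with every valid ordering
/// (all orderings except `Relaxed`).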
fn atomic_fences() {
    fence(SeqCst);
    fence(Release);
    fence(Acquire);
    fence(AcqRel);

    compiler_fence(SeqCst);
    compiler_fence(Release);
    compiler_fence(Acquire);
    compiler_fence(AcqRel);
}
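
/// Check `AtomicPtr` operations under strict provenance: compare-exchange that
/// only compares addresses, element-wise arithmetic via `fetch_ptr_add`/`fetch_ptr_sub`,
/// and address-bit operations via `fetch_or`/`fetch_and`/`fetch_xor`.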
fn atomic_ptr() {
    use std::ptr;
    let array: Vec<i32> = (0..100).into_iter().collect(); // a target to point to, to test provenance things
    let x = array.as_ptr() as *mut i32;

    let ptr = AtomicPtr::<i32>::new(ptr::null_mut());
    assert!(ptr.load(Relaxed).addr() == 0);
    ptr.store(ptr::invalid_mut(13), SeqCst);
    assert!(ptr.swap(x, Relaxed).addr() == 13);
    unsafe { assert!(*ptr.load(Acquire) == 0) };

    // comparison ignores provenance
    assert_eq!(
        ptr.compare_exchange(
            (&mut 0 as *mut i32).with_addr(x.addr()),
            ptr::invalid_mut(0),
            SeqCst,
            SeqCst
        )
        .unwrap()
        .addr(),
        x.addr(),
    );
    assert_eq!(
        ptr.compare_exchange(
            (&mut 0 as *mut i32).with_addr(x.addr()),
            ptr::invalid_mut(0),
            SeqCst,
            SeqCst
        )
        .unwrap_err()
        .addr(),
        0,
    );
    ptr.store(x, Relaxed);

    assert_eq!(ptr.fetch_ptr_add(13, AcqRel).addr(), x.addr());
    unsafe { assert_eq!(*ptr.load(SeqCst), 13) }; // points to index 13 now
    assert_eq!(ptr.fetch_ptr_sub(4, AcqRel).addr(), x.addr() + 13 * 4);
    unsafe { assert_eq!(*ptr.load(SeqCst), 9) };
    assert_eq!(ptr.fetch_or(3, AcqRel).addr(), x.addr() + 9 * 4); // ptr is 4-aligned, so set the last 2 bits
    assert_eq!(ptr.fetch_and(!3, AcqRel).addr(), (x.addr() + 9 * 4) | 3); // and unset them again
    unsafe { assert_eq!(*ptr.load(SeqCst), 9) };
    assert_eq!(ptr.fetch_xor(0xdeadbeef, AcqRel).addr(), x.addr() + 9 * 4);
    assert_eq!(ptr.fetch_xor(0xdeadbeef, AcqRel).addr(), (x.addr() + 9 * 4) ^ 0xdeadbeef);
    unsafe { assert_eq!(*ptr.load(SeqCst), 9) }; // after XORing twice with the same thing, we get our ptr back
}
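
/// `compare_exchange_weak` is allowed to fail spuriously; check that it fails
/// at least once within 100 attempts rather than succeeding every time.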
fn weak_sometimes_fails() {
    let atomic = AtomicBool::new(false);
    let tries = 100;
    for _ in 0..tries {
        let cur = atomic.load(Relaxed);
        // Try (weakly) to flip the flag.
        if atomic.compare_exchange_weak(cur, !cur, Relaxed, Relaxed).is_err() {
            // We failed, so return and skip the panic.
            return;
        }
    }
    panic!("compare_exchange_weak succeeded {} tries in a row", tries);
}