From b5bc4e1b0c2e96d91e57b450051f29ab454a1c5b Mon Sep 17 00:00:00 2001
From: Ralf Jung
Date: Mon, 20 Jun 2022 22:02:02 -0700
Subject: [PATCH] add tests that fail due to SRW protectors

also do more iterations of weak mem consistency, since now that is no longer
the slowest test ;)
---
 tests/pass/0concurrency_arc_drop.rs        | 19 ++++++++++
 tests/pass/0weak_memory_consistency.rs     |  2 +-
 .../stacked-borrows/interior_mutability.rs | 35 ++++++++++++++++++-
 3 files changed, 54 insertions(+), 2 deletions(-)
 create mode 100644 tests/pass/0concurrency_arc_drop.rs

diff --git a/tests/pass/0concurrency_arc_drop.rs b/tests/pass/0concurrency_arc_drop.rs
new file mode 100644
index 00000000000..b5192cd4214
--- /dev/null
+++ b/tests/pass/0concurrency_arc_drop.rs
@@ -0,0 +1,19 @@
+// ignore-windows: Concurrency on Windows is not supported yet.
+use std::sync::Arc;
+use std::thread;
+
+/// Test for Arc::drop bug (https://github.com/rust-lang/rust/issues/55005)
+fn main() {
+    // The bug seems to take up to 700 iterations to reproduce with most seeds (tested 0-9).
+    for _ in 0..700 {
+        let arc_1 = Arc::new(());
+        let arc_2 = arc_1.clone();
+        let thread = thread::spawn(|| drop(arc_2));
+        let mut i = 0;
+        while i < 256 {
+            i += 1;
+        }
+        drop(arc_1);
+        thread.join().unwrap();
+    }
+}
diff --git a/tests/pass/0weak_memory_consistency.rs b/tests/pass/0weak_memory_consistency.rs
index fc9dce0c986..601d8547f8c 100644
--- a/tests/pass/0weak_memory_consistency.rs
+++ b/tests/pass/0weak_memory_consistency.rs
@@ -214,7 +214,7 @@ fn test_single_thread() {
 }
 
 pub fn main() {
-    for _ in 0..50 {
+    for _ in 0..75 {
         test_single_thread();
         test_mixed_access();
         test_load_buffering_acq_rel();
diff --git a/tests/pass/stacked-borrows/interior_mutability.rs b/tests/pass/stacked-borrows/interior_mutability.rs
index 1ac9706b525..9ee8af45aef 100644
--- a/tests/pass/stacked-borrows/interior_mutability.rs
+++ b/tests/pass/stacked-borrows/interior_mutability.rs
@@ -1,11 +1,14 @@
+// compile-flags: -Zmiri-tag-raw-pointers
 use std::cell::{Cell, RefCell, UnsafeCell};
-use std::mem::MaybeUninit;
+use std::mem::{self, MaybeUninit};
 
 fn main() {
     aliasing_mut_and_shr();
     aliasing_frz_and_shr();
     into_interior_mutability();
     unsafe_cell_2phase();
+    unsafe_cell_deallocate();
+    unsafe_cell_invalidate();
 }
 
 fn aliasing_mut_and_shr() {
@@ -67,3 +70,33 @@ fn unsafe_cell_2phase() {
         let _val = (*x2.get()).get(0);
     }
 }
+
+/// Make sure we can deallocate an UnsafeCell that was passed to an active fn call.
+/// (This is the fix for https://github.com/rust-lang/rust/issues/55005.)
+fn unsafe_cell_deallocate() {
+    fn f(x: &UnsafeCell<i32>) {
+        let b: Box<i32> = unsafe { Box::from_raw(x as *const _ as *mut i32) };
+        drop(b)
+    }
+
+    let b = Box::new(0i32);
+    f(unsafe { mem::transmute(Box::into_raw(b)) });
+}
+
+/// As a side-effect of the above, we also allow this -- at least for now.
+fn unsafe_cell_invalidate() {
+    fn f(_x: &UnsafeCell<i32>, y: *mut i32) {
+        // Writing to y invalidates x, but that is okay.
+        unsafe {
+            *y += 1;
+        }
+    }
+
+    let mut x = 0i32;
+    let raw1 = &mut x as *mut _;
+    let ref1 = unsafe { &mut *raw1 };
+    let raw2 = ref1 as *mut _;
+    // Now the borrow stack is: raw1, ref1, raw2.
+    // So using raw1 invalidates raw2.
+    f(unsafe { mem::transmute(raw2) }, raw1);
+}