Auto merge of #2484 - RalfJung:rustup, r=RalfJung

rustup; enable extra const UB checks
This commit is contained in:
bors 2022-08-13 13:04:16 +00:00
commit 50ef22af52
5 changed files with 32 additions and 7 deletions

View File

@ -1 +1 @@
-20ffea6938b5839c390252e07940b99e3b6a889a
+75b7e52e92c3b00fc891b47f5b2efdff0a2be55a

View File

@ -116,4 +116,5 @@
 "-Zmir-opt-level=0",
 "--cfg=miri",
 "-Cdebug-assertions=on",
+"-Zextra-const-ub-checks",
 ];

View File

@ -0,0 +1,11 @@
#![feature(const_ptr_read)]
const UNALIGNED_READ: () = unsafe {
let x = &[0u8; 4];
let ptr = x.as_ptr().cast::<u32>();
ptr.read(); //~ERROR: evaluation of constant value failed
};
fn main() {
let _x = UNALIGNED_READ;
}

View File

@ -0,0 +1,9 @@
error[E0080]: evaluation of constant value failed
--> $DIR/const-ub-checks.rs:LL:CC
|
LL | ptr.read();
| ^^^^^^^^^^ accessing memory with alignment ALIGN, but alignment ALIGN is required
error: aborting due to previous error
For more information about this error, try `rustc --explain E0080`.

View File

@ -1,5 +1,5 @@
 //@ignore-target-windows: Concurrency on Windows is not supported yet.
-//@compile-flags: -Zmiri-disable-weak-memory-emulation
+//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0
 use std::sync::atomic::{fence, AtomicUsize, Ordering};
 use std::thread::spawn;
@ -10,9 +10,9 @@
 unsafe impl<T> Send for EvilSend<T> {}
 unsafe impl<T> Sync for EvilSend<T> {}
+fn test_fence_sync() {
 static SYNC: AtomicUsize = AtomicUsize::new(0);
-fn test_fence_sync() {
 let mut var = 0u32;
 let ptr = &mut var as *mut u32;
 let evil_ptr = EvilSend(ptr);
@ -28,7 +28,7 @@ fn test_fence_sync() {
 fence(Ordering::Acquire);
 unsafe { *evil_ptr.0 }
 } else {
-0
+panic!(); // relies on thread 2 going last
 }
 });
@ -56,6 +56,8 @@ fn test_multiple_reads() {
 }
 pub fn test_rmw_no_block() {
+static SYNC: AtomicUsize = AtomicUsize::new(0);
 let mut a = 0u32;
 let b = &mut a as *mut u32;
 let c = EvilSend(b);
@ -77,11 +79,13 @@ pub fn test_rmw_no_block() {
 j1.join().unwrap();
 j2.join().unwrap();
 let v = j3.join().unwrap();
-assert!(v == 1 || v == 2);
+assert!(v == 1 || v == 2); // relies on thread 3 going last
 }
 }
 pub fn test_simple_release() {
+static SYNC: AtomicUsize = AtomicUsize::new(0);
 let mut a = 0u32;
 let b = &mut a as *mut u32;
 let c = EvilSend(b);
@ -95,7 +99,7 @@ pub fn test_simple_release() {
 let j2 = spawn(move || if SYNC.load(Ordering::Acquire) == 1 { *c.0 } else { 0 });
 j1.join().unwrap();
-assert_eq!(j2.join().unwrap(), 1);
+assert_eq!(j2.join().unwrap(), 1); // relies on thread 2 going last
 }
 }