Use it in the library, and InstSimplify it away in the easy places

Scott McMurray 2024-04-11 17:01:27 -07:00
commit de64ff76f8 (parent 4f4442655e)
11 changed files with 328 additions and 28 deletions

@@ -9,7 +9,7 @@ use crate::MemFlags;
use rustc_hir as hir;
use rustc_middle::mir;
use rustc_middle::mir::Operand;
use rustc_middle::mir::{AggregateKind, Operand};
use rustc_middle::ty::cast::{CastTy, IntTy};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, adjustment::PointerCoercion, Instance, Ty, TyCtxt};
@@ -720,6 +720,24 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
OperandRef { val: OperandValue::Immediate(static_), layout }
}
mir::Rvalue::Use(ref operand) => self.codegen_operand(bx, operand),
mir::Rvalue::Aggregate(box mir::AggregateKind::RawPtr(..), ref fields) => {
let ty = rvalue.ty(self.mir, self.cx.tcx());
let layout = self.cx.layout_of(self.monomorphize(ty));
let [data, meta] = &*fields.raw else {
bug!("RawPtr fields: {fields:?}");
};
let data = self.codegen_operand(bx, data);
let meta = self.codegen_operand(bx, meta);
match (data.val, meta.val) {
(p @ OperandValue::Immediate(_), OperandValue::ZeroSized) => {
OperandRef { val: p, layout }
}
(OperandValue::Immediate(p), OperandValue::Immediate(m)) => {
OperandRef { val: OperandValue::Pair(p, m), layout }
}
_ => bug!("RawPtr operands {data:?} {meta:?}"),
}
}
mir::Rvalue::Repeat(..) | mir::Rvalue::Aggregate(..) => {
// According to `rvalue_creates_operand`, only ZST
// aggregate rvalues are allowed to be operands.
@@ -1032,6 +1050,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mir::Rvalue::ThreadLocalRef(_) |
mir::Rvalue::Use(..) => // (*)
true,
// This always produces a `ty::RawPtr`, so will be Immediate or Pair
mir::Rvalue::Aggregate(box AggregateKind::RawPtr(..), ..) => true,
mir::Rvalue::Repeat(..) |
mir::Rvalue::Aggregate(..) => {
let ty = rvalue.ty(self.mir, self.cx.tcx());
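
Taken together, these two hunks say that a `RawPtr` aggregate always produces an operand: a thin pointer stays a single `Immediate`, and a wide pointer becomes `OperandValue::Pair(data, meta)`. A library-level sketch of the two shapes (not code from this commit; nightly, since `from_raw_parts` is still unstable under `ptr_metadata`):

#![feature(ptr_metadata)]
use std::ptr;

fn wide(data: *const u8, len: usize) -> *const [u8] {
    // [u8] is unsized: metadata is the usize length, so codegen keeps a
    // Pair(data, len).
    ptr::slice_from_raw_parts(data, len)
}

fn thin(data: *const ()) -> *const u8 {
    // u8 is sized: metadata is the ZST `()`, so only the data Immediate
    // survives.
    ptr::from_raw_parts(data, ())
}

fn main() {
    let xs = [1u8, 2, 3];
    assert_eq!(unsafe { &*wide(xs.as_ptr(), xs.len()) }, &[1, 2, 3]);
    assert_eq!(unsafe { *thin(xs.as_ptr().cast()) }, 1);
}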

@@ -603,6 +603,17 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(self.read_immediate(op)?.to_scalar())
}
pub fn read_mem_place_meta(
&self,
op: &impl Readable<'tcx, M::Provenance>,
) -> InterpResult<'tcx, MemPlaceMeta<M::Provenance>> {
Ok(if op.layout().is_zst() {
MemPlaceMeta::None
} else {
MemPlaceMeta::Meta(self.read_scalar(op)?)
})
}
// Pointer-sized reads are fairly common and need target layout access, so we wrap them in
// convenience functions.
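
The new `read_mem_place_meta` helper turns the metadata operand of a `RawPtr` aggregate into a `MemPlaceMeta`: a zero-sized operand means a thin pointer (no metadata), anything else is read as a scalar. That rule matches how `Pointee::Metadata` is defined in the library; a quick illustration (not from this commit; nightly with the unstable `ptr_metadata` feature):

#![feature(ptr_metadata)]
use std::mem::size_of;
use std::ptr::Pointee;

fn main() {
    // Thin pointee: metadata is the ZST `()`, i.e. MemPlaceMeta::None above.
    assert_eq!(size_of::<<u8 as Pointee>::Metadata>(), 0);
    // Slice: metadata is the element count.
    assert_eq!(size_of::<<[u8] as Pointee>::Metadata>(), size_of::<usize>());
    // Trait object: metadata is a pointer to the vtable.
    assert_eq!(size_of::<<dyn std::fmt::Debug as Pointee>::Metadata>(), size_of::<usize>());
}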

@@ -9,7 +9,7 @@ use rustc_middle::mir;
use rustc_middle::ty::layout::LayoutOf;
use rustc_target::abi::{FieldIdx, FIRST_VARIANT};
use super::{ImmTy, InterpCx, InterpResult, Machine, PlaceTy, Projectable, Scalar};
use super::{ImmTy, Immediate, InterpCx, InterpResult, Machine, PlaceTy, Projectable, Scalar};
use crate::util;
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
@@ -303,6 +303,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let variant_dest = self.project_downcast(dest, variant_index)?;
(variant_index, variant_dest, active_field_index)
}
mir::AggregateKind::RawPtr(..) => {
// Trying to `project_field` into pointers tends not to work,
// so build the `Immediate` from the parts directly.
let [data, meta] = &operands.raw else {
bug!("{kind:?} should have 2 operands, had {operands:?}");
};
let data = self.eval_operand(data, None)?;
let data = self.read_pointer(&data)?;
let meta = self.eval_operand(meta, None)?;
let meta = self.read_mem_place_meta(&meta)?;
let ptr_imm = Immediate::new_pointer_with_meta(data, meta, self);
let ptr = ImmTy::from_immediate(ptr_imm, dest.layout);
self.copy_op(&ptr, dest)?;
return Ok(());
}
_ => (FIRST_VARIANT, dest.clone(), None),
};
if active_field_index.is_some() {
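
As the comment notes, wide pointers have no addressable fields to `project_field` into, so the interpreter assembles the `Immediate` directly from the evaluated data pointer and metadata. The practical effect is that raw wide pointers can now be built from parts during const evaluation; a minimal sketch of the kind of code this arm evaluates (not from this diff; nightly with `ptr_metadata`):

#![feature(ptr_metadata)]

const DATA: [u16; 3] = [7, 8, 9];
// Evaluated by the interpreter arm above: the data pointer and the length 3
// are combined into one wide-pointer immediate at compile time.
const SLICE_PTR: *const [u16] =
    std::ptr::from_raw_parts((&DATA as *const [u16; 3]).cast::<()>(), 3);

fn main() {
    assert_eq!(unsafe { &*SLICE_PTR }, &[7, 8, 9]);
}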

@@ -2786,6 +2786,7 @@ pub unsafe fn vtable_align(_ptr: *const ()) -> usize {
/// change the possible layouts of pointers.
#[rustc_nounwind]
#[unstable(feature = "core_intrinsics", issue = "none")]
#[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
#[rustc_intrinsic]
#[rustc_intrinsic_must_be_overridden]
#[cfg(not(bootstrap))]

@@ -2,6 +2,8 @@
use crate::fmt;
use crate::hash::{Hash, Hasher};
#[cfg(not(bootstrap))]
use crate::intrinsics::aggregate_raw_ptr;
use crate::marker::Freeze;
/// Provides the pointer metadata type of any pointed-to type.
@@ -113,10 +115,17 @@ pub const fn from_raw_parts<T: ?Sized>(
data_pointer: *const (),
metadata: <T as Pointee>::Metadata,
) -> *const T {
// SAFETY: Accessing the value from the `PtrRepr` union is safe since *const T
// and PtrComponents<T> have the same memory layouts. Only std can make this
// guarantee.
unsafe { PtrRepr { components: PtrComponents { data_pointer, metadata } }.const_ptr }
#[cfg(bootstrap)]
{
// SAFETY: Accessing the value from the `PtrRepr` union is safe since *const T
// and PtrComponents<T> have the same memory layouts. Only std can make this
// guarantee.
unsafe { PtrRepr { components: PtrComponents { data_pointer, metadata } }.const_ptr }
}
#[cfg(not(bootstrap))]
{
aggregate_raw_ptr(data_pointer, metadata)
}
}
/// Performs the same functionality as [`from_raw_parts`], except that a
@@ -130,10 +139,17 @@ pub const fn from_raw_parts_mut<T: ?Sized>(
data_pointer: *mut (),
metadata: <T as Pointee>::Metadata,
) -> *mut T {
// SAFETY: Accessing the value from the `PtrRepr` union is safe since *const T
// and PtrComponents<T> have the same memory layouts. Only std can make this
// guarantee.
unsafe { PtrRepr { components: PtrComponents { data_pointer, metadata } }.mut_ptr }
#[cfg(bootstrap)]
{
// SAFETY: Accessing the value from the `PtrRepr` union is safe since *const T
// and PtrComponents<T> have the same memory layouts. Only std can make this
// guarantee.
unsafe { PtrRepr { components: PtrComponents { data_pointer, metadata } }.mut_ptr }
}
#[cfg(not(bootstrap))]
{
aggregate_raw_ptr(data_pointer, metadata)
}
}
#[repr(C)]
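
The `#[cfg(bootstrap)]` arms keep the old `PtrRepr` union trick for the stage0 compiler, while `#[cfg(not(bootstrap))]` routes both `from_raw_parts` and `from_raw_parts_mut` through the `aggregate_raw_ptr` intrinsic, which the `rustc_const_unstable` attribute earlier in this diff makes callable in these `const fn` bodies. A quick round trip through the decompose/rebuild pair these functions form (a sketch, not part of this commit; nightly with `ptr_metadata`):

#![feature(ptr_metadata)]
use std::ptr;

fn main() {
    let xs = [1u32, 2, 3];
    let fat: *const [u32] = &xs[..];
    // Split a wide pointer into its parts...
    let (data, meta) = (fat.cast::<()>(), ptr::metadata(fat));
    // ...and rebuild it; on a non-bootstrap compiler this goes through the
    // aggregate_raw_ptr intrinsic rather than the PtrRepr union.
    let rebuilt: *const [u32] = ptr::from_raw_parts(data, meta);
    assert_eq!(unsafe { &*rebuilt }, &[1, 2, 3]);
}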

@@ -1163,3 +1163,11 @@ fn test_null_array_as_slice() {
assert!(ptr.is_null());
assert_eq!(ptr.len(), 4);
}
#[test]
fn test_ptr_from_raw_parts_in_const() {
const EMPTY_SLICE_PTR: *const [i32] =
std::ptr::slice_from_raw_parts(std::ptr::without_provenance(123), 456);
assert_eq!(EMPTY_SLICE_PTR.addr(), 123);
assert_eq!(EMPTY_SLICE_PTR.len(), 456);
}

@@ -2678,3 +2678,16 @@ fn test_get_many_mut_duplicate() {
let mut v = vec![1, 2, 3, 4, 5];
assert!(v.get_many_mut([1, 3, 3, 4]).is_err());
}
#[test]
fn test_slice_from_raw_parts_in_const() {
static FANCY: i32 = 4;
static FANCY_SLICE: &[i32] = unsafe { std::slice::from_raw_parts(&FANCY, 1) };
assert_eq!(FANCY_SLICE.as_ptr(), std::ptr::addr_of!(FANCY));
assert_eq!(FANCY_SLICE.len(), 1);
const EMPTY_SLICE: &[i32] =
unsafe { std::slice::from_raw_parts(std::ptr::without_provenance(123456), 0) };
assert_eq!(EMPTY_SLICE.as_ptr().addr(), 123456);
assert_eq!(EMPTY_SLICE.len(), 0);
}

@@ -4,16 +4,66 @@ fn slice_ptr_get_unchecked_range(_1: *const [u32], _2: std::ops::Range<usize>) -
debug slice => _1;
debug index => _2;
let mut _0: *const [u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined std::ptr::const_ptr::<impl *const [u32]>::get_unchecked::<std::ops::Range<usize>>) {
debug self => _1;
debug index => _2;
debug ((index: std::ops::Range<usize>).0: usize) => _3;
debug ((index: std::ops::Range<usize>).1: usize) => _4;
scope 2 (inlined <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked) {
debug ((self: std::ops::Range<usize>).0: usize) => _3;
debug ((self: std::ops::Range<usize>).1: usize) => _4;
debug slice => _1;
let _5: usize;
let mut _6: *const u32;
let mut _7: *const u32;
scope 3 {
debug new_len => _5;
scope 6 (inlined std::ptr::const_ptr::<impl *const [u32]>::as_ptr) {
debug self => _1;
}
scope 7 (inlined std::ptr::const_ptr::<impl *const u32>::add) {
debug self => _6;
debug count => _3;
}
scope 8 (inlined slice_from_raw_parts::<u32>) {
debug data => _7;
debug len => _5;
let mut _8: *const ();
scope 9 (inlined std::ptr::const_ptr::<impl *const u32>::cast::<()>) {
debug self => _7;
}
scope 10 (inlined std::ptr::from_raw_parts::<[u32]>) {
debug data_pointer => _8;
debug metadata => _5;
}
}
}
scope 4 (inlined std::ptr::const_ptr::<impl *const [u32]>::len) {
debug self => _1;
scope 5 (inlined std::ptr::metadata::<[u32]>) {
debug ptr => _1;
}
}
}
}
bb0: {
_0 = <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked(move _2, move _1) -> [return: bb1, unwind unreachable];
}
bb1: {
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = SubUnchecked(_4, _3);
StorageLive(_7);
StorageLive(_6);
_6 = _1 as *const u32 (PtrToPtr);
_7 = Offset(_6, _3);
StorageDead(_6);
StorageLive(_8);
_8 = _7 as *const () (PtrToPtr);
_0 = *const [u32] from (_8, _5);
StorageDead(_8);
StorageDead(_7);
StorageDead(_5);
return;
}
}

@@ -4,16 +4,66 @@ fn slice_ptr_get_unchecked_range(_1: *const [u32], _2: std::ops::Range<usize>) -
debug slice => _1;
debug index => _2;
let mut _0: *const [u32];
let mut _3: usize;
let mut _4: usize;
scope 1 (inlined std::ptr::const_ptr::<impl *const [u32]>::get_unchecked::<std::ops::Range<usize>>) {
debug self => _1;
debug index => _2;
debug ((index: std::ops::Range<usize>).0: usize) => _3;
debug ((index: std::ops::Range<usize>).1: usize) => _4;
scope 2 (inlined <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked) {
debug ((self: std::ops::Range<usize>).0: usize) => _3;
debug ((self: std::ops::Range<usize>).1: usize) => _4;
debug slice => _1;
let _5: usize;
let mut _6: *const u32;
let mut _7: *const u32;
scope 3 {
debug new_len => _5;
scope 6 (inlined std::ptr::const_ptr::<impl *const [u32]>::as_ptr) {
debug self => _1;
}
scope 7 (inlined std::ptr::const_ptr::<impl *const u32>::add) {
debug self => _6;
debug count => _3;
}
scope 8 (inlined slice_from_raw_parts::<u32>) {
debug data => _7;
debug len => _5;
let mut _8: *const ();
scope 9 (inlined std::ptr::const_ptr::<impl *const u32>::cast::<()>) {
debug self => _7;
}
scope 10 (inlined std::ptr::from_raw_parts::<[u32]>) {
debug data_pointer => _8;
debug metadata => _5;
}
}
}
scope 4 (inlined std::ptr::const_ptr::<impl *const [u32]>::len) {
debug self => _1;
scope 5 (inlined std::ptr::metadata::<[u32]>) {
debug ptr => _1;
}
}
}
}
bb0: {
_0 = <std::ops::Range<usize> as SliceIndex<[u32]>>::get_unchecked(move _2, move _1) -> [return: bb1, unwind continue];
}
bb1: {
_3 = move (_2.0: usize);
_4 = move (_2.1: usize);
StorageLive(_5);
_5 = SubUnchecked(_4, _3);
StorageLive(_7);
StorageLive(_6);
_6 = _1 as *const u32 (PtrToPtr);
_7 = Offset(_6, _3);
StorageDead(_6);
StorageLive(_8);
_8 = _7 as *const () (PtrToPtr);
_0 = *const [u32] from (_8, _5);
StorageDead(_8);
StorageDead(_7);
StorageDead(_5);
return;
}
}
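
The two dumps above are the same pre-codegen mir-opt test under the two panic strategies (`unwind unreachable` versus `unwind continue`). Before this change `bb0` was an out-of-line call to `<Range<usize> as SliceIndex<[u32]>>::get_unchecked`; afterwards the whole chain inlines down to a `SubUnchecked`, an `Offset`, and the new wide-pointer aggregate `_0 = *const [u32] from (_8, _5)`. The test input is presumably something like the following (hypothetical reconstruction, the test source is not part of this excerpt):

// Hypothetical reconstruction of the mir-opt test function; needs
// #![feature(slice_ptr_get)] on nightly for `get_unchecked` on raw slice
// pointers.
use std::ops::Range;

pub unsafe fn slice_ptr_get_unchecked_range(
    slice: *const [u32],
    index: Range<usize>,
) -> *const [u32] {
    slice.get_unchecked(index)
}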

@@ -3,12 +3,70 @@
fn vec_deref_to_slice(_1: &Vec<u8>) -> &[u8] {
debug v => _1;
let mut _0: &[u8];
bb0: {
_0 = <Vec<u8> as Deref>::deref(move _1) -> [return: bb1, unwind unreachable];
scope 1 (inlined <Vec<u8> as Deref>::deref) {
debug self => _1;
let mut _4: *const u8;
let mut _5: usize;
scope 2 (inlined Vec::<u8>::as_ptr) {
debug self => _1;
let mut _2: &alloc::raw_vec::RawVec<u8>;
scope 3 (inlined alloc::raw_vec::RawVec::<u8>::ptr) {
debug self => _2;
let mut _3: std::ptr::NonNull<u8>;
scope 4 (inlined Unique::<u8>::as_ptr) {
debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _3;
debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>;
scope 5 (inlined NonNull::<u8>::as_ptr) {
debug self => _3;
}
}
}
}
scope 6 (inlined std::slice::from_raw_parts::<'_, u8>) {
debug data => _4;
debug len => _5;
let _7: *const [u8];
scope 7 (inlined core::ub_checks::check_language_ub) {
scope 8 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
scope 9 (inlined std::mem::size_of::<u8>) {
}
scope 10 (inlined align_of::<u8>) {
}
scope 11 (inlined slice_from_raw_parts::<u8>) {
debug data => _4;
debug len => _5;
let mut _6: *const ();
scope 12 (inlined std::ptr::const_ptr::<impl *const u8>::cast::<()>) {
debug self => _4;
}
scope 13 (inlined std::ptr::from_raw_parts::<[u8]>) {
debug data_pointer => _6;
debug metadata => _5;
}
}
}
}
bb1: {
bb0: {
StorageLive(_4);
StorageLive(_2);
_2 = &((*_1).0: alloc::raw_vec::RawVec<u8>);
StorageLive(_3);
_3 = ((((*_1).0: alloc::raw_vec::RawVec<u8>).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>);
_4 = (_3.0: *const u8);
StorageDead(_3);
StorageDead(_2);
StorageLive(_5);
_5 = ((*_1).1: usize);
StorageLive(_6);
_6 = _4 as *const () (PtrToPtr);
_7 = *const [u8] from (_6, _5);
StorageDead(_6);
StorageDead(_5);
StorageDead(_4);
_0 = &(*_7);
return;
}
}

@@ -3,12 +3,70 @@
fn vec_deref_to_slice(_1: &Vec<u8>) -> &[u8] {
debug v => _1;
let mut _0: &[u8];
bb0: {
_0 = <Vec<u8> as Deref>::deref(move _1) -> [return: bb1, unwind continue];
scope 1 (inlined <Vec<u8> as Deref>::deref) {
debug self => _1;
let mut _4: *const u8;
let mut _5: usize;
scope 2 (inlined Vec::<u8>::as_ptr) {
debug self => _1;
let mut _2: &alloc::raw_vec::RawVec<u8>;
scope 3 (inlined alloc::raw_vec::RawVec::<u8>::ptr) {
debug self => _2;
let mut _3: std::ptr::NonNull<u8>;
scope 4 (inlined Unique::<u8>::as_ptr) {
debug ((self: Unique<u8>).0: std::ptr::NonNull<u8>) => _3;
debug ((self: Unique<u8>).1: std::marker::PhantomData<u8>) => const PhantomData::<u8>;
scope 5 (inlined NonNull::<u8>::as_ptr) {
debug self => _3;
}
}
}
}
scope 6 (inlined std::slice::from_raw_parts::<'_, u8>) {
debug data => _4;
debug len => _5;
let _7: *const [u8];
scope 7 (inlined core::ub_checks::check_language_ub) {
scope 8 (inlined core::ub_checks::check_language_ub::runtime) {
}
}
scope 9 (inlined std::mem::size_of::<u8>) {
}
scope 10 (inlined align_of::<u8>) {
}
scope 11 (inlined slice_from_raw_parts::<u8>) {
debug data => _4;
debug len => _5;
let mut _6: *const ();
scope 12 (inlined std::ptr::const_ptr::<impl *const u8>::cast::<()>) {
debug self => _4;
}
scope 13 (inlined std::ptr::from_raw_parts::<[u8]>) {
debug data_pointer => _6;
debug metadata => _5;
}
}
}
}
bb1: {
bb0: {
StorageLive(_4);
StorageLive(_2);
_2 = &((*_1).0: alloc::raw_vec::RawVec<u8>);
StorageLive(_3);
_3 = ((((*_1).0: alloc::raw_vec::RawVec<u8>).0: std::ptr::Unique<u8>).0: std::ptr::NonNull<u8>);
_4 = (_3.0: *const u8);
StorageDead(_3);
StorageDead(_2);
StorageLive(_5);
_5 = ((*_1).1: usize);
StorageLive(_6);
_6 = _4 as *const () (PtrToPtr);
_7 = *const [u8] from (_6, _5);
StorageDead(_6);
StorageDead(_5);
StorageDead(_4);
_0 = &(*_7);
return;
}
}
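
Same pattern for `Vec` deref, again once per panic strategy: the `<Vec<u8> as Deref>::deref` call is now fully inlined, ending in the raw-parts aggregate `_7 = *const [u8] from (_6, _5)` rather than a call. The input is presumably just (hypothetical reconstruction):

// Hypothetical reconstruction of the mir-opt test function.
pub fn vec_deref_to_slice(v: &Vec<u8>) -> &[u8] {
    v
}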