Auto merge of #2071 - RalfJung:provenance, r=RalfJung
adjust for provenance cleanup

This is the Miri side of https://github.com/rust-lang/rust/pull/96165.
commit c9039de613
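Note for orientation: after rust-lang/rust#96165, a pointer exposes its absolute address via `addr()`, so Miri code that only compares or subtracts addresses no longer peels the pointer apart with `into_pointer_or_addr().unwrap().into_parts()`. A minimal self-contained sketch of that shape, using toy stand-ins (`Tag` and `Pointer` below are illustrative, not Miri's real types):

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Tag(u64); // toy stand-in for Miri's provenance tag

#[derive(Clone, Copy)]
struct Pointer<P> {
    provenance: P,
    addr: u64,
}

impl<P> Pointer<P> {
    // New-style accessor: the absolute address is available even when the
    // pointer carries no provenance (`P = Option<Tag>`).
    fn addr(self) -> u64 {
        self.addr
    }
}

fn main() {
    let a = Pointer { provenance: Some(Tag(1)), addr: 0x100 };
    let b = Pointer { provenance: None::<Tag>, addr: 0x108 };
    // Old code unwrapped the provenance just to get at an offset;
    // new code compares addresses directly.
    assert!(b.addr() > a.addr());
    assert_eq!(b.addr() - a.addr(), 8);
}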
@@ -1 +1 @@
-c8422403f775126c40d558838d321c063554c822
+27af5175497936ea3413bef5816e7c0172514b9c
@@ -999,7 +999,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         if let Some(data_race) = &this.machine.data_race {
             if data_race.multi_threaded.get() {
                 let size = place.layout.size;
-                let (alloc_id, base_offset, ptr) = this.ptr_get_alloc_id(place.ptr)?;
+                let (alloc_id, base_offset, _tag) = this.ptr_get_alloc_id(place.ptr)?;
                 // Load and log the atomic operation.
                 // Note that atomic loads are possible even from read-only allocations, so `get_alloc_extra_mut` is not an option.
                 let alloc_meta = &this.get_alloc_extra(alloc_id)?.data_race.as_ref().unwrap();
@@ -1007,7 +1007,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
                     "Atomic op({}) with ordering {:?} on {:?} (size={})",
                     description,
                     &atomic,
-                    ptr,
+                    place.ptr,
                     size.bytes()
                 );

@@ -1039,7 +1039,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
                 {
                     log::trace!(
                         "Updated atomic memory({:?}, size={}) to {:#?}",
-                        ptr,
+                        place.ptr,
                         size.bytes(),
                         range.atomic_ops
                     );
@@ -271,8 +271,6 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
     /// Visits the memory covered by `place`, sensitive to freezing: the 2nd parameter
     /// of `action` will be true if this is frozen, false if this is in an `UnsafeCell`.
     /// The range is relative to `place`.
-    ///
-    /// Assumes that the `place` has a proper pointer in it.
     fn visit_freeze_sensitive(
         &self,
         place: &MPlaceTy<'tcx, Tag>,
@@ -290,33 +288,30 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
         // Store how far we proceeded into the place so far. Everything to the left of
         // this offset has already been handled, in the sense that the frozen parts
         // have had `action` called on them.
-        let ptr = place.ptr.into_pointer_or_addr().unwrap();
-        let start_offset = ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
-        let mut cur_offset = start_offset;
+        let start_addr = place.ptr.addr();
+        let mut cur_addr = start_addr;
         // Called when we detected an `UnsafeCell` at the given offset and size.
         // Calls `action` and advances `cur_ptr`.
-        let mut unsafe_cell_action = |unsafe_cell_ptr: Pointer<Option<Tag>>,
+        let mut unsafe_cell_action = |unsafe_cell_ptr: &Pointer<Option<Tag>>,
                                       unsafe_cell_size: Size| {
-            let unsafe_cell_ptr = unsafe_cell_ptr.into_pointer_or_addr().unwrap();
-            debug_assert_eq!(unsafe_cell_ptr.provenance, ptr.provenance);
             // We assume that we are given the fields in increasing offset order,
             // and nothing else changes.
-            let unsafe_cell_offset = unsafe_cell_ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
-            assert!(unsafe_cell_offset >= cur_offset);
-            let frozen_size = unsafe_cell_offset - cur_offset;
+            let unsafe_cell_addr = unsafe_cell_ptr.addr();
+            assert!(unsafe_cell_addr >= cur_addr);
+            let frozen_size = unsafe_cell_addr - cur_addr;
             // Everything between the cur_ptr and this `UnsafeCell` is frozen.
             if frozen_size != Size::ZERO {
-                action(alloc_range(cur_offset - start_offset, frozen_size), /*frozen*/ true)?;
+                action(alloc_range(cur_addr - start_addr, frozen_size), /*frozen*/ true)?;
             }
-            cur_offset += frozen_size;
+            cur_addr += frozen_size;
             // This `UnsafeCell` is NOT frozen.
             if unsafe_cell_size != Size::ZERO {
                 action(
-                    alloc_range(cur_offset - start_offset, unsafe_cell_size),
+                    alloc_range(cur_addr - start_addr, unsafe_cell_size),
                     /*frozen*/ false,
                 )?;
             }
-            cur_offset += unsafe_cell_size;
+            cur_addr += unsafe_cell_size;
             // Done
             Ok(())
         };
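The rewritten visitor keeps its cursor in absolute addresses and converts back to place-relative offsets only at the `action` call sites; since all fields live in one allocation, address differences equal offset differences. A self-contained sketch of that cursor pattern (plain `u64` values stand in for Miri's `Size` and `alloc_range`; this is an illustration, not the real code):

// `unsafe_cells` lists (absolute address, size) of each UnsafeCell, in
// increasing address order; `action` receives (relative offset, size, frozen).
fn visit(start_addr: u64, unsafe_cells: &[(u64, u64)], mut action: impl FnMut(u64, u64, bool)) {
    let mut cur_addr = start_addr;
    for &(cell_addr, cell_size) in unsafe_cells {
        assert!(cell_addr >= cur_addr);
        let frozen_size = cell_addr - cur_addr;
        if frozen_size != 0 {
            // Everything between the cursor and this cell is frozen.
            action(cur_addr - start_addr, frozen_size, /*frozen*/ true);
        }
        cur_addr += frozen_size;
        if cell_size != 0 {
            // The UnsafeCell itself is not frozen.
            action(cur_addr - start_addr, cell_size, /*frozen*/ false);
        }
        cur_addr += cell_size;
    }
}

fn main() {
    // One 4-byte UnsafeCell at offset 8 of a place starting at address 0x100.
    visit(0x100, &[(0x108, 4)], |offset, size, frozen| {
        println!("range {}..{} frozen={}", offset, offset + size, frozen);
    });
}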
@@ -334,7 +329,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
                     .unwrap_or_else(|| place.layout.size);
                 // Now handle this `UnsafeCell`, unless it is empty.
                 if unsafe_cell_size != Size::ZERO {
-                    unsafe_cell_action(place.ptr, unsafe_cell_size)
+                    unsafe_cell_action(&place.ptr, unsafe_cell_size)
                 } else {
                     Ok(())
                 }
@@ -344,7 +339,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
         }
         // The part between the end_ptr and the end of the place is also frozen.
         // So pretend there is a 0-sized `UnsafeCell` at the end.
-        unsafe_cell_action(place.ptr.wrapping_offset(size, this), Size::ZERO)?;
+        unsafe_cell_action(&place.ptr.offset(size, this)?, Size::ZERO)?;
         // Done!
         return Ok(());

@@ -428,9 +423,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
                 let mut places =
                     fields.collect::<InterpResult<'tcx, Vec<MPlaceTy<'tcx, Tag>>>>()?;
                 // we just compare offsets, the abs. value never matters
-                places.sort_by_key(|place| {
-                    place.ptr.into_pointer_or_addr().unwrap().into_parts().1 as Size
-                });
+                places.sort_by_key(|place| place.ptr.addr());
                 self.walk_aggregate(place, places.into_iter().map(Ok))
             }
             FieldsShape::Union { .. } | FieldsShape::Primitive => {
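Sorting the field places now keys on `ptr.addr()` directly; only the order matters, never the absolute values, and since the fields share one allocation this matches the old offset comparison. A toy equivalent of the sort:

fn main() {
    // Stand-ins for `MPlaceTy` values: (field name, absolute address).
    let mut places = vec![("b", 0x108u64), ("a", 0x100), ("c", 0x110)];
    // Same idea as `places.sort_by_key(|place| place.ptr.addr())`.
    places.sort_by_key(|&(_name, addr)| addr);
    let order: Vec<_> = places.iter().map(|&(name, _)| name).collect();
    assert_eq!(order, ["a", "b", "c"]);
}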
@@ -777,6 +770,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
     /// Mark a machine allocation that was just created as immutable.
     fn mark_immutable(&mut self, mplace: &MemPlace<Tag>) {
         let this = self.eval_context_mut();
+        // This got just allocated, so there definitely is a pointer here.
         this.alloc_mark_immutable(mplace.ptr.into_pointer_or_addr().unwrap().provenance.alloc_id)
             .unwrap();
     }
@@ -128,7 +128,7 @@ impl<'mir, 'tcx> GlobalStateInner {

     /// Convert a relative (tcx) pointer to an absolute address.
     pub fn rel_ptr_to_addr(ecx: &MiriEvalContext<'mir, 'tcx>, ptr: Pointer<AllocId>) -> u64 {
-        let (alloc_id, offset) = ptr.into_parts(); // offset is relative
+        let (alloc_id, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
         let base_addr = GlobalStateInner::alloc_base_addr(ecx, alloc_id);

         // Add offset with the right kind of pointer-overflowing arithmetic.
@@ -137,7 +137,7 @@ impl<'mir, 'tcx> GlobalStateInner {
     }

     pub fn abs_ptr_to_rel(ecx: &MiriEvalContext<'mir, 'tcx>, ptr: Pointer<Tag>) -> Size {
-        let (tag, addr) = ptr.into_parts(); // addr is absolute
+        let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance)
         let base_addr = GlobalStateInner::alloc_base_addr(ecx, tag.alloc_id);

         // Wrapping "addr - base_addr"
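The two conversions are inverses up to wrapping arithmetic; the comments added in these hunks just record which provenance type each `into_parts` call operates on. A sketch of the round trip, with a hypothetical base-address table standing in for Miri's per-allocation lookup (the real `alloc_base_addr` assigns and caches base addresses lazily):

// Hypothetical, deterministic base-address table for illustration only.
fn alloc_base_addr(alloc_id: u64) -> u64 {
    0x1000 * (alloc_id + 1)
}

/// Relative (alloc_id, offset) -> absolute address.
fn rel_ptr_to_addr(alloc_id: u64, offset: u64) -> u64 {
    alloc_base_addr(alloc_id).wrapping_add(offset)
}

/// Absolute address -> offset relative to the allocation's base.
fn abs_ptr_to_rel(alloc_id: u64, addr: u64) -> u64 {
    addr.wrapping_sub(alloc_base_addr(alloc_id))
}

fn main() {
    let addr = rel_ptr_to_addr(3, 8);
    assert_eq!(abs_ptr_to_rel(3, addr), 8);
}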
@@ -360,6 +360,7 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
         name: &str,
         ptr: Pointer<Option<Tag>>,
     ) {
+        // This got just allocated, so there definitely is a pointer here.
         let ptr = ptr.into_pointer_or_addr().unwrap();
         this.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
     }
@@ -431,11 +432,13 @@ impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
 /// Machine hook implementations.
 impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
     type MemoryKind = MiriMemoryKind;
+    type ExtraFnVal = Dlsym;

     type FrameExtra = FrameData<'tcx>;
     type AllocExtra = AllocExtra;

     type PointerTag = Tag;
-    type ExtraFnVal = Dlsym;
+    type TagExtra = SbTag;

     type MemoryMap =
         MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
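`TagExtra` is the per-machine extra tag data threaded from `ptr_get_alloc` into the memory hooks, and Miri instantiates it with its Stacked Borrows tag. A toy model of that plumbing, under the assumption that a heavily simplified trait suffices for illustration (this is not the real `Machine` trait):

// Simplified model: the machine chooses what extra tag data accompanies
// the allocation id when a pointer is taken apart.
trait Machine {
    type PointerTag: Copy;
    type TagExtra: Copy;
    fn ptr_get_alloc(ptr: Pointer<Self::PointerTag>) -> (u64, Self::TagExtra);
}

#[derive(Clone, Copy)]
struct Pointer<P> {
    provenance: P,
    addr: u64,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct SbTag(u64);

#[derive(Clone, Copy)]
struct Tag {
    alloc_id: u64,
    sb: SbTag,
}

struct Evaluator;

impl Machine for Evaluator {
    type PointerTag = Tag;
    type TagExtra = SbTag;
    fn ptr_get_alloc(ptr: Pointer<Tag>) -> (u64, SbTag) {
        // The Stacked Borrows tag rides along as the extra component.
        (ptr.provenance.alloc_id, ptr.provenance.sb)
    }
}

fn main() {
    let ptr = Pointer { provenance: Tag { alloc_id: 1, sb: SbTag(42) }, addr: 0x1000 };
    let (alloc_id, sb) = Evaluator::ptr_get_alloc(ptr);
    assert_eq!((alloc_id, sb), (1, SbTag(42)));
}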
@@ -607,9 +610,9 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
     fn ptr_get_alloc(
         ecx: &MiriEvalContext<'mir, 'tcx>,
         ptr: Pointer<Self::PointerTag>,
-    ) -> (AllocId, Size) {
+    ) -> (AllocId, Size, Self::TagExtra) {
         let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
-        (ptr.provenance.alloc_id, rel)
+        (ptr.provenance.alloc_id, rel, ptr.provenance.sb)
     }

     #[inline(always)]
@@ -617,16 +620,16 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
         _tcx: TyCtxt<'tcx>,
         machine: &Self,
         alloc_extra: &AllocExtra,
-        tag: Tag,
+        (alloc_id, tag): (AllocId, Self::TagExtra),
         range: AllocRange,
     ) -> InterpResult<'tcx> {
         if let Some(data_race) = &alloc_extra.data_race {
-            data_race.read(tag.alloc_id, range, machine.data_race.as_ref().unwrap())?;
+            data_race.read(alloc_id, range, machine.data_race.as_ref().unwrap())?;
         }
         if let Some(stacked_borrows) = &alloc_extra.stacked_borrows {
             stacked_borrows.memory_read(
-                tag.alloc_id,
-                tag.sb,
+                alloc_id,
+                tag,
                 range,
                 machine.stacked_borrows.as_ref().unwrap(),
             )
@@ -640,16 +643,16 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
         _tcx: TyCtxt<'tcx>,
         machine: &mut Self,
         alloc_extra: &mut AllocExtra,
-        tag: Tag,
+        (alloc_id, tag): (AllocId, Self::TagExtra),
         range: AllocRange,
     ) -> InterpResult<'tcx> {
         if let Some(data_race) = &mut alloc_extra.data_race {
-            data_race.write(tag.alloc_id, range, machine.data_race.as_mut().unwrap())?;
+            data_race.write(alloc_id, range, machine.data_race.as_mut().unwrap())?;
         }
         if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
             stacked_borrows.memory_written(
-                tag.alloc_id,
-                tag.sb,
+                alloc_id,
+                tag,
                 range,
                 machine.stacked_borrows.as_mut().unwrap(),
             )
@@ -663,19 +666,19 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
         _tcx: TyCtxt<'tcx>,
         machine: &mut Self,
         alloc_extra: &mut AllocExtra,
-        tag: Tag,
+        (alloc_id, tag): (AllocId, Self::TagExtra),
         range: AllocRange,
     ) -> InterpResult<'tcx> {
-        if Some(tag.alloc_id) == machine.tracked_alloc_id {
-            register_diagnostic(NonHaltingDiagnostic::FreedAlloc(tag.alloc_id));
+        if Some(alloc_id) == machine.tracked_alloc_id {
+            register_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
         }
         if let Some(data_race) = &mut alloc_extra.data_race {
-            data_race.deallocate(tag.alloc_id, range, machine.data_race.as_mut().unwrap())?;
+            data_race.deallocate(alloc_id, range, machine.data_race.as_mut().unwrap())?;
         }
         if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
             stacked_borrows.memory_deallocated(
-                tag.alloc_id,
-                tag.sb,
+                alloc_id,
+                tag,
                 range,
                 machine.stacked_borrows.as_mut().unwrap(),
             )
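All three hooks (`memory_read`, `memory_written`, `memory_deallocated`) now destructure the `(AllocId, Self::TagExtra)` pair right in the parameter list, so their bodies use `alloc_id` and `tag` directly instead of `tag.alloc_id` and `tag.sb`. A toy illustration of that tuple-pattern parameter (plain `u64` stand-ins, hypothetical signature):

// Pattern in the parameter list: the caller passes one tuple, the body gets
// two named bindings, mirroring `(alloc_id, tag): (AllocId, Self::TagExtra)`.
fn memory_read((alloc_id, tag): (u64, u64), range: std::ops::Range<u64>) {
    println!("read {:?} of allocation {} through tag {}", range, alloc_id, tag);
}

fn main() {
    memory_read((7, 42), 0..16);
}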
@@ -124,7 +124,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {

         let ptr = this.read_pointer(ptr)?;
         // Take apart the pointer, we need its pieces.
-        let (alloc_id, offset, ptr) = this.ptr_get_alloc_id(ptr)?;
+        let (alloc_id, offset, _tag) = this.ptr_get_alloc_id(ptr)?;

         let fn_instance =
             if let Some(GlobalAlloc::Function(instance)) = this.tcx.get_global_alloc(alloc_id) {
@@ -83,10 +83,9 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
         }

         let ptr = this.read_pointer(ptr_op)?;
-        if let Ok(ptr) = ptr.into_pointer_or_addr() {
+        if let Ok((alloc_id, _offset, _)) = this.ptr_try_get_alloc_id(ptr) {
             // Only do anything if we can identify the allocation this goes to.
-            let (_, cur_align) =
-                this.get_alloc_size_and_align(ptr.provenance.alloc_id, AllocCheck::MaybeDead)?;
+            let (_, cur_align) = this.get_alloc_size_and_align(alloc_id, AllocCheck::MaybeDead)?;
             if cur_align.bytes() >= req_align {
                 // If the allocation alignment is at least the required alignment we use the
                 // real implementation.
@@ -702,8 +702,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
             );
             return Ok(());
         }
-        let (alloc_id, base_offset, ptr) = this.ptr_get_alloc_id(place.ptr)?;
-        let orig_tag = ptr.provenance.sb;
+        let (alloc_id, base_offset, orig_tag) = this.ptr_get_alloc_id(place.ptr)?;

         // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050).
         let (alloc_size, _) =
tests/run-pass/issue-miri-2068.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
+#![feature(pin_macro)]
+
+use core::future::Future;
+use core::pin::Pin;
+use core::task::{Context, Poll};
+
+use std::sync::Arc;
+
+struct NopWaker;
+
+impl std::task::Wake for NopWaker {
+    fn wake(self: Arc<Self>) {}
+}
+
+pub fn fuzzing_block_on<O, F: Future<Output = O>>(fut: F) -> O {
+    let mut fut = std::pin::pin!(fut);
+    let waker = std::task::Waker::from(Arc::new(NopWaker));
+    let mut context = std::task::Context::from_waker(&waker);
+    loop {
+        match fut.as_mut().poll(&mut context) {
+            Poll::Ready(v) => return v,
+            Poll::Pending => {}
+        }
+    }
+}
+
+pub struct LastFuture<S> {
+    last: S,
+}
+
+impl<S> Future for LastFuture<S>
+where
+    Self: Unpin,
+    S: Unpin + Copy,
+{
+    type Output = S;
+
+    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
+        return Poll::Ready(self.last);
+    }
+}
+
+fn main() {
+    fuzzing_block_on(async {
+        LastFuture { last: &0u32 }.await;
+        LastFuture { last: Option::<u32>::None }.await;
+    });
+}
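A note on the new test: `fuzzing_block_on` repeatedly polls a pinned future with a no-op waker until it is ready, and `main` awaits two `LastFuture` values whose `last` fields have different types (a reference and an `Option<u32>`). This is the regression case from rust-lang/miri#2068, which presumably exercised the `into_pointer_or_addr().unwrap()` paths removed above; with the cleanup, visiting and sorting such places no longer unwraps provenance. Run-pass tests like this are executed by Miri's test suite (e.g. via `./miri test`).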