Merge from rustc
commit 7a599d758d

Cargo.lock
@@ -722,6 +722,18 @@ version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"

[[package]]
name = "coverage-dump"
version = "0.1.0"
dependencies = [
"anyhow",
"leb128",
"md-5",
"miniz_oxide",
"regex",
"rustc-demangle",
]

[[package]]
name = "coverage_test_macros"
version = "0.0.0"

@@ -2041,6 +2053,12 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"

[[package]]
name = "leb128"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"

[[package]]
name = "levenshtein"
version = "1.0.5"

@@ -4228,7 +4246,6 @@ dependencies = [
"measureme",
"memoffset",
"rustc-rayon-core",
"rustc_ast",
"rustc_data_structures",
"rustc_errors",
"rustc_hir",

@@ -4437,15 +4454,12 @@ dependencies = [
name = "rustc_traits"
version = "0.0.0"
dependencies = [
"rustc_ast",
"rustc_data_structures",
"rustc_hir",
"rustc_infer",
"rustc_middle",
"rustc_span",
"rustc_target",
"rustc_trait_selection",
"smallvec",
"tracing",
]

@@ -43,6 +43,7 @@ members = [
"src/tools/generate-windows-sys",
"src/tools/rustdoc-gui-test",
"src/tools/opt-dist",
"src/tools/coverage-dump",
]

exclude = [
@@ -10,9 +10,9 @@ Language
- [expand: Change how `#![cfg(FALSE)]` behaves on crate root](https://github.com/rust-lang/rust/pull/110141/)
- [Stabilize inline asm for LoongArch64](https://github.com/rust-lang/rust/pull/111235/)
- [Uplift `clippy::undropped_manually_drops` lint](https://github.com/rust-lang/rust/pull/111530/)
- [Uplift `clippy::invalid_utf8_in_unchecked` lint](https://github.com/rust-lang/rust/pull/111543/)
- [Uplift `clippy::cast_ref_to_mut` lint](https://github.com/rust-lang/rust/pull/111567/)
- [Uplift `clippy::cmp_nan` lint](https://github.com/rust-lang/rust/pull/111818/)
- [Uplift `clippy::invalid_utf8_in_unchecked` lint](https://github.com/rust-lang/rust/pull/111543/) as `invalid_from_utf8_unchecked` and `invalid_from_utf8`
- [Uplift `clippy::cast_ref_to_mut` lint](https://github.com/rust-lang/rust/pull/111567/) as `invalid_reference_casting`
- [Uplift `clippy::cmp_nan` lint](https://github.com/rust-lang/rust/pull/111818/) as `invalid_nan_comparisons`
- [resolve: Remove artificial import ambiguity errors](https://github.com/rust-lang/rust/pull/112086/)
- [Don't require associated types with Self: Sized bounds in `dyn Trait` objects](https://github.com/rust-lang/rust/pull/112319/)
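A hedged sketch of code that the renamed lints above are meant to catch (illustrative only; the exact diagnostics and spans are not reproduced here):

    // Illustrative only: patterns targeted by the uplifted lints named above.
    fn lint_targets(x: f64) -> bool {
        // `invalid_nan_comparisons`: a comparison against NaN never does what it looks like.
        let always_false = x == f64::NAN;

        // `invalid_from_utf8_unchecked`: a byte literal that is known not to be UTF-8.
        let _s = unsafe { std::str::from_utf8_unchecked(&[0xC0, 0x80]) };

        always_false
    }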
@@ -213,14 +213,10 @@ impl AttrTokenStream {
.into_iter()
}
AttrTokenTree::Attributes(data) => {
let mut outer_attrs = Vec::new();
let mut inner_attrs = Vec::new();
for attr in &data.attrs {
match attr.style {
crate::AttrStyle::Outer => outer_attrs.push(attr),
crate::AttrStyle::Inner => inner_attrs.push(attr),
}
}
let idx = data
.attrs
.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);

let mut target_tokens: Vec<_> = data
.tokens

@@ -265,10 +261,10 @@ impl AttrTokenStream {
"Failed to find trailing delimited group in: {target_tokens:?}"
);
}
let mut flat: SmallVec<[_; 1]> = SmallVec::new();
let mut flat: SmallVec<[_; 1]> =
SmallVec::with_capacity(target_tokens.len() + outer_attrs.len());
for attr in outer_attrs {
// FIXME: Make this more efficient
flat.extend(attr.tokens().0.clone().iter().cloned());
flat.extend(attr.tokens().0.iter().cloned());
}
flat.extend(target_tokens);
flat.into_iter()
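The rewrite above replaces the hand-rolled partition loop with `slice::partition_point` plus `split_at`. A standalone sketch of that std pattern (plain types, not the compiler's `AttrStyle`), relying on the precondition that all "outer" elements precede all "inner" ones:

    fn main() {
        #[derive(Debug, PartialEq)]
        enum Style { Outer, Inner }

        // Precondition for `partition_point`: the slice is already partitioned,
        // i.e. every element matching the predicate comes before every element that doesn't.
        let attrs = [Style::Outer, Style::Outer, Style::Inner];
        let idx = attrs.partition_point(|s| matches!(s, Style::Outer));
        let (outer, inner) = attrs.split_at(idx);
        assert_eq!((outer.len(), inner.len()), (2, 1));
    }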
@@ -603,7 +603,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro

fn visit_statement_before_primary_effect(
&mut self,
_results: &R,
_results: &mut R,
flow_state: &Flows<'cx, 'tcx>,
stmt: &'cx Statement<'tcx>,
location: Location,

@@ -673,7 +673,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro

fn visit_terminator_before_primary_effect(
&mut self,
_results: &R,
_results: &mut R,
flow_state: &Flows<'cx, 'tcx>,
term: &'cx Terminator<'tcx>,
loc: Location,

@@ -784,7 +784,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro

fn visit_terminator_after_primary_effect(
&mut self,
_results: &R,
_results: &mut R,
flow_state: &Flows<'cx, 'tcx>,
term: &'cx Terminator<'tcx>,
loc: Location,

@@ -82,12 +82,9 @@ impl DebugContext {
match tcx.sess.source_map().lookup_line(span.lo()) {
Ok(SourceFileAndLine { sf: file, line }) => {
let line_pos = file.lines(|lines| lines[line]);
let col = file.relative_position(span.lo()) - line_pos;

(
file,
u64::try_from(line).unwrap() + 1,
u64::from((span.lo() - line_pos).to_u32()) + 1,
)
(file, u64::try_from(line).unwrap() + 1, u64::from(col.to_u32()) + 1)
}
Err(file) => (file, 0, 0),
}
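The debuginfo hunk above now derives the column from `file.relative_position(span.lo()) - line_pos`. As a rough, self-contained model of that arithmetic (plain byte offsets instead of rustc's `SourceFile`/`BytePos` types), 1-based line and column numbers fall out of a table of line-start offsets:

    /// Simplified model: `line_starts` holds the byte offset at which each line begins.
    fn line_and_col(line_starts: &[usize], offset: usize) -> (u64, u64) {
        // Last line whose start is not past the offset.
        let line = line_starts.iter().rposition(|&start| start <= offset).unwrap_or(0);
        let col = offset - line_starts[line];
        // Both results are reported 1-based, as in the debuginfo above.
        (line as u64 + 1, col as u64 + 1)
    }

    fn main() {
        let src = "fn main() {\n    let x = 1;\n}\n";
        let mut starts = vec![0];
        starts.extend(src.match_indices('\n').map(|(i, _)| i + 1));
        assert_eq!(line_and_col(&starts, src.find('x').unwrap()), (2, 9));
    }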
@@ -68,7 +68,7 @@ fn make_mir_scope<'ll, 'tcx>(
let file = cx.sess().source_map().lookup_source_file(mir.span.lo());
debug_context.scopes[scope] = DebugScope {
file_start_pos: file.start_pos,
file_end_pos: file.end_pos,
file_end_pos: file.end_position(),
..debug_context.scopes[scope]
};
instantiated.insert(scope);

@@ -120,7 +120,7 @@ fn make_mir_scope<'ll, 'tcx>(
dbg_scope,
inlined_at: inlined_at.or(parent_scope.inlined_at),
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos,
file_end_pos: loc.file.end_position(),
};
instantiated.insert(scope);
}

@@ -267,7 +267,7 @@ impl CodegenCx<'_, '_> {

// Use 1-based indexing.
let line = (line + 1) as u32;
let col = (pos - line_pos).to_u32() + 1;
let col = (file.relative_position(pos) - line_pos).to_u32() + 1;

(file, line, col)
}

@@ -79,7 +79,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
// we leave alignment checks off, since this `ecx` will not be used for further evaluation anyway

debug!("eval_body_using_ecx done: {:?}", *ret);
debug!("eval_body_using_ecx done: {:?}", ret);
Ok(ret)
}

@@ -147,7 +147,7 @@ pub(super) fn op_to_const<'tcx>(
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let to_const_value = |mplace: &MPlaceTy<'_>| {
debug!("to_const_value(mplace: {:?})", mplace);
match mplace.ptr.into_parts() {
match mplace.ptr().into_parts() {
(Some(alloc_id), offset) => {
let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
ConstValue::ByRef { alloc, offset }

@@ -370,7 +370,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
inner = true;
}
};
let alloc_id = mplace.ptr.provenance.unwrap();
let alloc_id = mplace.ptr().provenance.unwrap();

// Validation failed, report an error. This is always a hard error.
if let Err(error) = validation {

@@ -30,7 +30,7 @@ pub(crate) fn const_caller_location(
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
bug!("intern_const_alloc_recursive should not error in this case")
}
ConstValue::Scalar(Scalar::from_maybe_pointer(loc_place.ptr, &tcx))
ConstValue::Scalar(Scalar::from_maybe_pointer(loc_place.ptr(), &tcx))
}

// We forbid type-level constants that contain more than `VALTREE_MAX_NODES` nodes.

@@ -5,7 +5,7 @@ use crate::const_eval::CanAccessStatics;
use crate::interpret::MPlaceTy;
use crate::interpret::{
intern_const_alloc_recursive, ConstValue, ImmTy, Immediate, InternKind, MemPlaceMeta,
MemoryKind, Place, Projectable, Scalar,
MemoryKind, PlaceTy, Projectable, Scalar,
};
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};

@@ -318,7 +318,7 @@ fn valtree_into_mplace<'tcx>(
let len_scalar = Scalar::from_target_usize(len as u64, &tcx);

Immediate::ScalarPair(
Scalar::from_maybe_pointer((*pointee_place).ptr, &tcx),
Scalar::from_maybe_pointer(pointee_place.ptr(), &tcx),
len_scalar,
)
}

@@ -383,5 +383,5 @@ fn valtree_into_mplace<'tcx>(
}

fn dump_place<'tcx>(ecx: &CompileTimeEvalContext<'tcx, 'tcx>, place: &MPlaceTy<'tcx>) {
trace!("{:?}", ecx.dump_place(Place::Ptr(**place)));
trace!("{:?}", ecx.dump_place(&PlaceTy::from(place.clone())));
}

@@ -21,8 +21,8 @@ use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayou

use super::{
AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace,
MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, PointerArithmetic, Provenance,
Scalar, StackPopJump,
MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, Pointer, PointerArithmetic,
Projectable, Provenance, Scalar, StackPopJump,
};
use crate::errors::{self, ErroneousConstUsed};
use crate::util;
@@ -155,17 +155,26 @@ pub enum StackPopCleanup {
}

/// State of a local variable including a memoized layout
#[derive(Clone, Debug)]
#[derive(Clone)]
pub struct LocalState<'tcx, Prov: Provenance = AllocId> {
pub value: LocalValue<Prov>,
value: LocalValue<Prov>,
/// Don't modify if `Some`, this is only used to prevent computing the layout twice.
/// Avoids computing the layout of locals that are never actually initialized.
pub layout: Cell<Option<TyAndLayout<'tcx>>>,
layout: Cell<Option<TyAndLayout<'tcx>>>,
}

impl<Prov: Provenance> std::fmt::Debug for LocalState<'_, Prov> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("LocalState")
.field("value", &self.value)
.field("ty", &self.layout.get().map(|l| l.ty))
.finish()
}
}

/// Current value of a local variable
#[derive(Copy, Clone, Debug)] // Miri debug-prints these
pub enum LocalValue<Prov: Provenance = AllocId> {
pub(super) enum LocalValue<Prov: Provenance = AllocId> {
/// This local is not currently alive, and cannot be used at all.
Dead,
/// A normal, live local.

@@ -176,10 +185,27 @@ pub enum LocalValue<Prov: Provenance = AllocId> {
Live(Operand<Prov>),
}

impl<'tcx, Prov: Provenance + 'static> LocalState<'tcx, Prov> {
impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
pub fn make_live_uninit(&mut self) {
self.value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
}

/// This is a hack because Miri needs a way to visit all the provenance in a `LocalState`
/// without having a layout or `TyCtxt` available, and we want to keep the `Operand` type
/// private.
pub fn as_mplace_or_imm(
&self,
) -> Option<Either<(Pointer<Option<Prov>>, MemPlaceMeta<Prov>), Immediate<Prov>>> {
match self.value {
LocalValue::Dead => None,
LocalValue::Live(Operand::Indirect(mplace)) => Some(Left((mplace.ptr, mplace.meta))),
LocalValue::Live(Operand::Immediate(imm)) => Some(Right(imm)),
}
}

/// Read the local's value or error if the local is not yet live or not live anymore.
#[inline(always)]
pub fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
pub(super) fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
match &self.value {
LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
LocalValue::Live(val) => Ok(val),

@@ -189,10 +215,10 @@ impl<'tcx, Prov: Provenance + 'static> LocalState<'tcx, Prov> {
/// Overwrite the local. If the local can be overwritten in place, return a reference
/// to do so; otherwise return the `MemPlace` to consult instead.
///
/// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
/// anywhere else. You may be invalidating machine invariants if you do!
/// Note: Before calling this, call the `before_access_local_mut` machine hook! You may be
/// invalidating machine invariants otherwise!
#[inline(always)]
pub fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
pub(super) fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
match &mut self.value {
LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
LocalValue::Live(val) => Ok(val),

@@ -694,7 +720,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&self,
mplace: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, Option<(Size, Align)>> {
self.size_and_align_of(&mplace.meta, &mplace.layout)
self.size_and_align_of(&mplace.meta(), &mplace.layout)
}

#[instrument(skip(self, body, return_place, return_to_block), level = "debug")]

@@ -826,7 +852,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.expect("return place should always be live");
let dest = self.frame().return_place.clone();
let err = self.copy_op(&op, &dest, /*allow_transmute*/ true);
trace!("return value: {:?}", self.dump_place(*dest));
trace!("return value: {:?}", self.dump_place(&dest));
// We delay actually short-circuiting on this error until *after* the stack frame is
// popped, since we want this error to be attributed to the caller, whose type defines
// this transmute.

@@ -974,7 +1000,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
// Need to allocate some memory, since `Immediate::Uninit` cannot be unsized.
let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?;
Operand::Indirect(*dest_place)
Operand::Indirect(*dest_place.mplace())
} else {
assert!(!meta.has_meta()); // we're dropping the metadata
// Just make this an efficient immediate.

@@ -1068,8 +1094,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}

#[must_use]
pub fn dump_place(&self, place: Place<M::Provenance>) -> PlacePrinter<'_, 'mir, 'tcx, M> {
PlacePrinter { ecx: self, place }
pub fn dump_place(
&self,
place: &PlaceTy<'tcx, M::Provenance>,
) -> PlacePrinter<'_, 'mir, 'tcx, M> {
PlacePrinter { ecx: self, place: *place.place() }
}

#[must_use]

@@ -25,7 +25,7 @@ use rustc_ast::Mutability;

use super::{
AllocId, Allocation, ConstAllocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy,
ValueVisitor,
Projectable, ValueVisitor,
};
use crate::const_eval;
use crate::errors::{DanglingPtrInFinal, UnsupportedUntypedPointer};

@@ -177,7 +177,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
if let ty::Dynamic(_, _, ty::Dyn) =
tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
{
let ptr = mplace.meta.unwrap_meta().to_pointer(&tcx)?;
let ptr = mplace.meta().unwrap_meta().to_pointer(&tcx)?;
if let Some(alloc_id) = ptr.provenance {
// Explicitly choose const mode here, since vtables are immutable, even
// if the reference of the fat pointer is mutable.

@@ -191,7 +191,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
}
// Check if we have encountered this pointer+layout combination before.
// Only recurse for allocation-backed pointers.
if let Some(alloc_id) = mplace.ptr.provenance {
if let Some(alloc_id) = mplace.ptr().provenance {
// Compute the mode with which we intern this. Our goal here is to make as many
// statics as we can immutable so they can be placed in read-only memory by LLVM.
let ref_mode = match self.mode {

@@ -267,7 +267,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory

// If there is no provenance in this allocation, it does not contain references
// that point to another allocation, and we can avoid the interning walk.
if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr(), size, align)? {
if !alloc.has_provenance() {
return Ok(false);
}

@@ -353,7 +353,7 @@ pub fn intern_const_alloc_recursive<
leftover_allocations,
// The outermost allocation must exist, because we allocated it with
// `Memory::allocate`.
ret.ptr.provenance.unwrap(),
ret.ptr().provenance.unwrap(),
base_intern_mode,
Some(ret.layout.ty),
);

@@ -466,7 +466,7 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
let dest = self.allocate(layout, MemoryKind::Stack)?;
f(self, &dest.clone().into())?;
let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
let mut alloc = self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap()).unwrap().1;
alloc.mutability = Mutability::Not;
Ok(self.tcx.mk_const_alloc(alloc))
}

@@ -466,7 +466,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
_ => return Ok(false),
}

trace!("{:?}", self.dump_place(**dest));
trace!("{:?}", self.dump_place(dest));
self.go_to_block(ret);
Ok(true)
}

@@ -18,7 +18,7 @@ use crate::const_eval::CheckAlignment;

use super::{
AllocBytes, AllocId, AllocRange, Allocation, ConstAllocation, FnArg, Frame, ImmTy, InterpCx,
InterpResult, MPlaceTy, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar,
InterpResult, MPlaceTy, MemoryKind, OpTy, PlaceTy, Pointer, Provenance, Scalar,
};

/// Data returned by Machine::stack_pop,

@@ -237,22 +237,22 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
right: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, (Scalar<Self::Provenance>, bool, Ty<'tcx>)>;

/// Called to write the specified `local` from the `frame`.
/// Called before writing the specified `local` of the `frame`.
/// Since writing a ZST is not actually accessing memory or locals, this is never invoked
/// for ZST reads.
///
/// Due to borrow checker trouble, we indicate the `frame` as an index rather than an `&mut
/// Frame`.
#[inline]
fn access_local_mut<'a>(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
frame: usize,
local: mir::Local,
) -> InterpResult<'tcx, &'a mut Operand<Self::Provenance>>
#[inline(always)]
fn before_access_local_mut<'a>(
_ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
_frame: usize,
_local: mir::Local,
) -> InterpResult<'tcx>
where
'tcx: 'mir,
{
ecx.stack_mut()[frame].locals[local].access_mut()
Ok(())
}

/// Called before a basic block terminator is executed.
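The hook documentation above keeps the convention of passing the frame as an index rather than an `&mut Frame`; the motivation is the usual disjoint-borrow problem. A toy sketch (made-up types, not the interpreter's) of why the index form composes better with other `&mut self` state:

    struct Frame { locals: Vec<u64> }

    struct Interp {
        frames: Vec<Frame>,
        steps: u64, // extra state the method still wants to touch
    }

    impl Interp {
        // Taking `&mut Frame` here (in addition to `&mut self`) would require two
        // overlapping mutable borrows at the call site; an index sidesteps that.
        fn local_mut(&mut self, frame: usize, local: usize) -> &mut u64 {
            self.steps += 1;
            &mut self.frames[frame].locals[local]
        }
    }

    fn main() {
        let mut interp = Interp { frames: vec![Frame { locals: vec![0, 1] }], steps: 0 };
        *interp.local_mut(0, 1) += 10;
        assert_eq!((interp.frames[0].locals[1], interp.steps), (11, 1));
    }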
@@ -20,16 +20,21 @@ mod visitor;

pub use rustc_middle::mir::interpret::*; // have all the `interpret` symbols in one place: here

pub use self::eval_context::{Frame, FrameInfo, InterpCx, LocalState, LocalValue, StackPopCleanup};
pub use self::eval_context::{Frame, FrameInfo, InterpCx, StackPopCleanup};
pub use self::intern::{intern_const_alloc_recursive, InternKind};
pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackPopJump};
pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
pub use self::operand::{ImmTy, Immediate, OpTy, Operand, Readable};
pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy, Writeable};
pub use self::operand::{ImmTy, Immediate, OpTy, Readable};
pub use self::place::{MPlaceTy, MemPlaceMeta, PlaceTy, Writeable};
pub use self::projection::Projectable;
pub use self::terminator::FnArg;
pub use self::validity::{CtfeValidationMode, RefTracking};
pub use self::visitor::ValueVisitor;

use self::{
operand::Operand,
place::{MemPlace, Place},
};

pub(crate) use self::intrinsics::eval_nullary_intrinsic;
use eval_context::{from_known_layout, mir_assign_valid_types};

@@ -88,7 +88,7 @@ impl<Prov: Provenance> Immediate<Prov> {

// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Clone, Debug)]
#[derive(Clone)]
pub struct ImmTy<'tcx, Prov: Provenance = AllocId> {
imm: Immediate<Prov>,
pub layout: TyAndLayout<'tcx>,

@@ -134,6 +134,12 @@ impl<Prov: Provenance> std::fmt::Display for ImmTy<'_, Prov> {
}
}

impl<Prov: Provenance> std::fmt::Debug for ImmTy<'_, Prov> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ImmTy").field("imm", &self.imm).field("ty", &self.layout.ty).finish()
}
}

impl<'tcx, Prov: Provenance> std::ops::Deref for ImmTy<'tcx, Prov> {
type Target = Immediate<Prov>;
#[inline(always)]

@@ -142,51 +148,6 @@ impl<'tcx, Prov: Provenance> std::ops::Deref for ImmTy<'tcx, Prov> {
}
}

/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug)]
pub enum Operand<Prov: Provenance = AllocId> {
Immediate(Immediate<Prov>),
Indirect(MemPlace<Prov>),
}

#[derive(Clone, Debug)]
pub struct OpTy<'tcx, Prov: Provenance = AllocId> {
op: Operand<Prov>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
/// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct:
/// it needs to have a different alignment than the field type would usually have.
/// So we represent this here with a separate field that "overwrites" `layout.align`.
/// This means `layout.align` should never be used for an `OpTy`!
/// `None` means "alignment does not matter since this is a by-value operand"
/// (`Operand::Immediate`); this field is only relevant for `Operand::Indirect`.
/// Also CTFE ignores alignment anyway, so this is for Miri only.
pub align: Option<Align>,
}

impl<'tcx, Prov: Provenance> std::ops::Deref for OpTy<'tcx, Prov> {
type Target = Operand<Prov>;
#[inline(always)]
fn deref(&self) -> &Operand<Prov> {
&self.op
}
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
#[inline(always)]
fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
OpTy { op: Operand::Indirect(*mplace), layout: mplace.layout, align: Some(mplace.align) }
}
}

impl<'tcx, Prov: Provenance> From<ImmTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
#[inline(always)]
fn from(val: ImmTy<'tcx, Prov>) -> Self {
OpTy { op: Operand::Immediate(val.imm), layout: val.layout, align: None }
}
}

impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
#[inline]
pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {

@@ -319,7 +280,61 @@ impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for ImmTy<'tcx, Prov> {
}
}

impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for OpTy<'tcx, Prov> {
/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug)]
pub(super) enum Operand<Prov: Provenance = AllocId> {
Immediate(Immediate<Prov>),
Indirect(MemPlace<Prov>),
}

#[derive(Clone)]
pub struct OpTy<'tcx, Prov: Provenance = AllocId> {
op: Operand<Prov>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
/// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct:
/// it needs to have a different alignment than the field type would usually have.
/// So we represent this here with a separate field that "overwrites" `layout.align`.
/// This means `layout.align` should never be used for an `OpTy`!
/// `None` means "alignment does not matter since this is a by-value operand"
/// (`Operand::Immediate`); this field is only relevant for `Operand::Indirect`.
/// Also CTFE ignores alignment anyway, so this is for Miri only.
pub align: Option<Align>,
}

impl<Prov: Provenance> std::fmt::Debug for OpTy<'_, Prov> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("OpTy").field("op", &self.op).field("ty", &self.layout.ty).finish()
}
}

impl<'tcx, Prov: Provenance> From<ImmTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
#[inline(always)]
fn from(val: ImmTy<'tcx, Prov>) -> Self {
OpTy { op: Operand::Immediate(val.imm), layout: val.layout, align: None }
}
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
#[inline(always)]
fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
OpTy {
op: Operand::Indirect(*mplace.mplace()),
layout: mplace.layout,
align: Some(mplace.align),
}
}
}

impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
#[inline(always)]
pub(super) fn op(&self) -> &Operand<Prov> {
&self.op
}
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for OpTy<'tcx, Prov> {
#[inline(always)]
fn layout(&self) -> TyAndLayout<'tcx> {
self.layout

@@ -328,7 +343,7 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for OpTy<'tcx, Pr
#[inline]
fn meta(&self) -> MemPlaceMeta<Prov> {
match self.as_mplace_or_imm() {
Left(mplace) => mplace.meta,
Left(mplace) => mplace.meta(),
Right(_) => {
debug_assert!(self.layout.is_sized(), "unsized immediates are not a thing");
MemPlaceMeta::None

@@ -362,18 +377,19 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for OpTy<'tcx, Pr
}
}

/// The `Readable` trait describes interpreter values that one can read from.
pub trait Readable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> {
fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>>;
}

impl<'tcx, Prov: Provenance + 'static> Readable<'tcx, Prov> for OpTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance> Readable<'tcx, Prov> for OpTy<'tcx, Prov> {
#[inline(always)]
fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
self.as_mplace_or_imm()
}
}

impl<'tcx, Prov: Provenance + 'static> Readable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance> Readable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
#[inline(always)]
fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
Left(self.clone())

@@ -535,7 +551,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Turn the wide MPlace into a string (must already be dereferenced!)
pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, &str> {
let len = mplace.len(self)?;
let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len))?;
let bytes = self.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len))?;
let str = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?;
Ok(str)
}

@@ -630,7 +646,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
op = self.project(&op, elem)?
}

trace!("eval_place_to_op: got {:?}", *op);
trace!("eval_place_to_op: got {:?}", op);
// Sanity-check the type we ended up with.
debug_assert!(
mir_assign_valid_types(

@@ -673,7 +689,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.eval_mir_constant(&c, Some(constant.span), layout)?
}
};
trace!("{:?}: {:?}", mir_op, *op);
trace!("{:?}: {:?}", mir_op, op);
Ok(op)
}

@@ -51,7 +51,7 @@ impl<Prov: Provenance> MemPlaceMeta<Prov> {
}

#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub struct MemPlace<Prov: Provenance = AllocId> {
pub(super) struct MemPlace<Prov: Provenance = AllocId> {
/// The pointer can be a pure integer, with the `None` provenance.
pub ptr: Pointer<Option<Prov>>,
/// Metadata for unsized places. Interpretation is up to the type.

@@ -60,68 +60,6 @@ pub struct MemPlace<Prov: Provenance = AllocId> {
pub meta: MemPlaceMeta<Prov>,
}

/// A MemPlace with its layout. Constructing it is only possible in this module.
#[derive(Clone, Hash, Eq, PartialEq, Debug)]
pub struct MPlaceTy<'tcx, Prov: Provenance = AllocId> {
mplace: MemPlace<Prov>,
pub layout: TyAndLayout<'tcx>,
/// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct:
/// it needs to have a different alignment than the field type would usually have.
/// So we represent this here with a separate field that "overwrites" `layout.align`.
/// This means `layout.align` should never be used for a `MPlaceTy`!
pub align: Align,
}

impl<'tcx, Prov: Provenance> std::ops::Deref for MPlaceTy<'tcx, Prov> {
type Target = MemPlace<Prov>;
#[inline(always)]
fn deref(&self) -> &MemPlace<Prov> {
&self.mplace
}
}

#[derive(Copy, Clone, Debug)]
pub enum Place<Prov: Provenance = AllocId> {
/// A place referring to a value allocated in the `Memory` system.
Ptr(MemPlace<Prov>),

/// To support alloc-free locals, we are able to write directly to a local. The offset indicates
/// where in the local this place is located; if it is `None`, no projection has been applied.
/// Such projections are meaningful even if the offset is 0, since they can change layouts.
/// (Without that optimization, we'd just always be a `MemPlace`.)
/// Note that this only stores the frame index, not the thread this frame belongs to -- that is
/// implicit. This means a `Place` must never be moved across interpreter thread boundaries!
///
/// This variant shall not be used for unsized types -- those must always live in memory.
Local { frame: usize, local: mir::Local, offset: Option<Size> },
}

#[derive(Clone, Debug)]
pub struct PlaceTy<'tcx, Prov: Provenance = AllocId> {
place: Place<Prov>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
/// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct:
/// it needs to have a different alignment than the field type would usually have.
/// So we represent this here with a separate field that "overwrites" `layout.align`.
/// This means `layout.align` should never be used for a `PlaceTy`!
pub align: Align,
}

impl<'tcx, Prov: Provenance> std::ops::Deref for PlaceTy<'tcx, Prov> {
type Target = Place<Prov>;
#[inline(always)]
fn deref(&self) -> &Place<Prov> {
&self.place
}
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for PlaceTy<'tcx, Prov> {
#[inline(always)]
fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
PlaceTy { place: Place::Ptr(*mplace), layout: mplace.layout, align: mplace.align }
}
}

impl<Prov: Provenance> MemPlace<Prov> {
#[inline(always)]
pub fn from_ptr(ptr: Pointer<Option<Prov>>) -> Self {

@@ -165,6 +103,27 @@ impl<Prov: Provenance> MemPlace<Prov> {
}
}

/// A MemPlace with its layout. Constructing it is only possible in this module.
#[derive(Clone, Hash, Eq, PartialEq)]
pub struct MPlaceTy<'tcx, Prov: Provenance = AllocId> {
mplace: MemPlace<Prov>,
pub layout: TyAndLayout<'tcx>,
/// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct:
/// it needs to have a different alignment than the field type would usually have.
/// So we represent this here with a separate field that "overwrites" `layout.align`.
/// This means `layout.align` should never be used for a `MPlaceTy`!
pub align: Align,
}

impl<Prov: Provenance> std::fmt::Debug for MPlaceTy<'_, Prov> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("MPlaceTy")
.field("mplace", &self.mplace)
.field("ty", &self.layout.ty)
.finish()
}
}

impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
/// Produces a MemPlace that works for ZST but nothing else.
/// Conceptually this is a new allocation, but it doesn't actually create an allocation so you

@@ -194,9 +153,29 @@ impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
align: layout.align.abi,
}
}

/// Adjust the provenance of the main pointer (metadata is unaffected).
pub fn map_provenance(self, f: impl FnOnce(Option<Prov>) -> Option<Prov>) -> Self {
MPlaceTy { mplace: self.mplace.map_provenance(f), ..self }
}

#[inline(always)]
pub(super) fn mplace(&self) -> &MemPlace<Prov> {
&self.mplace
}

#[inline(always)]
pub fn ptr(&self) -> Pointer<Option<Prov>> {
self.mplace.ptr
}

#[inline(always)]
pub fn to_ref(&self, cx: &impl HasDataLayout) -> Immediate<Prov> {
self.mplace.to_ref(cx)
}
}

impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
#[inline(always)]
fn layout(&self) -> TyAndLayout<'tcx> {
self.layout

@@ -204,7 +183,7 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for MPlaceTy<'tcx

#[inline(always)]
fn meta(&self) -> MemPlaceMeta<Prov> {
self.meta
self.mplace.meta
}

fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(

@@ -229,7 +208,76 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for MPlaceTy<'tcx
}
}

impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
#[derive(Copy, Clone, Debug)]
pub(super) enum Place<Prov: Provenance = AllocId> {
/// A place referring to a value allocated in the `Memory` system.
Ptr(MemPlace<Prov>),

/// To support alloc-free locals, we are able to write directly to a local. The offset indicates
/// where in the local this place is located; if it is `None`, no projection has been applied.
/// Such projections are meaningful even if the offset is 0, since they can change layouts.
/// (Without that optimization, we'd just always be a `MemPlace`.)
/// Note that this only stores the frame index, not the thread this frame belongs to -- that is
/// implicit. This means a `Place` must never be moved across interpreter thread boundaries!
///
/// This variant shall not be used for unsized types -- those must always live in memory.
Local { frame: usize, local: mir::Local, offset: Option<Size> },
}

#[derive(Clone)]
pub struct PlaceTy<'tcx, Prov: Provenance = AllocId> {
place: Place<Prov>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
/// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct:
/// it needs to have a different alignment than the field type would usually have.
/// So we represent this here with a separate field that "overwrites" `layout.align`.
/// This means `layout.align` should never be used for a `PlaceTy`!
pub align: Align,
}

impl<Prov: Provenance> std::fmt::Debug for PlaceTy<'_, Prov> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("PlaceTy").field("place", &self.place).field("ty", &self.layout.ty).finish()
}
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for PlaceTy<'tcx, Prov> {
#[inline(always)]
fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout, align: mplace.align }
}
}

impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
#[inline(always)]
pub(super) fn place(&self) -> &Place<Prov> {
&self.place
}

/// A place is either an mplace or some local.
#[inline(always)]
pub fn as_mplace_or_local(
&self,
) -> Either<MPlaceTy<'tcx, Prov>, (usize, mir::Local, Option<Size>)> {
match self.place {
Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout, align: self.align }),
Place::Local { frame, local, offset } => Right((frame, local, offset)),
}
}

#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
self.as_mplace_or_local().left().unwrap_or_else(|| {
bug!(
"PlaceTy of type {} was a local when it was expected to be an MPlace",
self.layout.ty
)
})
}
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
#[inline(always)]
fn layout(&self) -> TyAndLayout<'tcx> {
self.layout

@@ -238,7 +286,7 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for PlaceTy<'tcx,
#[inline]
fn meta(&self) -> MemPlaceMeta<Prov> {
match self.as_mplace_or_local() {
Left(mplace) => mplace.meta,
Left(mplace) => mplace.meta(),
Right(_) => {
debug_assert!(self.layout.is_sized(), "unsized locals should live in memory");
MemPlaceMeta::None

@@ -286,11 +334,11 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for PlaceTy<'tcx,
impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
#[inline(always)]
pub fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
match **self {
match self.op() {
Operand::Indirect(mplace) => {
Left(MPlaceTy { mplace, layout: self.layout, align: self.align.unwrap() })
Left(MPlaceTy { mplace: *mplace, layout: self.layout, align: self.align.unwrap() })
}
Operand::Immediate(imm) => Right(ImmTy::from_immediate(imm, self.layout)),
Operand::Immediate(imm) => Right(ImmTy::from_immediate(*imm, self.layout)),
}
}
@@ -306,30 +354,7 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
}
}

impl<'tcx, Prov: Provenance + 'static> PlaceTy<'tcx, Prov> {
/// A place is either an mplace or some local.
#[inline(always)]
pub fn as_mplace_or_local(
&self,
) -> Either<MPlaceTy<'tcx, Prov>, (usize, mir::Local, Option<Size>)> {
match **self {
Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout, align: self.align }),
Place::Local { frame, local, offset } => Right((frame, local, offset)),
}
}

#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
self.as_mplace_or_local().left().unwrap_or_else(|| {
bug!(
"PlaceTy of type {} was a local when it was expected to be an MPlace",
self.layout.ty
)
})
}
}

/// The `Writeable` trait describes interpreter values that can be written to.
pub trait Writeable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> {
fn as_mplace_or_local(
&self,

@@ -341,7 +366,7 @@ pub trait Writeable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> {
) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>>;
}

impl<'tcx, Prov: Provenance + 'static> Writeable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
#[inline(always)]
fn as_mplace_or_local(
&self,

@@ -360,7 +385,7 @@ impl<'tcx, Prov: Provenance + 'static> Writeable<'tcx, Prov> for PlaceTy<'tcx, P
}
}

impl<'tcx, Prov: Provenance + 'static> Writeable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
#[inline(always)]
fn as_mplace_or_local(
&self,

@@ -381,7 +406,7 @@ impl<'tcx, Prov: Provenance + 'static> Writeable<'tcx, Prov> for MPlaceTy<'tcx,
// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
impl<'mir, 'tcx: 'mir, Prov, M> InterpCx<'mir, 'tcx, M>
where
Prov: Provenance + 'static,
Prov: Provenance,
M: Machine<'mir, 'tcx, Provenance = Prov>,
{
/// Take a value, which represents a (thin or wide) reference, and make it a place.

@@ -415,7 +440,7 @@ where
&self,
mplace: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
let imm = mplace.to_ref(self);
let imm = mplace.mplace.to_ref(self);
let layout = self.layout_of(Ty::new_mut_ptr(self.tcx.tcx, mplace.layout.ty))?;
Ok(ImmTy::from_immediate(imm, layout))
}

@@ -449,7 +474,7 @@ where
.size_and_align_of_mplace(&mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
// Due to packed places, only `mplace.align` matters.
self.get_ptr_alloc(mplace.ptr, size, mplace.align)
self.get_ptr_alloc(mplace.ptr(), size, mplace.align)
}

#[inline]

@@ -462,7 +487,7 @@ where
.size_and_align_of_mplace(&mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
// Due to packed places, only `mplace.align` matters.
self.get_ptr_alloc_mut(mplace.ptr, size, mplace.align)
self.get_ptr_alloc_mut(mplace.ptr(), size, mplace.align)
}

/// Check if this mplace is dereferenceable and sufficiently aligned.

@@ -473,7 +498,7 @@ where
// Due to packed places, only `mplace.align` matters.
let align =
if M::enforce_alignment(self).should_check() { mplace.align } else { Align::ONE };
self.check_ptr_access_align(mplace.ptr, size, align, CheckInAllocMsg::DerefTest)?;
self.check_ptr_access_align(mplace.ptr(), size, align, CheckInAllocMsg::DerefTest)?;
Ok(())
}

@@ -542,7 +567,7 @@ where
place = self.project(&place, elem)?
}

trace!("{:?}", self.dump_place(place.place));
trace!("{:?}", self.dump_place(&place));
// Sanity-check the type we ended up with.
debug_assert!(
mir_assign_valid_types(

@@ -618,7 +643,8 @@ where
// just fall back to the indirect path.
dest.force_mplace(self)?
} else {
match M::access_local_mut(self, frame, local)? {
M::before_access_local_mut(self, frame, local)?;
match self.stack_mut()[frame].locals[local].access_mut()? {
Operand::Immediate(local_val) => {
// Local can be updated in-place.
*local_val = src;

@@ -738,7 +764,8 @@ where
// FIXME: share the logic with `write_immediate_no_validate`.
dest.force_mplace(self)?
} else {
match M::access_local_mut(self, frame, local)? {
M::before_access_local_mut(self, frame, local)?;
match self.stack_mut()[frame].locals[local].access_mut()? {
Operand::Immediate(local) => {
*local = Immediate::Uninit;
return Ok(());

@@ -832,7 +859,7 @@ where
*src_val,
src.layout(),
dest_mem.align,
*dest_mem,
dest_mem.mplace,
)
};
}

@@ -859,7 +886,12 @@ where
// (Or as the `Assign` docs put it, assignments "not producing primitives" must be
// non-overlapping.)
self.mem_copy(
src.ptr, src.align, dest.ptr, dest.align, dest_size, /*nonoverlapping*/ true,
src.ptr(),
src.align,
dest.ptr(),
dest.align,
dest_size,
/*nonoverlapping*/ true,
)
}

@@ -874,7 +906,8 @@ where
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
let mplace = match place.place {
Place::Local { frame, local, offset } => {
let whole_local = match M::access_local_mut(self, frame, local)? {
M::before_access_local_mut(self, frame, local)?;
let whole_local = match self.stack_mut()[frame].locals[local].access_mut()? {
&mut Operand::Immediate(local_val) => {
// We need to make an allocation.

@@ -894,16 +927,16 @@ where
local_val,
local_layout,
local_layout.align.abi,
*mplace,
mplace.mplace,
)?;
}
M::after_local_allocated(self, frame, local, &mplace)?;
// Now we can call `access_mut` again, asserting it goes well, and actually
// overwrite things. This points to the entire allocation, not just the part
// the place refers to, i.e. we do this before we apply `offset`.
*M::access_local_mut(self, frame, local).unwrap() =
Operand::Indirect(*mplace);
*mplace
*self.stack_mut()[frame].locals[local].access_mut().unwrap() =
Operand::Indirect(mplace.mplace);
mplace.mplace
}
&mut Operand::Indirect(mplace) => mplace, // this already was an indirect local
};

@@ -1011,12 +1044,12 @@ where
matches!(mplace.layout.ty.kind(), ty::Dynamic(_, _, ty::Dyn)),
"`unpack_dyn_trait` only makes sense on `dyn*` types"
);
let vtable = mplace.meta.unwrap_meta().to_pointer(self)?;
let vtable = mplace.meta().unwrap_meta().to_pointer(self)?;
let (ty, _) = self.get_ptr_vtable(vtable)?;
let layout = self.layout_of(ty)?;

let mplace = MPlaceTy {
mplace: MemPlace { meta: MemPlaceMeta::None, ..**mplace },
mplace: MemPlace { meta: MemPlaceMeta::None, ..mplace.mplace },
layout,
align: layout.align.abi,
};

@@ -89,7 +89,7 @@ pub trait Projectable<'tcx, Prov: Provenance>: Sized + std::fmt::Debug {
}

/// A type representing iteration over the elements of an array.
pub struct ArrayIterator<'tcx, 'a, Prov: Provenance + 'static, P: Projectable<'tcx, Prov>> {
pub struct ArrayIterator<'tcx, 'a, Prov: Provenance, P: Projectable<'tcx, Prov>> {
base: &'a P,
range: Range<u64>,
stride: Size,

@@ -97,9 +97,7 @@ pub struct ArrayIterator<'tcx, 'a, Prov: Provenance + 'static, P: Projectable<'t
_phantom: PhantomData<Prov>, // otherwise it says `Prov` is never used...
}

impl<'tcx, 'a, Prov: Provenance + 'static, P: Projectable<'tcx, Prov>>
ArrayIterator<'tcx, 'a, Prov, P>
{
impl<'tcx, 'a, Prov: Provenance, P: Projectable<'tcx, Prov>> ArrayIterator<'tcx, 'a, Prov, P> {
/// Should be the same `ecx` on each call, and match the one used to create the iterator.
pub fn next<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
&mut self,

@@ -113,7 +111,7 @@ impl<'tcx, 'a, Prov: Provenance + 'static, P: Projectable<'tcx, Prov>>
// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
impl<'mir, 'tcx: 'mir, Prov, M> InterpCx<'mir, 'tcx, M>
where
Prov: Provenance + 'static,
Prov: Provenance,
M: Machine<'mir, 'tcx, Provenance = Prov>,
{
/// Offset a pointer to project to a field of a struct/union. Unlike `place_field`, this is

@@ -204,7 +204,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// avoid writing each operand individually and instead just make many copies
// of the first element.
let elem_size = first.layout.size;
let first_ptr = first.ptr;
let first_ptr = first.ptr();
let rest_ptr = first_ptr.offset(elem_size, self)?;
// For the alignment of `rest_ptr`, we crucially do *not* use `first.align` as
// that place might be more aligned than its type mandates (a `u8` array could

@@ -305,7 +305,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
}

trace!("{:?}", self.dump_place(*dest));
trace!("{:?}", self.dump_place(&dest));

Ok(())
}

@@ -793,7 +793,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
throw_ub_custom!(fluent::const_eval_dyn_star_call_vtable_mismatch);
}

(vptr, dyn_ty, recv.ptr)
(vptr, dyn_ty, recv.ptr())
} else {
// Doesn't have to be a `dyn Trait`, but the unsized tail must be `dyn Trait`.
// (For that reason we also cannot use `unpack_dyn_trait`.)

@@ -810,7 +810,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
assert!(receiver_place.layout.is_unsized());

// Get the required information from the vtable.
let vptr = receiver_place.meta.unwrap_meta().to_pointer(self)?;
let vptr = receiver_place.meta().unwrap_meta().to_pointer(self)?;
let (dyn_ty, dyn_trait) = self.get_ptr_vtable(vptr)?;
if dyn_trait != data.principal() {
throw_ub_custom!(fluent::const_eval_dyn_call_vtable_mismatch);

@@ -819,7 +819,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// It might be surprising that we use a pointer as the receiver even if this
// is a by-val case; this works because by-val passing of an unsized `dyn
// Trait` to a function is actually desugared to a pointer.
(vptr, dyn_ty, receiver_place.ptr)
(vptr, dyn_ty, receiver_place.ptr())
};

// Now determine the actual method to call. We can do that in two different ways and
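The receiver handling above relies on `&dyn Trait` being a wide pointer, one word of data plus one word of vtable, with the vtable coming from the pointer metadata. A small, unrelated-to-the-interpreter sketch using only stable std APIs:

    use std::fmt::Display;
    use std::mem::size_of;

    fn main() {
        // A reference to a trait object carries a data pointer and a vtable pointer.
        assert_eq!(size_of::<&dyn Display>(), 2 * size_of::<usize>());
        // A reference to a sized type is a single word.
        assert_eq!(size_of::<&u32>(), size_of::<usize>());

        // Casting the wide pointer to a thin pointer discards the vtable metadata
        // and leaves the data address, which is the original value's address.
        let x: u32 = 7;
        let wide: &dyn Display = &x;
        assert_eq!(wide as *const dyn Display as *const u32, &x as *const u32);
    }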
@@ -360,7 +360,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
// Handle wide pointers.
// Check metadata early, for better diagnostics
if place.layout.is_unsized() {
self.check_wide_ptr_meta(place.meta, place.layout)?;
self.check_wide_ptr_meta(place.meta(), place.layout)?;
}
// Make sure this is dereferenceable and all.
let size_and_align = try_validation!(

@@ -379,7 +379,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
// Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
try_validation!(
self.ecx.check_ptr_access_align(
place.ptr,
place.ptr(),
size,
align,
CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message

@@ -414,7 +414,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
// Proceed recursively even for ZST, no reason to skip them!
// `!` is a ZST and we want to validate it.
if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr) {
if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr()) {
// Let's see what kind of memory this points to.
let alloc_kind = self.ecx.tcx.try_get_global_alloc(alloc_id);
match alloc_kind {

@@ -521,7 +521,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
let place =
self.ecx.ref_to_mplace(&self.read_immediate(value, ExpectedKind::RawPtr)?)?;
if place.layout.is_unsized() {
self.check_wide_ptr_meta(place.meta, place.layout)?;
self.check_wide_ptr_meta(place.meta(), place.layout)?;
}
Ok(true)
}

@@ -739,7 +739,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
let mplace = op.assert_mem_place(); // strings are unsized and hence never immediate
let len = mplace.len(self.ecx)?;
try_validation!(
self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len)),
self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
self.path,
Ub(InvalidUninitBytes(..)) => Uninit { expected: ExpectedKind::Str },
Unsup(ReadPointerAsInt(_)) => PointerAsInt { expected: ExpectedKind::Str }

@@ -789,7 +789,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
// to reject those pointers, we just do not have the machinery to
// talk about parts of a pointer.
// We also accept uninit, for consistency with the slow path.
let alloc = self.ecx.get_ptr_alloc(mplace.ptr, size, mplace.align)?.expect("we already excluded size 0");
let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size, mplace.align)?.expect("we already excluded size 0");

match alloc.get_bytes_strip_provenance() {
// In the happy case, we needn't check anything else.

@@ -54,7 +54,7 @@ fn might_permit_raw_init_strict<'tcx>(

if kind == ValidityRequirement::Zero {
cx.write_bytes_ptr(
allocated.ptr,
allocated.ptr(),
std::iter::repeat(0_u8).take(ty.layout.size().bytes_usize()),
)
.expect("failed to write bytes for zero valid check");

@@ -41,12 +41,9 @@
//! [^2] `MTLockRef` is a typedef.

pub use crate::marker::*;
use parking_lot::Mutex;
use std::any::Any;
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
use std::ops::{Deref, DerefMut};
use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};

mod lock;
pub use lock::{Lock, LockGuard};

@@ -54,6 +51,11 @@ pub use lock::{Lock, LockGuard};
mod worker_local;
pub use worker_local::{Registry, WorkerLocal};

mod parallel;
#[cfg(parallel_compiler)]
pub use parallel::scope;
pub use parallel::{join, par_for_each_in, par_map, parallel_guard};

pub use std::sync::atomic::Ordering;
pub use std::sync::atomic::Ordering::SeqCst;

@@ -105,37 +107,6 @@ mod mode {

pub use mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};

/// A guard used to hold panics that occur during a parallel section to later be unwound.
/// This is used for the parallel compiler to prevent fatal errors from non-deterministically
/// hiding errors by ensuring that everything in the section has completed executing before
/// continuing with unwinding. It's also used for the non-parallel code to ensure error message
/// output matches the parallel compiler for testing purposes.
pub struct ParallelGuard {
panic: Mutex<Option<Box<dyn Any + std::marker::Send + 'static>>>,
}

impl ParallelGuard {
pub fn run<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
catch_unwind(AssertUnwindSafe(f))
.map_err(|err| {
*self.panic.lock() = Some(err);
})
.ok()
}
}

/// This gives access to a fresh parallel guard in the closure and will unwind any panics
/// caught in it after the closure returns.
#[inline]
pub fn parallel_guard<R>(f: impl FnOnce(&ParallelGuard) -> R) -> R {
let guard = ParallelGuard { panic: Mutex::new(None) };
let ret = f(&guard);
if let Some(panic) = guard.panic.into_inner() {
resume_unwind(panic);
}
ret
}

cfg_if! {
if #[cfg(not(parallel_compiler))] {
use std::ops::Add;

@@ -227,44 +198,6 @@ cfg_if! {
pub type AtomicU32 = Atomic<u32>;
pub type AtomicU64 = Atomic<u64>;

pub fn join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
where A: FnOnce() -> RA,
B: FnOnce() -> RB
{
let (a, b) = parallel_guard(|guard| {
let a = guard.run(oper_a);
let b = guard.run(oper_b);
(a, b)
});
(a.unwrap(), b.unwrap())
}

#[macro_export]
macro_rules! parallel {
($($blocks:block),*) => {{
$crate::sync::parallel_guard(|guard| {
$(guard.run(|| $blocks);)*
});
}}
}

pub fn par_for_each_in<T: IntoIterator>(t: T, mut for_each: impl FnMut(T::Item) + Sync + Send) {
parallel_guard(|guard| {
t.into_iter().for_each(|i| {
guard.run(|| for_each(i));
});
})
}

pub fn par_map<T: IntoIterator, R, C: FromIterator<R>>(
t: T,
mut map: impl FnMut(<<T as IntoIterator>::IntoIter as Iterator>::Item) -> R,
) -> C {
parallel_guard(|guard| {
t.into_iter().filter_map(|i| guard.run(|| map(i))).collect()
})
}

pub use std::rc::Rc as Lrc;
|
||||
pub use std::rc::Weak as Weak;
|
||||
pub use std::cell::Ref as ReadGuard;
|
||||
@ -370,105 +303,6 @@ cfg_if! {
|
||||
|
||||
use std::thread;
|
||||
|
||||
#[inline]
|
||||
pub fn join<A, B, RA: DynSend, RB: DynSend>(oper_a: A, oper_b: B) -> (RA, RB)
|
||||
where
|
||||
A: FnOnce() -> RA + DynSend,
|
||||
B: FnOnce() -> RB + DynSend,
|
||||
{
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let oper_a = FromDyn::from(oper_a);
|
||||
let oper_b = FromDyn::from(oper_b);
|
||||
let (a, b) = rayon::join(move || FromDyn::from(oper_a.into_inner()()), move || FromDyn::from(oper_b.into_inner()()));
|
||||
(a.into_inner(), b.into_inner())
|
||||
} else {
|
||||
let (a, b) = parallel_guard(|guard| {
|
||||
let a = guard.run(oper_a);
|
||||
let b = guard.run(oper_b);
|
||||
(a, b)
|
||||
});
|
||||
(a.unwrap(), b.unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
// This function only works when `mode::is_dyn_thread_safe()`.
|
||||
pub fn scope<'scope, OP, R>(op: OP) -> R
|
||||
where
|
||||
OP: FnOnce(&rayon::Scope<'scope>) -> R + DynSend,
|
||||
R: DynSend,
|
||||
{
|
||||
let op = FromDyn::from(op);
|
||||
rayon::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner()
|
||||
}
|
||||
|
||||
/// Runs a list of blocks in parallel. The first block is executed immediately on
|
||||
/// the current thread. Use that for the longest running block.
|
||||
#[macro_export]
|
||||
macro_rules! parallel {
|
||||
(impl $fblock:block [$($c:expr,)*] [$block:expr $(, $rest:expr)*]) => {
|
||||
parallel!(impl $fblock [$block, $($c,)*] [$($rest),*])
|
||||
};
|
||||
(impl $fblock:block [$($blocks:expr,)*] []) => {
|
||||
::rustc_data_structures::sync::scope(|s| {
|
||||
$(let block = rustc_data_structures::sync::FromDyn::from(|| $blocks);
|
||||
s.spawn(move |_| block.into_inner()());)*
|
||||
(|| $fblock)();
|
||||
});
|
||||
};
|
||||
($fblock:block, $($blocks:block),*) => {
|
||||
if rustc_data_structures::sync::is_dyn_thread_safe() {
|
||||
// Reverse the order of the later blocks since Rayon executes them in reverse order
|
||||
// when using a single thread. This ensures the execution order matches that
|
||||
// of a single threaded rustc.
|
||||
parallel!(impl $fblock [] [$($blocks),*]);
|
||||
} else {
|
||||
$crate::sync::parallel_guard(|guard| {
|
||||
guard.run(|| $fblock);
|
||||
$(guard.run(|| $blocks);)*
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelIterator};
|
||||
|
||||
pub fn par_for_each_in<I, T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>>(
|
||||
t: T,
|
||||
for_each: impl Fn(I) + DynSync + DynSend
|
||||
) {
|
||||
parallel_guard(|guard| {
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let for_each = FromDyn::from(for_each);
|
||||
t.into_par_iter().for_each(|i| {
|
||||
guard.run(|| for_each(i));
|
||||
});
|
||||
} else {
|
||||
t.into_iter().for_each(|i| {
|
||||
guard.run(|| for_each(i));
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn par_map<
|
||||
I,
|
||||
T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>,
|
||||
R: std::marker::Send,
|
||||
C: FromIterator<R> + FromParallelIterator<R>
|
||||
>(
|
||||
t: T,
|
||||
map: impl Fn(I) -> R + DynSync + DynSend
|
||||
) -> C {
|
||||
parallel_guard(|guard| {
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let map = FromDyn::from(map);
|
||||
t.into_par_iter().filter_map(|i| guard.run(|| map(i))).collect()
|
||||
} else {
|
||||
t.into_iter().filter_map(|i| guard.run(|| map(i))).collect()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// This makes locks panic if they are already held.
|
||||
/// It is only useful when you are running in a single thread
|
||||
const ERROR_CHECKING: bool = false;
|
||||
|
compiler/rustc_data_structures/src/sync/parallel.rs (new file, 188 lines)
@ -0,0 +1,188 @@
|
||||
//! This module defines parallel operations that are implemented in
|
||||
//! one way for the serial compiler, and another way for the parallel compiler.
|
||||
|
||||
#![allow(dead_code)]
|
||||
|
||||
use parking_lot::Mutex;
|
||||
use std::any::Any;
|
||||
use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};
|
||||
|
||||
#[cfg(not(parallel_compiler))]
|
||||
pub use disabled::*;
|
||||
#[cfg(parallel_compiler)]
|
||||
pub use enabled::*;
|
||||
|
||||
/// A guard used to hold panics that occur during a parallel section, to be unwound later.
|
||||
/// This is used for the parallel compiler to prevent fatal errors from non-deterministically
|
||||
/// hiding errors by ensuring that everything in the section has completed executing before
|
||||
/// continuing with unwinding. It's also used for the non-parallel code to ensure error message
|
||||
/// output matches the parallel compiler for testing purposes.
|
||||
pub struct ParallelGuard {
|
||||
panic: Mutex<Option<Box<dyn Any + Send + 'static>>>,
|
||||
}
|
||||
|
||||
impl ParallelGuard {
|
||||
pub fn run<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
|
||||
catch_unwind(AssertUnwindSafe(f))
|
||||
.map_err(|err| {
|
||||
*self.panic.lock() = Some(err);
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
}
|
||||
|
||||
/// This gives access to a fresh parallel guard in the closure and will unwind any panics
|
||||
/// caught in it after the closure returns.
|
||||
#[inline]
|
||||
pub fn parallel_guard<R>(f: impl FnOnce(&ParallelGuard) -> R) -> R {
|
||||
let guard = ParallelGuard { panic: Mutex::new(None) };
|
||||
let ret = f(&guard);
|
||||
if let Some(panic) = guard.panic.into_inner() {
|
||||
resume_unwind(panic);
|
||||
}
|
||||
ret
|
||||
}
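
A note on the mechanism being introduced here: `parallel_guard` runs a section of closures through `ParallelGuard::run`, stashes the first panic it catches, and only resumes unwinding once the whole section has finished, so a fatal error in one block cannot non-deterministically suppress diagnostics produced by the others. Below is a minimal standalone sketch of that panic-deferral pattern, assuming only the standard library (`std::sync::Mutex` in place of `parking_lot`); the names are illustrative and are not the rustc API.

use std::any::Any;
use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};
use std::sync::Mutex;

/// Collects the first panic raised inside a section and re-raises it
/// only after every closure in the section has finished running.
struct DeferredPanic {
    panic: Mutex<Option<Box<dyn Any + Send + 'static>>>,
}

impl DeferredPanic {
    fn run<R>(&self, f: impl FnOnce() -> R) -> Option<R> {
        // Catch the panic instead of letting it unwind immediately, so the
        // remaining closures in the section still get a chance to run.
        catch_unwind(AssertUnwindSafe(f))
            .map_err(|err| *self.panic.lock().unwrap() = Some(err))
            .ok()
    }
}

fn with_deferred_panics<R>(f: impl FnOnce(&DeferredPanic) -> R) -> R {
    let guard = DeferredPanic { panic: Mutex::new(None) };
    let ret = f(&guard);
    // Re-raise the stored panic only once the whole section is done.
    if let Some(payload) = guard.panic.into_inner().unwrap() {
        resume_unwind(payload);
    }
    ret
}

fn main() {
    let result = std::panic::catch_unwind(|| {
        with_deferred_panics(|guard| {
            guard.run(|| println!("first task runs"));
            guard.run(|| panic!("second task fails"));
            guard.run(|| println!("third task still runs"));
        })
    });
    assert!(result.is_err()); // the panic resurfaces only after all three ran
}
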
|
||||
|
||||
mod disabled {
|
||||
use crate::sync::parallel_guard;
|
||||
|
||||
#[macro_export]
|
||||
#[cfg(not(parallel_compiler))]
|
||||
macro_rules! parallel {
|
||||
($($blocks:block),*) => {{
|
||||
$crate::sync::parallel_guard(|guard| {
|
||||
$(guard.run(|| $blocks);)*
|
||||
});
|
||||
}}
|
||||
}
|
||||
|
||||
pub fn join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
|
||||
where
|
||||
A: FnOnce() -> RA,
|
||||
B: FnOnce() -> RB,
|
||||
{
|
||||
let (a, b) = parallel_guard(|guard| {
|
||||
let a = guard.run(oper_a);
|
||||
let b = guard.run(oper_b);
|
||||
(a, b)
|
||||
});
|
||||
(a.unwrap(), b.unwrap())
|
||||
}
|
||||
|
||||
pub fn par_for_each_in<T: IntoIterator>(t: T, mut for_each: impl FnMut(T::Item)) {
|
||||
parallel_guard(|guard| {
|
||||
t.into_iter().for_each(|i| {
|
||||
guard.run(|| for_each(i));
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
pub fn par_map<T: IntoIterator, R, C: FromIterator<R>>(
|
||||
t: T,
|
||||
mut map: impl FnMut(<<T as IntoIterator>::IntoIter as Iterator>::Item) -> R,
|
||||
) -> C {
|
||||
parallel_guard(|guard| t.into_iter().filter_map(|i| guard.run(|| map(i))).collect())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(parallel_compiler)]
|
||||
mod enabled {
|
||||
use crate::sync::{mode, parallel_guard, DynSend, DynSync, FromDyn};
|
||||
|
||||
/// Runs a list of blocks in parallel. The first block is executed immediately on
|
||||
/// the current thread. Use that for the longest running block.
|
||||
#[macro_export]
|
||||
macro_rules! parallel {
|
||||
(impl $fblock:block [$($c:expr,)*] [$block:expr $(, $rest:expr)*]) => {
|
||||
parallel!(impl $fblock [$block, $($c,)*] [$($rest),*])
|
||||
};
|
||||
(impl $fblock:block [$($blocks:expr,)*] []) => {
|
||||
::rustc_data_structures::sync::scope(|s| {
|
||||
$(let block = rustc_data_structures::sync::FromDyn::from(|| $blocks);
|
||||
s.spawn(move |_| block.into_inner()());)*
|
||||
(|| $fblock)();
|
||||
});
|
||||
};
|
||||
($fblock:block, $($blocks:block),*) => {
|
||||
if rustc_data_structures::sync::is_dyn_thread_safe() {
|
||||
// Reverse the order of the later blocks since Rayon executes them in reverse order
|
||||
// when using a single thread. This ensures the execution order matches that
|
||||
// of a single threaded rustc.
|
||||
parallel!(impl $fblock [] [$($blocks),*]);
|
||||
} else {
|
||||
$crate::sync::parallel_guard(|guard| {
|
||||
guard.run(|| $fblock);
|
||||
$(guard.run(|| $blocks);)*
|
||||
});
|
||||
}
|
||||
};
|
||||
}
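
The accumulator arms of this macro are the subtle part: each trailing block is pushed onto the front of a bracketed list before anything is spawned, so the blocks reach `scope` in reverse order and, when Rayon falls back to a single thread, end up executing in the original source order. The following is a small self-contained sketch of the same reversal trick, as a toy macro that merely prints its arguments; it illustrates the pattern and is not the rustc macro.

// A standalone sketch of the accumulator trick used by `parallel!` to reverse
// the trailing items before processing them (names are invented for illustration).
macro_rules! run_reversed {
    // Move the next expression onto the front of the accumulator.
    (impl [$($acc:expr,)*] [$head:expr $(, $rest:expr)*]) => {
        run_reversed!(impl [$head, $($acc,)*] [$($rest),*])
    };
    // Accumulator now holds the expressions in reverse order; run them.
    (impl [$($acc:expr,)*] []) => {
        $(println!("{}", $acc);)*
    };
    ($($e:expr),* $(,)?) => {
        run_reversed!(impl [] [$($e),*])
    };
}

fn main() {
    run_reversed!("first", "second", "third"); // prints third, second, first
}
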
|
||||
|
||||
// This function only works when `mode::is_dyn_thread_safe()`.
|
||||
pub fn scope<'scope, OP, R>(op: OP) -> R
|
||||
where
|
||||
OP: FnOnce(&rayon::Scope<'scope>) -> R + DynSend,
|
||||
R: DynSend,
|
||||
{
|
||||
let op = FromDyn::from(op);
|
||||
rayon::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn join<A, B, RA: DynSend, RB: DynSend>(oper_a: A, oper_b: B) -> (RA, RB)
|
||||
where
|
||||
A: FnOnce() -> RA + DynSend,
|
||||
B: FnOnce() -> RB + DynSend,
|
||||
{
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let oper_a = FromDyn::from(oper_a);
|
||||
let oper_b = FromDyn::from(oper_b);
|
||||
let (a, b) = rayon::join(
|
||||
move || FromDyn::from(oper_a.into_inner()()),
|
||||
move || FromDyn::from(oper_b.into_inner()()),
|
||||
);
|
||||
(a.into_inner(), b.into_inner())
|
||||
} else {
|
||||
super::disabled::join(oper_a, oper_b)
|
||||
}
|
||||
}
|
||||
|
||||
use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelIterator};
|
||||
|
||||
pub fn par_for_each_in<I, T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>>(
|
||||
t: T,
|
||||
for_each: impl Fn(I) + DynSync + DynSend,
|
||||
) {
|
||||
parallel_guard(|guard| {
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let for_each = FromDyn::from(for_each);
|
||||
t.into_par_iter().for_each(|i| {
|
||||
guard.run(|| for_each(i));
|
||||
});
|
||||
} else {
|
||||
t.into_iter().for_each(|i| {
|
||||
guard.run(|| for_each(i));
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn par_map<
|
||||
I,
|
||||
T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>,
|
||||
R: std::marker::Send,
|
||||
C: FromIterator<R> + FromParallelIterator<R>,
|
||||
>(
|
||||
t: T,
|
||||
map: impl Fn(I) -> R + DynSync + DynSend,
|
||||
) -> C {
|
||||
parallel_guard(|guard| {
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let map = FromDyn::from(map);
|
||||
t.into_par_iter().filter_map(|i| guard.run(|| map(i))).collect()
|
||||
} else {
|
||||
t.into_iter().filter_map(|i| guard.run(|| map(i))).collect()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@ -91,7 +91,7 @@ fn annotation_type_for_level(level: Level) -> AnnotationType {
|
||||
}
|
||||
Level::Warning(_) => AnnotationType::Warning,
|
||||
Level::Note | Level::OnceNote => AnnotationType::Note,
|
||||
Level::Help => AnnotationType::Help,
|
||||
Level::Help | Level::OnceHelp => AnnotationType::Help,
|
||||
// FIXME(#59346): Not sure how to map this level
|
||||
Level::FailureNote => AnnotationType::Error,
|
||||
Level::Allow => panic!("Should not call with Allow"),
|
||||
|
@ -270,6 +270,7 @@ impl Diagnostic {
|
||||
| Level::Note
|
||||
| Level::OnceNote
|
||||
| Level::Help
|
||||
| Level::OnceHelp
|
||||
| Level::Allow
|
||||
| Level::Expect(_) => false,
|
||||
}
|
||||
@ -532,6 +533,13 @@ impl Diagnostic {
|
||||
self
|
||||
}
|
||||
|
||||
/// Add a help message attached to this diagnostic that is only emitted once.
|
||||
/// This is like [`Diagnostic::help()`], but the message is deduplicated if emitted more than once.
|
||||
pub fn help_once(&mut self, msg: impl Into<SubdiagnosticMessage>) -> &mut Self {
|
||||
self.sub(Level::OnceHelp, msg, MultiSpan::new(), None);
|
||||
self
|
||||
}
|
||||
|
||||
/// Add a help message attached to this diagnostic with a customizable highlighted message.
|
||||
pub fn highlighted_help(&mut self, msg: Vec<(String, Style)>) -> &mut Self {
|
||||
self.sub_with_highlights(Level::Help, msg, MultiSpan::new(), None);
|
||||
|
@ -2348,7 +2348,13 @@ impl FileWithAnnotatedLines {
|
||||
}
|
||||
|
||||
let label = label.as_ref().map(|m| {
|
||||
emitter.translate_message(m, args).map_err(Report::new).unwrap().to_string()
|
||||
normalize_whitespace(
|
||||
&emitter
|
||||
.translate_message(m, &args)
|
||||
.map_err(Report::new)
|
||||
.unwrap()
|
||||
.to_string(),
|
||||
)
|
||||
});
|
||||
|
||||
if lo.line != hi.line {
|
||||
|
@ -1390,7 +1390,7 @@ impl HandlerInner {
|
||||
debug!(?self.emitted_diagnostics);
|
||||
let already_emitted_sub = |sub: &mut SubDiagnostic| {
|
||||
debug!(?sub);
|
||||
if sub.level != Level::OnceNote {
|
||||
if sub.level != Level::OnceNote && sub.level != Level::OnceHelp {
|
||||
return false;
|
||||
}
|
||||
let mut hasher = StableHasher::new();
|
||||
@ -1792,6 +1792,8 @@ pub enum Level {
|
||||
/// A note that is only emitted once.
|
||||
OnceNote,
|
||||
Help,
|
||||
/// A help that is only emitted once.
|
||||
OnceHelp,
|
||||
FailureNote,
|
||||
Allow,
|
||||
Expect(LintExpectationId),
|
||||
@ -1816,7 +1818,7 @@ impl Level {
|
||||
Note | OnceNote => {
|
||||
spec.set_fg(Some(Color::Green)).set_intense(true);
|
||||
}
|
||||
Help => {
|
||||
Help | OnceHelp => {
|
||||
spec.set_fg(Some(Color::Cyan)).set_intense(true);
|
||||
}
|
||||
FailureNote => {}
|
||||
@ -1831,7 +1833,7 @@ impl Level {
|
||||
Fatal | Error { .. } => "error",
|
||||
Warning(_) => "warning",
|
||||
Note | OnceNote => "note",
|
||||
Help => "help",
|
||||
Help | OnceHelp => "help",
|
||||
FailureNote => "failure-note",
|
||||
Allow => panic!("Shouldn't call on allowed error"),
|
||||
Expect(_) => panic!("Shouldn't call on expected error"),
|
||||
|
@ -152,7 +152,7 @@ pub mod nested_filter {
|
||||
/// visit fn bodies for fns that it encounters, and closure bodies, but
|
||||
/// skip over nested item-like things.
|
||||
///
|
||||
/// See the comments on `ItemLikeVisitor` for more details on the overall
|
||||
/// See the comments at [`rustc_hir::intravisit`] for more details on the overall
|
||||
/// visit strategy.
|
||||
pub trait NestedFilter<'hir> {
|
||||
type Map: Map<'hir>;
|
||||
@ -229,8 +229,8 @@ pub trait Visitor<'v>: Sized {
|
||||
/// `Self::NestedFilter` is `nested_filter::None`, this method does
|
||||
/// nothing. **You probably don't want to override this method** --
|
||||
/// instead, override [`Self::NestedFilter`] or use the "shallow" or
|
||||
/// "deep" visit patterns described on
|
||||
/// `itemlikevisit::ItemLikeVisitor`. The only reason to override
|
||||
/// "deep" visit patterns described at
|
||||
/// [`rustc_hir::intravisit`]. The only reason to override
|
||||
/// this method is if you want a nested pattern but cannot supply a
|
||||
/// [`Map`]; see `nested_visit_map` for advice.
|
||||
fn visit_nested_item(&mut self, id: ItemId) {
|
||||
|
@ -2205,27 +2205,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
||||
err.span_note(span, format!("type parameter `{name}` defined here"));
|
||||
}
|
||||
});
|
||||
|
||||
match tcx.named_bound_var(hir_id) {
|
||||
Some(rbv::ResolvedArg::LateBound(debruijn, index, _)) => {
|
||||
let name =
|
||||
tcx.hir().name(tcx.hir().local_def_id_to_hir_id(def_id.expect_local()));
|
||||
let br = ty::BoundTy {
|
||||
var: ty::BoundVar::from_u32(index),
|
||||
kind: ty::BoundTyKind::Param(def_id, name),
|
||||
};
|
||||
Ty::new_bound(tcx, debruijn, br)
|
||||
}
|
||||
Some(rbv::ResolvedArg::EarlyBound(_)) => {
|
||||
let def_id = def_id.expect_local();
|
||||
let item_def_id = tcx.hir().ty_param_owner(def_id);
|
||||
let generics = tcx.generics_of(item_def_id);
|
||||
let index = generics.param_def_id_to_index[&def_id.to_def_id()];
|
||||
Ty::new_param(tcx, index, tcx.hir().ty_param_name(def_id))
|
||||
}
|
||||
Some(rbv::ResolvedArg::Error(guar)) => Ty::new_error(tcx, guar),
|
||||
arg => bug!("unexpected bound var resolution for {hir_id:?}: {arg:?}"),
|
||||
}
|
||||
self.hir_id_to_bound_ty(hir_id)
|
||||
}
|
||||
Res::SelfTyParam { .. } => {
|
||||
// `Self` in trait or type alias.
|
||||
@ -2394,6 +2374,57 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
||||
}
|
||||
}
|
||||
|
||||
// Converts a hir id corresponding to a type parameter to
|
||||
// an early-bound `ty::Param` or late-bound `ty::Bound`.
|
||||
pub(crate) fn hir_id_to_bound_ty(&self, hir_id: hir::HirId) -> Ty<'tcx> {
|
||||
let tcx = self.tcx();
|
||||
match tcx.named_bound_var(hir_id) {
|
||||
Some(rbv::ResolvedArg::LateBound(debruijn, index, def_id)) => {
|
||||
let name = tcx.item_name(def_id);
|
||||
let br = ty::BoundTy {
|
||||
var: ty::BoundVar::from_u32(index),
|
||||
kind: ty::BoundTyKind::Param(def_id, name),
|
||||
};
|
||||
Ty::new_bound(tcx, debruijn, br)
|
||||
}
|
||||
Some(rbv::ResolvedArg::EarlyBound(def_id)) => {
|
||||
let def_id = def_id.expect_local();
|
||||
let item_def_id = tcx.hir().ty_param_owner(def_id);
|
||||
let generics = tcx.generics_of(item_def_id);
|
||||
let index = generics.param_def_id_to_index[&def_id.to_def_id()];
|
||||
Ty::new_param(tcx, index, tcx.hir().ty_param_name(def_id))
|
||||
}
|
||||
Some(rbv::ResolvedArg::Error(guar)) => Ty::new_error(tcx, guar),
|
||||
arg => bug!("unexpected bound var resolution for {hir_id:?}: {arg:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
// Converts a hir id corresponding to a const parameter to
|
||||
// an early-bound `ConstKind::Param` or late-bound `ConstKind::Bound`.
|
||||
pub(crate) fn hir_id_to_bound_const(
|
||||
&self,
|
||||
hir_id: hir::HirId,
|
||||
param_ty: Ty<'tcx>,
|
||||
) -> Const<'tcx> {
|
||||
let tcx = self.tcx();
|
||||
match tcx.named_bound_var(hir_id) {
|
||||
Some(rbv::ResolvedArg::EarlyBound(def_id)) => {
|
||||
// Find the name and index of the const parameter by indexing the generics of
|
||||
// the parent item and construct a `ParamConst`.
|
||||
let item_def_id = tcx.parent(def_id);
|
||||
let generics = tcx.generics_of(item_def_id);
|
||||
let index = generics.param_def_id_to_index[&def_id];
|
||||
let name = tcx.item_name(def_id);
|
||||
ty::Const::new_param(tcx, ty::ParamConst::new(index, name), param_ty)
|
||||
}
|
||||
Some(rbv::ResolvedArg::LateBound(debruijn, index, _)) => {
|
||||
ty::Const::new_bound(tcx, debruijn, ty::BoundVar::from_u32(index), param_ty)
|
||||
}
|
||||
Some(rbv::ResolvedArg::Error(guar)) => ty::Const::new_error(tcx, guar, param_ty),
|
||||
arg => bug!("unexpected bound var resolution for {:?}: {arg:?}", hir_id),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses the programmer's textual representation of a type into our
|
||||
/// internal notion of a type.
|
||||
pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
|
||||
|
@ -162,8 +162,6 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
|
||||
};
|
||||
|
||||
let generics = tcx.generics_of(def_id);
|
||||
let parent_count = generics.parent_count as u32;
|
||||
let has_own_self = generics.has_self && parent_count == 0;
|
||||
|
||||
// Below we'll consider the bounds on the type parameters (including `Self`)
|
||||
// and the explicit where-clauses, but to get the full set of predicates
|
||||
@ -189,17 +187,6 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
|
||||
predicates.insert((trait_ref.to_predicate(tcx), tcx.def_span(def_id)));
|
||||
}
|
||||
|
||||
// Collect the region predicates that were declared inline as
|
||||
// well. In the case of parameters declared on a fn or method, we
|
||||
// have to be careful to only iterate over early-bound regions.
|
||||
let mut index = parent_count
|
||||
+ has_own_self as u32
|
||||
+ super::early_bound_lifetimes_from_generics(tcx, ast_generics).count() as u32;
|
||||
|
||||
trace!(?predicates);
|
||||
trace!(?ast_generics);
|
||||
trace!(?generics);
|
||||
|
||||
// Collect the predicates that were written inline by the user on each
|
||||
// type parameter (e.g., `<T: Foo>`). Also add `ConstArgHasType` predicates
|
||||
// for each const parameter.
|
||||
@ -208,10 +195,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
|
||||
// We already dealt with early bound lifetimes above.
|
||||
GenericParamKind::Lifetime { .. } => (),
|
||||
GenericParamKind::Type { .. } => {
|
||||
let name = param.name.ident().name;
|
||||
let param_ty = ty::ParamTy::new(index, name).to_ty(tcx);
|
||||
index += 1;
|
||||
|
||||
let param_ty = icx.astconv().hir_id_to_bound_ty(param.hir_id);
|
||||
let mut bounds = Bounds::default();
|
||||
// Params are implicitly sized unless a `?Sized` bound is found
|
||||
icx.astconv().add_implicitly_sized(
|
||||
@ -225,23 +209,16 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
|
||||
predicates.extend(bounds.clauses());
|
||||
trace!(?predicates);
|
||||
}
|
||||
GenericParamKind::Const { .. } => {
|
||||
let name = param.name.ident().name;
|
||||
let param_const = ty::ParamConst::new(index, name);
|
||||
|
||||
hir::GenericParamKind::Const { .. } => {
|
||||
let ct_ty = tcx
|
||||
.type_of(param.def_id.to_def_id())
|
||||
.no_bound_vars()
|
||||
.expect("const parameters cannot be generic");
|
||||
|
||||
let ct = ty::Const::new_param(tcx, param_const, ct_ty);
|
||||
|
||||
let ct = icx.astconv().hir_id_to_bound_const(param.hir_id, ct_ty);
|
||||
predicates.insert((
|
||||
ty::ClauseKind::ConstArgHasType(ct, ct_ty).to_predicate(tcx),
|
||||
param.span,
|
||||
));
|
||||
|
||||
index += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -252,8 +229,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
|
||||
match predicate {
|
||||
hir::WherePredicate::BoundPredicate(bound_pred) => {
|
||||
let ty = icx.to_ty(bound_pred.bounded_ty);
|
||||
let bound_vars = icx.tcx.late_bound_vars(bound_pred.hir_id);
|
||||
|
||||
let bound_vars = tcx.late_bound_vars(bound_pred.hir_id);
|
||||
// Keep the type around in a dummy predicate, in case of no bounds.
|
||||
// That way, `where Ty:` is not a complete noop (see #53696) and `Ty`
|
||||
// is still checked for WF.
|
||||
@ -296,7 +272,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
|
||||
_ => bug!(),
|
||||
};
|
||||
let pred = ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(r1, r2))
|
||||
.to_predicate(icx.tcx);
|
||||
.to_predicate(tcx);
|
||||
(pred, span)
|
||||
}))
|
||||
}
|
||||
|
@ -849,6 +849,7 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> {
|
||||
fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) {
|
||||
let scope = Scope::TraitRefBoundary { s: self.scope };
|
||||
self.with(scope, |this| {
|
||||
walk_list!(this, visit_generic_param, generics.params);
|
||||
for param in generics.params {
|
||||
match param.kind {
|
||||
GenericParamKind::Lifetime { .. } => {}
|
||||
@ -865,90 +866,86 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
}
|
||||
for predicate in generics.predicates {
|
||||
match predicate {
|
||||
&hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
|
||||
hir_id,
|
||||
bounded_ty,
|
||||
bounds,
|
||||
bound_generic_params,
|
||||
origin,
|
||||
..
|
||||
}) => {
|
||||
let (bound_vars, binders): (FxIndexMap<LocalDefId, ResolvedArg>, Vec<_>) =
|
||||
bound_generic_params
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(late_bound_idx, param)| {
|
||||
let pair = ResolvedArg::late(late_bound_idx as u32, param);
|
||||
let r = late_arg_as_bound_arg(this.tcx, &pair.1, param);
|
||||
(pair, r)
|
||||
})
|
||||
.unzip();
|
||||
this.record_late_bound_vars(hir_id, binders.clone());
|
||||
// Even if there are no lifetimes defined here, we still wrap it in a binder
|
||||
// scope. If there happens to be a nested poly trait ref (an error), that
|
||||
// will be `Concatenating` anyways, so we don't have to worry about the depth
|
||||
// being wrong.
|
||||
let scope = Scope::Binder {
|
||||
hir_id,
|
||||
bound_vars,
|
||||
s: this.scope,
|
||||
scope_type: BinderScopeType::Normal,
|
||||
where_bound_origin: Some(origin),
|
||||
};
|
||||
this.with(scope, |this| {
|
||||
this.visit_ty(&bounded_ty);
|
||||
walk_list!(this, visit_param_bound, bounds);
|
||||
})
|
||||
}
|
||||
&hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
|
||||
lifetime,
|
||||
bounds,
|
||||
..
|
||||
}) => {
|
||||
this.visit_lifetime(lifetime);
|
||||
walk_list!(this, visit_param_bound, bounds);
|
||||
walk_list!(this, visit_where_predicate, generics.predicates);
|
||||
})
|
||||
}
|
||||
|
||||
if lifetime.res != hir::LifetimeName::Static {
|
||||
for bound in bounds {
|
||||
let hir::GenericBound::Outlives(lt) = bound else {
|
||||
continue;
|
||||
};
|
||||
if lt.res != hir::LifetimeName::Static {
|
||||
continue;
|
||||
}
|
||||
this.insert_lifetime(lt, ResolvedArg::StaticLifetime);
|
||||
this.tcx.struct_span_lint_hir(
|
||||
lint::builtin::UNUSED_LIFETIMES,
|
||||
lifetime.hir_id,
|
||||
lifetime.ident.span,
|
||||
format!(
|
||||
"unnecessary lifetime parameter `{}`",
|
||||
lifetime.ident
|
||||
),
|
||||
|lint| {
|
||||
let help = format!(
|
||||
"you can use the `'static` lifetime directly, in place of `{}`",
|
||||
lifetime.ident,
|
||||
);
|
||||
lint.help(help)
|
||||
},
|
||||
);
|
||||
}
|
||||
fn visit_where_predicate(&mut self, predicate: &'tcx hir::WherePredicate<'tcx>) {
|
||||
match predicate {
|
||||
&hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
|
||||
hir_id,
|
||||
bounded_ty,
|
||||
bounds,
|
||||
bound_generic_params,
|
||||
origin,
|
||||
..
|
||||
}) => {
|
||||
let (bound_vars, binders): (FxIndexMap<LocalDefId, ResolvedArg>, Vec<_>) =
|
||||
bound_generic_params
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(late_bound_idx, param)| {
|
||||
let pair = ResolvedArg::late(late_bound_idx as u32, param);
|
||||
let r = late_arg_as_bound_arg(self.tcx, &pair.1, param);
|
||||
(pair, r)
|
||||
})
|
||||
.unzip();
|
||||
self.record_late_bound_vars(hir_id, binders.clone());
|
||||
// Even if there are no lifetimes defined here, we still wrap it in a binder
|
||||
// scope. If there happens to be a nested poly trait ref (an error), that
|
||||
// will be `Concatenating` anyways, so we don't have to worry about the depth
|
||||
// being wrong.
|
||||
let scope = Scope::Binder {
|
||||
hir_id,
|
||||
bound_vars,
|
||||
s: self.scope,
|
||||
scope_type: BinderScopeType::Normal,
|
||||
where_bound_origin: Some(origin),
|
||||
};
|
||||
self.with(scope, |this| {
|
||||
walk_list!(this, visit_generic_param, bound_generic_params);
|
||||
this.visit_ty(&bounded_ty);
|
||||
walk_list!(this, visit_param_bound, bounds);
|
||||
})
|
||||
}
|
||||
&hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
|
||||
lifetime,
|
||||
bounds,
|
||||
..
|
||||
}) => {
|
||||
self.visit_lifetime(lifetime);
|
||||
walk_list!(self, visit_param_bound, bounds);
|
||||
|
||||
if lifetime.res != hir::LifetimeName::Static {
|
||||
for bound in bounds {
|
||||
let hir::GenericBound::Outlives(lt) = bound else {
|
||||
continue;
|
||||
};
|
||||
if lt.res != hir::LifetimeName::Static {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
&hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
|
||||
lhs_ty,
|
||||
rhs_ty,
|
||||
..
|
||||
}) => {
|
||||
this.visit_ty(lhs_ty);
|
||||
this.visit_ty(rhs_ty);
|
||||
self.insert_lifetime(lt, ResolvedArg::StaticLifetime);
|
||||
self.tcx.struct_span_lint_hir(
|
||||
lint::builtin::UNUSED_LIFETIMES,
|
||||
lifetime.hir_id,
|
||||
lifetime.ident.span,
|
||||
format!("unnecessary lifetime parameter `{}`", lifetime.ident),
|
||||
|lint| {
|
||||
let help = format!(
|
||||
"you can use the `'static` lifetime directly, in place of `{}`",
|
||||
lifetime.ident,
|
||||
);
|
||||
lint.help(help)
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
&hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { lhs_ty, rhs_ty, .. }) => {
|
||||
self.visit_ty(lhs_ty);
|
||||
self.visit_ty(rhs_ty);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_param_bound(&mut self, bound: &'tcx hir::GenericBound<'tcx>) {
|
||||
@ -986,6 +983,18 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> {
|
||||
intravisit::walk_anon_const(this, c);
|
||||
});
|
||||
}
|
||||
|
||||
fn visit_generic_param(&mut self, p: &'tcx GenericParam<'tcx>) {
|
||||
match p.kind {
|
||||
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
|
||||
self.resolve_type_ref(p.def_id, p.hir_id);
|
||||
}
|
||||
GenericParamKind::Lifetime { .. } => {
|
||||
// No need to resolve lifetime params, we don't use them for things
|
||||
// like implicit `?Sized` or const-param-has-ty predicates.
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn object_lifetime_default(tcx: TyCtxt<'_>, param_def_id: LocalDefId) -> ObjectLifetimeDefault {
|
||||
|
@ -1001,8 +1001,22 @@ impl EarlyLintPass for UnusedDocComment {
|
||||
warn_if_doc(cx, arm_span, "match arms", &arm.attrs);
|
||||
}
|
||||
|
||||
fn check_pat(&mut self, cx: &EarlyContext<'_>, pat: &ast::Pat) {
|
||||
if let ast::PatKind::Struct(_, _, fields, _) = &pat.kind {
|
||||
for field in fields {
|
||||
warn_if_doc(cx, field.span, "pattern fields", &field.attrs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
|
||||
warn_if_doc(cx, expr.span, "expressions", &expr.attrs);
|
||||
|
||||
if let ExprKind::Struct(s) = &expr.kind {
|
||||
for field in &s.fields {
|
||||
warn_if_doc(cx, field.span, "expression fields", &field.attrs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_generic_param(&mut self, cx: &EarlyContext<'_>, param: &ast::GenericParam) {
|
||||
|
@ -1202,7 +1202,11 @@ LLVMRustCreateThinLTOData(LLVMRustThinLTOModule *modules,
|
||||
|
||||
Ret->ModuleMap[module->identifier] = mem_buffer;
|
||||
|
||||
#if LLVM_VERSION_GE(18, 0)
|
||||
if (Error Err = readModuleSummaryIndex(mem_buffer, Ret->Index)) {
|
||||
#else
|
||||
if (Error Err = readModuleSummaryIndex(mem_buffer, Ret->Index, i)) {
|
||||
#endif
|
||||
LLVMRustSetLastError(toString(std::move(Err)).c_str());
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -1501,11 +1501,12 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
|
||||
|
||||
// We can't reuse an existing SourceFile, so allocate a new one
|
||||
// containing the information we need.
|
||||
let original_end_pos = source_file_to_import.end_position();
|
||||
let rustc_span::SourceFile {
|
||||
mut name,
|
||||
src_hash,
|
||||
start_pos,
|
||||
end_pos,
|
||||
start_pos: original_start_pos,
|
||||
source_len,
|
||||
lines,
|
||||
multibyte_chars,
|
||||
non_narrow_chars,
|
||||
@ -1547,35 +1548,32 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
|
||||
// on `try_to_translate_virtual_to_real`).
|
||||
try_to_translate_virtual_to_real(&mut name);
|
||||
|
||||
let source_length = (end_pos - start_pos).to_usize();
|
||||
|
||||
let local_version = sess.source_map().new_imported_source_file(
|
||||
name,
|
||||
src_hash,
|
||||
name_hash,
|
||||
source_length,
|
||||
source_len.to_u32(),
|
||||
self.cnum,
|
||||
lines,
|
||||
multibyte_chars,
|
||||
non_narrow_chars,
|
||||
normalized_pos,
|
||||
start_pos,
|
||||
source_file_index,
|
||||
);
|
||||
debug!(
|
||||
"CrateMetaData::imported_source_files alloc \
|
||||
source_file {:?} original (start_pos {:?} end_pos {:?}) \
|
||||
translated (start_pos {:?} end_pos {:?})",
|
||||
source_file {:?} original (start_pos {:?} source_len {:?}) \
|
||||
translated (start_pos {:?} source_len {:?})",
|
||||
local_version.name,
|
||||
start_pos,
|
||||
end_pos,
|
||||
original_start_pos,
|
||||
source_len,
|
||||
local_version.start_pos,
|
||||
local_version.end_pos
|
||||
local_version.source_len
|
||||
);
|
||||
|
||||
ImportedSourceFile {
|
||||
original_start_pos: start_pos,
|
||||
original_end_pos: end_pos,
|
||||
original_start_pos,
|
||||
original_end_pos,
|
||||
translated_source_file: local_version,
|
||||
}
|
||||
})
|
||||
|
@ -80,12 +80,56 @@ macro_rules! define_dep_nodes {
|
||||
}
|
||||
|
||||
/// This enum serves as an index into arrays built by `make_dep_kind_array`.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Encodable, Decodable)]
|
||||
// This enum has more than u8::MAX variants so we need some kind of multi-byte
|
||||
// encoding. The derived Encodable/Decodable uses leb128 encoding which is
|
||||
// dense when only considering this enum. But DepKind is encoded in a larger
|
||||
// struct, and there we can take advantage of the unused bits in the u16.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
#[allow(non_camel_case_types)]
|
||||
#[repr(u16)]
|
||||
pub enum DepKind {
|
||||
$( $( #[$attr] )* $variant),*
|
||||
}
|
||||
|
||||
impl DepKind {
|
||||
// This const implements two things: A bounds check so that we can decode
|
||||
// a DepKind from a u16 with just one check, and a const check that the
|
||||
// discriminants of the variants have been assigned consecutively from 0
|
||||
// so that just the one comparison suffices to check that the u16 can be
|
||||
// transmuted to a DepKind.
|
||||
const VARIANTS: u16 = {
|
||||
let deps: &[DepKind] = &[$(DepKind::$variant,)*];
|
||||
let mut i = 0;
|
||||
while i < deps.len() {
|
||||
if i as u16 != deps[i] as u16 {
|
||||
panic!();
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
deps.len() as u16
|
||||
};
|
||||
}
|
||||
|
||||
impl<S: rustc_serialize::Encoder> rustc_serialize::Encodable<S> for DepKind {
|
||||
#[inline]
|
||||
fn encode(&self, s: &mut S) {
|
||||
s.emit_u16(*self as u16);
|
||||
}
|
||||
}
|
||||
|
||||
impl<D: rustc_serialize::Decoder> rustc_serialize::Decodable<D> for DepKind {
|
||||
#[inline]
|
||||
fn decode(d: &mut D) -> DepKind {
|
||||
let discrim = d.read_u16();
|
||||
assert!(discrim < DepKind::VARIANTS);
|
||||
// SAFETY: DepKind::VARIANTS checks that the discriminant values permit
|
||||
// this one check to soundly guard the transmute.
|
||||
unsafe {
|
||||
std::mem::transmute::<u16, DepKind>(discrim)
|
||||
}
|
||||
}
|
||||
}
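
Since the soundness argument for the `transmute` lives entirely in the `VARIANTS` const, it may help to see the pattern in isolation: the const is both the decode bound and a compile-time assertion that the discriminants run consecutively from 0, so the single `< VARIANTS` comparison is enough. The sketch below reproduces the idea for a made-up `#[repr(u16)]` enum, using plain `encode`/`decode` helpers instead of `rustc_serialize`.

/// A standalone sketch of the same trick: a `#[repr(u16)]` enum whose
/// discriminants are checked at compile time to be consecutive from 0,
/// so one `< VARIANTS` comparison makes the transmute sound.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u16)]
enum Kind {
    A,
    B,
    C,
}

impl Kind {
    // Evaluated at compile time; a panic during constant evaluation
    // (out-of-order discriminants) becomes a build error.
    const VARIANTS: u16 = {
        let all = [Kind::A, Kind::B, Kind::C];
        let mut i = 0;
        while i < all.len() {
            if i as u16 != all[i] as u16 {
                panic!("discriminants must be consecutive from 0");
            }
            i += 1;
        }
        all.len() as u16
    };

    fn encode(self) -> u16 {
        self as u16
    }

    fn decode(raw: u16) -> Kind {
        assert!(raw < Kind::VARIANTS, "invalid discriminant");
        // SAFETY: `VARIANTS` proves the discriminants are exactly 0..VARIANTS,
        // so any `raw` below it corresponds to a real variant.
        unsafe { std::mem::transmute::<u16, Kind>(raw) }
    }
}

fn main() {
    assert_eq!(Kind::decode(Kind::B.encode()), Kind::B);
}
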
|
||||
|
||||
pub(super) fn dep_kind_from_label_string(label: &str) -> Result<DepKind, ()> {
|
||||
match label {
|
||||
$(stringify!($variant) => Ok(DepKind::$variant),)*
|
||||
@ -114,6 +158,8 @@ rustc_query_append!(define_dep_nodes![
|
||||
[] fn CompileMonoItem() -> (),
|
||||
]);
|
||||
|
||||
static_assert_size!(DepKind, 2);
|
||||
|
||||
// WARNING: `construct` is generic and does not know that `CompileCodegenUnit` takes `Symbol`s as keys.
|
||||
// Be very careful changing this type signature!
|
||||
pub(crate) fn make_compile_codegen_unit(tcx: TyCtxt<'_>, name: Symbol) -> DepNode {
|
||||
|
@ -225,6 +225,9 @@ pub fn explain_lint_level_source(
|
||||
err.note_once(format!(
|
||||
"`{flag} {hyphen_case_lint_name}` implied by `{flag} {hyphen_case_flag_val}`"
|
||||
));
|
||||
err.help_once(format!(
|
||||
"to override `{flag} {hyphen_case_flag_val}` add `#[allow({name})]`"
|
||||
));
|
||||
}
|
||||
}
|
||||
LintLevelSource::Node { name: lint_attr_name, span, reason, .. } => {
|
||||
|
@ -103,7 +103,7 @@ impl<T: HasDataLayout> PointerArithmetic for T {}
|
||||
/// mostly opaque; the `Machine` trait extends it with some more operations that also have access to
|
||||
/// some global state.
|
||||
/// The `Debug` rendering is used to display bare provenance, and for the default impl of `fmt`.
|
||||
pub trait Provenance: Copy + fmt::Debug {
|
||||
pub trait Provenance: Copy + fmt::Debug + 'static {
|
||||
/// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
|
||||
/// - If `false`, the offset *must* be relative. This means the bytes representing a pointer are
|
||||
/// different from what the Abstract Machine prescribes, so the interpreter must prevent any
|
||||
|
@ -22,7 +22,7 @@ use rustc_span::hygiene::{
|
||||
ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextData,
|
||||
};
|
||||
use rustc_span::source_map::{SourceMap, StableSourceFileId};
|
||||
use rustc_span::{BytePos, ExpnData, ExpnHash, Pos, SourceFile, Span};
|
||||
use rustc_span::{BytePos, ExpnData, ExpnHash, Pos, RelativeBytePos, SourceFile, Span};
|
||||
use rustc_span::{CachingSourceMapView, Symbol};
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::io;
|
||||
@ -688,11 +688,12 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span {
|
||||
|
||||
let file_lo_index = SourceFileIndex::decode(decoder);
|
||||
let line_lo = usize::decode(decoder);
|
||||
let col_lo = BytePos::decode(decoder);
|
||||
let col_lo = RelativeBytePos::decode(decoder);
|
||||
let len = BytePos::decode(decoder);
|
||||
|
||||
let file_lo = decoder.file_index_to_file(file_lo_index);
|
||||
let lo = file_lo.lines(|lines| lines[line_lo - 1] + col_lo);
|
||||
let lo = file_lo.absolute_position(lo);
|
||||
let hi = lo + len;
|
||||
|
||||
Span::new(lo, hi, ctxt, parent)
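
The decode path above mixes two coordinate systems: the cached `(line, column, length)` triple is relative to the source file, and `absolute_position` then shifts it by the file's start position in the source map. A rough standalone sketch of that arithmetic follows, under the simplifying assumption that a file is just a start offset plus a table of line starts; the types and field names are invented for illustration.

/// Invented stand-in for a source file: absolute start plus per-line offsets.
struct FileInfo {
    start_pos: u32,        // absolute position of the file's first byte
    line_starts: Vec<u32>, // offset of each line start, relative to the file
}

impl FileInfo {
    /// Rebuild an absolute (lo, hi) span from a cached (line, col, len) triple.
    fn absolute_span(&self, line_lo: usize, col_lo: u32, len: u32) -> (u32, u32) {
        let rel_lo = self.line_starts[line_lo - 1] + col_lo; // relative to the file
        let lo = self.start_pos + rel_lo;                    // absolute position
        (lo, lo + len)
    }
}

fn main() {
    let file = FileInfo { start_pos: 1000, line_starts: vec![0, 40, 95] };
    assert_eq!(file.absolute_span(2, 5, 3), (1045, 1048)); // line 2, col 5, len 3
}
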
|
||||
|
@ -1,5 +1,6 @@
|
||||
use rustc_index::IndexSlice;
|
||||
use rustc_middle::{mir::*, thir::*, ty::Ty};
|
||||
use rustc_middle::ty::{self, Ty};
|
||||
use rustc_middle::{mir::*, thir::*};
|
||||
use rustc_span::Span;
|
||||
|
||||
use super::{PResult, ParseCtxt, ParseError};
|
||||
@ -159,6 +160,14 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
|
||||
);
|
||||
self.parse_local_decls(local_decls.iter().copied())?;
|
||||
|
||||
let (debuginfo, rest) = parse_by_kind!(self, rest, _, "body with debuginfo",
|
||||
ExprKind::Block { block } => {
|
||||
let block = &self.thir[*block];
|
||||
(&block.stmts, block.expr.unwrap())
|
||||
},
|
||||
);
|
||||
self.parse_debuginfo(debuginfo.iter().copied())?;
|
||||
|
||||
let block_defs = parse_by_kind!(self, rest, _, "body with block defs",
|
||||
ExprKind::Block { block } => &self.thir[*block].stmts,
|
||||
);
|
||||
@ -195,6 +204,52 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_debuginfo(&mut self, stmts: impl Iterator<Item = StmtId>) -> PResult<()> {
|
||||
for stmt in stmts {
|
||||
let stmt = &self.thir[stmt];
|
||||
let expr = match stmt.kind {
|
||||
StmtKind::Let { span, .. } => {
|
||||
return Err(ParseError {
|
||||
span,
|
||||
item_description: format!("{:?}", stmt),
|
||||
expected: "debuginfo".to_string(),
|
||||
});
|
||||
}
|
||||
StmtKind::Expr { expr, .. } => expr,
|
||||
};
|
||||
let span = self.thir[expr].span;
|
||||
let (name, operand) = parse_by_kind!(self, expr, _, "debuginfo",
|
||||
@call("mir_debuginfo", args) => {
|
||||
(args[0], args[1])
|
||||
},
|
||||
);
|
||||
let name = parse_by_kind!(self, name, _, "debuginfo",
|
||||
ExprKind::Literal { lit, neg: false } => lit,
|
||||
);
|
||||
let Some(name) = name.node.str() else {
|
||||
return Err(ParseError {
|
||||
span,
|
||||
item_description: format!("{:?}", name),
|
||||
expected: "string".to_string(),
|
||||
});
|
||||
};
|
||||
let operand = self.parse_operand(operand)?;
|
||||
let value = match operand {
|
||||
Operand::Constant(c) => VarDebugInfoContents::Const(*c),
|
||||
Operand::Copy(p) | Operand::Move(p) => VarDebugInfoContents::Place(p),
|
||||
};
|
||||
let dbginfo = VarDebugInfo {
|
||||
name,
|
||||
source_info: SourceInfo { span, scope: self.source_scope },
|
||||
argument_index: None,
|
||||
value,
|
||||
};
|
||||
self.body.var_debug_info.push(dbginfo);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_let_statement(&mut self, stmt_id: StmtId) -> PResult<(LocalVarId, Ty<'tcx>, Span)> {
|
||||
let pattern = match &self.thir[stmt_id].kind {
|
||||
StmtKind::Let { pattern, .. } => pattern,
|
||||
|
@ -204,7 +204,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
|
||||
)
|
||||
}
|
||||
|
||||
fn parse_operand(&self, expr_id: ExprId) -> PResult<Operand<'tcx>> {
|
||||
pub fn parse_operand(&self, expr_id: ExprId) -> PResult<Operand<'tcx>> {
|
||||
parse_by_kind!(self, expr_id, expr, "operand",
|
||||
@call("mir_move", args) => self.parse_place(args[0]).map(Operand::Move),
|
||||
@call("mir_static", args) => self.parse_static(args[0]),
|
||||
|
@ -720,32 +720,31 @@ fn non_exhaustive_match<'p, 'tcx>(
|
||||
};
|
||||
};
|
||||
|
||||
let is_variant_list_non_exhaustive = matches!(scrut_ty.kind(),
|
||||
ty::Adt(def, _) if def.is_variant_list_non_exhaustive() && !def.did().is_local());
|
||||
|
||||
adt_defined_here(cx, &mut err, scrut_ty, &witnesses);
|
||||
err.note(format!(
|
||||
"the matched value is of type `{}`{}",
|
||||
scrut_ty,
|
||||
if is_variant_list_non_exhaustive { ", which is marked as non-exhaustive" } else { "" }
|
||||
));
|
||||
err.note(format!("the matched value is of type `{}`", scrut_ty));
|
||||
|
||||
if !is_empty_match && witnesses.len() == 1 {
|
||||
let mut non_exhaustive_tys = FxHashSet::default();
|
||||
collect_non_exhaustive_tys(&witnesses[0], &mut non_exhaustive_tys);
|
||||
|
||||
for ty in non_exhaustive_tys {
|
||||
if ty == cx.tcx.types.usize || ty == cx.tcx.types.isize {
|
||||
if ty.is_ptr_sized_integral() {
|
||||
err.note(format!(
|
||||
"`{ty}` does not have a fixed maximum value, so a wildcard `_` is necessary to match \
|
||||
exhaustively",
|
||||
));
|
||||
exhaustively",
|
||||
));
|
||||
if cx.tcx.sess.is_nightly_build() {
|
||||
err.help(format!(
|
||||
"add `#![feature(precise_pointer_size_matching)]` to the crate attributes to \
|
||||
enable precise `{ty}` matching",
|
||||
));
|
||||
"add `#![feature(precise_pointer_size_matching)]` to the crate attributes to \
|
||||
enable precise `{ty}` matching",
|
||||
));
|
||||
}
|
||||
} else if ty == cx.tcx.types.str_ {
|
||||
err.note(format!(
|
||||
"`&str` cannot be matched exhaustively, so a wildcard `_` is necessary",
|
||||
));
|
||||
} else if cx.is_foreign_non_exhaustive_enum(ty) {
|
||||
err.note(format!("`{ty}` is marked as non-exhaustive, so a wildcard `_` is necessary to match exhaustively"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -555,8 +555,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
|
||||
subpattern: pattern,
|
||||
ascription: Ascription {
|
||||
annotation,
|
||||
/// Note that use `Contravariant` here. See the
|
||||
/// `variance` field documentation for details.
|
||||
// Note that we use `Contravariant` here. See the
|
||||
// `variance` field documentation for details.
|
||||
variance: ty::Variance::Contravariant,
|
||||
},
|
||||
},
|
||||
|
@ -618,10 +618,15 @@ impl<'p, 'tcx> Usefulness<'p, 'tcx> {
|
||||
let new_witnesses = if let Constructor::Missing { .. } = ctor {
|
||||
// We got the special `Missing` constructor, so each of the missing constructors
|
||||
// gives a new pattern that is not caught by the match. We list those patterns.
|
||||
let new_patterns = if pcx.is_non_exhaustive {
|
||||
// Here we don't want the user to try to list all variants, we want them to add
|
||||
// a wildcard, so we only suggest that.
|
||||
vec![DeconstructedPat::wildcard(pcx.ty, pcx.span)]
|
||||
if pcx.is_non_exhaustive {
|
||||
witnesses
|
||||
.into_iter()
|
||||
// Here we don't want the user to try to list all variants, we want them to add
|
||||
// a wildcard, so we only suggest that.
|
||||
.map(|witness| {
|
||||
witness.apply_constructor(pcx, &Constructor::NonExhaustive)
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
let mut split_wildcard = SplitWildcard::new(pcx);
|
||||
split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
|
||||
@ -633,7 +638,7 @@ impl<'p, 'tcx> Usefulness<'p, 'tcx> {
|
||||
// constructor, that matches everything that can be built with
|
||||
// it. For example, if `ctor` is a `Constructor::Variant` for
|
||||
// `Option::Some`, we get the pattern `Some(_)`.
|
||||
let mut new: Vec<DeconstructedPat<'_, '_>> = split_wildcard
|
||||
let mut new_patterns: Vec<DeconstructedPat<'_, '_>> = split_wildcard
|
||||
.iter_missing(pcx)
|
||||
.filter_map(|missing_ctor| {
|
||||
// Check if this variant is marked `doc(hidden)`
|
||||
@ -648,27 +653,25 @@ impl<'p, 'tcx> Usefulness<'p, 'tcx> {
|
||||
.collect();
|
||||
|
||||
if hide_variant_show_wild {
|
||||
new.push(DeconstructedPat::wildcard(pcx.ty, pcx.span));
|
||||
new_patterns.push(DeconstructedPat::wildcard(pcx.ty, pcx.span));
|
||||
}
|
||||
|
||||
new
|
||||
};
|
||||
|
||||
witnesses
|
||||
.into_iter()
|
||||
.flat_map(|witness| {
|
||||
new_patterns.iter().map(move |pat| {
|
||||
Witness(
|
||||
witness
|
||||
.0
|
||||
.iter()
|
||||
.chain(once(pat))
|
||||
.map(DeconstructedPat::clone_and_forget_reachability)
|
||||
.collect(),
|
||||
)
|
||||
witnesses
|
||||
.into_iter()
|
||||
.flat_map(|witness| {
|
||||
new_patterns.iter().map(move |pat| {
|
||||
Witness(
|
||||
witness
|
||||
.0
|
||||
.iter()
|
||||
.chain(once(pat))
|
||||
.map(DeconstructedPat::clone_and_forget_reachability)
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
.collect()
|
||||
}
|
||||
} else {
|
||||
witnesses
|
||||
.into_iter()
|
||||
|
@ -538,7 +538,7 @@ where
|
||||
|
||||
fn visit_block_start(
|
||||
&mut self,
|
||||
_results: &Results<'tcx, A>,
|
||||
_results: &mut Results<'tcx, A>,
|
||||
state: &Self::FlowState,
|
||||
_block_data: &mir::BasicBlockData<'tcx>,
|
||||
_block: BasicBlock,
|
||||
@ -550,7 +550,7 @@ where
|
||||
|
||||
fn visit_block_end(
|
||||
&mut self,
|
||||
_results: &Results<'tcx, A>,
|
||||
_results: &mut Results<'tcx, A>,
|
||||
state: &Self::FlowState,
|
||||
_block_data: &mir::BasicBlockData<'tcx>,
|
||||
_block: BasicBlock,
|
||||
@ -562,7 +562,7 @@ where
|
||||
|
||||
fn visit_statement_before_primary_effect(
|
||||
&mut self,
|
||||
results: &Results<'tcx, A>,
|
||||
results: &mut Results<'tcx, A>,
|
||||
state: &Self::FlowState,
|
||||
_statement: &mir::Statement<'tcx>,
|
||||
_location: Location,
|
||||
@ -575,7 +575,7 @@ where
|
||||
|
||||
fn visit_statement_after_primary_effect(
|
||||
&mut self,
|
||||
results: &Results<'tcx, A>,
|
||||
results: &mut Results<'tcx, A>,
|
||||
state: &Self::FlowState,
|
||||
_statement: &mir::Statement<'tcx>,
|
||||
_location: Location,
|
||||
@ -586,7 +586,7 @@ where
|
||||
|
||||
fn visit_terminator_before_primary_effect(
|
||||
&mut self,
|
||||
results: &Results<'tcx, A>,
|
||||
results: &mut Results<'tcx, A>,
|
||||
state: &Self::FlowState,
|
||||
_terminator: &mir::Terminator<'tcx>,
|
||||
_location: Location,
|
||||
@ -599,7 +599,7 @@ where
|
||||
|
||||
fn visit_terminator_after_primary_effect(
|
||||
&mut self,
|
||||
results: &Results<'tcx, A>,
|
||||
results: &mut Results<'tcx, A>,
|
||||
state: &Self::FlowState,
|
||||
_terminator: &mir::Terminator<'tcx>,
|
||||
_location: Location,
|
||||
|
@ -35,7 +35,7 @@ pub trait ResultsVisitor<'mir, 'tcx, R> {
|
||||
|
||||
fn visit_block_start(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
_state: &Self::FlowState,
|
||||
_block_data: &'mir mir::BasicBlockData<'tcx>,
|
||||
_block: BasicBlock,
|
||||
@ -46,7 +46,7 @@ pub trait ResultsVisitor<'mir, 'tcx, R> {
|
||||
/// its `statement_effect`.
|
||||
fn visit_statement_before_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
_state: &Self::FlowState,
|
||||
_statement: &'mir mir::Statement<'tcx>,
|
||||
_location: Location,
|
||||
@ -57,7 +57,7 @@ pub trait ResultsVisitor<'mir, 'tcx, R> {
|
||||
/// statement applied to `state`.
|
||||
fn visit_statement_after_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
_state: &Self::FlowState,
|
||||
_statement: &'mir mir::Statement<'tcx>,
|
||||
_location: Location,
|
||||
@ -68,7 +68,7 @@ pub trait ResultsVisitor<'mir, 'tcx, R> {
|
||||
/// its `terminator_effect`.
|
||||
fn visit_terminator_before_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
_state: &Self::FlowState,
|
||||
_terminator: &'mir mir::Terminator<'tcx>,
|
||||
_location: Location,
|
||||
@ -81,7 +81,7 @@ pub trait ResultsVisitor<'mir, 'tcx, R> {
|
||||
/// The `call_return_effect` (if one exists) will *not* be applied to `state`.
|
||||
fn visit_terminator_after_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
_state: &Self::FlowState,
|
||||
_terminator: &'mir mir::Terminator<'tcx>,
|
||||
_location: Location,
|
||||
@ -90,7 +90,7 @@ pub trait ResultsVisitor<'mir, 'tcx, R> {
|
||||
|
||||
fn visit_block_end(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
_state: &Self::FlowState,
|
||||
_block_data: &'mir mir::BasicBlockData<'tcx>,
|
||||
_block: BasicBlock,
|
||||
|
@ -22,8 +22,7 @@ use rustc_target::spec::abi::Abi as CallAbi;
|
||||
use crate::MirPass;
|
||||
use rustc_const_eval::interpret::{
|
||||
self, compile_time_machine, AllocId, ConstAllocation, ConstValue, FnArg, Frame, ImmTy,
|
||||
Immediate, InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, PlaceTy, Pointer, Scalar,
|
||||
StackPopCleanup,
|
||||
Immediate, InterpCx, InterpResult, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, StackPopCleanup,
|
||||
};
|
||||
|
||||
/// The maximum number of bytes that we'll allocate space for a local or the return value.
|
||||
@ -225,11 +224,11 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
|
||||
throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp")
|
||||
}
|
||||
|
||||
fn access_local_mut<'a>(
|
||||
fn before_access_local_mut<'a>(
|
||||
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
|
||||
frame: usize,
|
||||
local: Local,
|
||||
) -> InterpResult<'tcx, &'a mut interpret::Operand<Self::Provenance>> {
|
||||
) -> InterpResult<'tcx> {
|
||||
assert_eq!(frame, 0);
|
||||
match ecx.machine.can_const_prop[local] {
|
||||
ConstPropMode::NoPropagation => {
|
||||
@ -242,7 +241,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
|
||||
}
|
||||
ConstPropMode::FullConstProp => {}
|
||||
}
|
||||
ecx.machine.stack[frame].locals[local].access_mut()
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn before_access_global(
|
||||
@ -382,8 +381,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
// mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
|
||||
// stopping us before those unsized immediates can cause issues deeper in the
|
||||
// interpreter.
|
||||
ecx.frame_mut().locals[local].value =
|
||||
LocalValue::Live(interpret::Operand::Immediate(Immediate::Uninit));
|
||||
ecx.frame_mut().locals[local].make_live_uninit();
|
||||
}
|
||||
|
||||
ConstPropagator { ecx, tcx, param_env, local_decls: &dummy_body.local_decls }
|
||||
@ -392,7 +390,11 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> {
|
||||
let op = match self.ecx.eval_place_to_op(place, None) {
|
||||
Ok(op) => {
|
||||
if matches!(*op, interpret::Operand::Immediate(Immediate::Uninit)) {
|
||||
if op
|
||||
.as_mplace_or_imm()
|
||||
.right()
|
||||
.is_some_and(|imm| matches!(*imm, Immediate::Uninit))
|
||||
{
|
||||
// Make sure nobody accidentally uses this value.
|
||||
return None;
|
||||
}
|
||||
@ -415,8 +417,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
/// Remove `local` from the pool of `Locals`. Allows writing to them,
|
||||
/// but not reading from them anymore.
|
||||
fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
|
||||
ecx.frame_mut().locals[local].value =
|
||||
LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit));
|
||||
ecx.frame_mut().locals[local].make_live_uninit();
|
||||
ecx.machine.written_only_inside_own_block_locals.remove(&local);
|
||||
}
|
||||
|
||||
@ -743,7 +744,8 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
|
||||
) -> Option<PlaceElem<'tcx>> {
|
||||
if let PlaceElem::Index(local) = elem
|
||||
&& let Some(value) = self.get_const(local.into())
|
||||
&& let interpret::Operand::Immediate(Immediate::Scalar(scalar)) = *value
|
||||
&& let Some(imm) = value.as_mplace_or_imm().right()
|
||||
&& let Immediate::Scalar(scalar) = *imm
|
||||
&& let Ok(offset) = scalar.to_target_usize(&self.tcx)
|
||||
&& let Some(min_length) = offset.checked_add(1)
|
||||
{
|
||||
|
@ -7,7 +7,7 @@ use either::Left;
|
||||
|
||||
use rustc_const_eval::interpret::Immediate;
|
||||
use rustc_const_eval::interpret::{
|
||||
self, InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, Scalar, StackPopCleanup,
|
||||
InterpCx, InterpResult, MemoryKind, OpTy, Scalar, StackPopCleanup,
|
||||
};
|
||||
use rustc_const_eval::ReportErrorExt;
|
||||
use rustc_hir::def::DefKind;
|
||||
@ -212,8 +212,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
// mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
|
||||
// stopping us before those unsized immediates can cause issues deeper in the
|
||||
// interpreter.
|
||||
ecx.frame_mut().locals[local].value =
|
||||
LocalValue::Live(interpret::Operand::Immediate(Immediate::Uninit));
|
||||
ecx.frame_mut().locals[local].make_live_uninit();
|
||||
}
|
||||
|
||||
ConstPropagator {
|
||||
@ -236,7 +235,11 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> {
|
||||
let op = match self.ecx.eval_place_to_op(place, None) {
|
||||
Ok(op) => {
|
||||
if matches!(*op, interpret::Operand::Immediate(Immediate::Uninit)) {
|
||||
if op
|
||||
.as_mplace_or_imm()
|
||||
.right()
|
||||
.is_some_and(|imm| matches!(*imm, Immediate::Uninit))
|
||||
{
|
||||
// Make sure nobody accidentally uses this value.
|
||||
return None;
|
||||
}
|
||||
@ -259,8 +262,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
/// Remove `local` from the pool of `Locals`. Allows writing to them,
|
||||
/// but not reading from them anymore.
|
||||
fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
|
||||
ecx.frame_mut().locals[local].value =
|
||||
LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit));
|
||||
ecx.frame_mut().locals[local].make_live_uninit();
|
||||
ecx.machine.written_only_inside_own_block_locals.remove(&local);
|
||||
}
|
||||
|
||||
@ -656,12 +658,12 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
|
||||
}
|
||||
StatementKind::StorageLive(local) => {
|
||||
let frame = self.ecx.frame_mut();
|
||||
frame.locals[local].value =
|
||||
LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit));
|
||||
frame.locals[local].make_live_uninit();
|
||||
}
|
||||
StatementKind::StorageDead(local) => {
|
||||
let frame = self.ecx.frame_mut();
|
||||
frame.locals[local].value = LocalValue::Dead;
|
||||
// We don't actually track liveness, so the local remains live. But forget its value.
|
||||
frame.locals[local].make_live_uninit();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@@ -28,7 +28,7 @@ use rustc_middle::mir::{
use rustc_middle::ty::TyCtxt;
use rustc_span::def_id::DefId;
use rustc_span::source_map::SourceMap;
use rustc_span::{CharPos, ExpnKind, Pos, SourceFile, Span, Symbol};
use rustc_span::{ExpnKind, SourceFile, Span, Symbol};

/// A simple error message wrapper for `coverage::Error`s.
#[derive(Debug)]
@@ -314,8 +313,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
};
graphviz_data.add_bcb_coverage_span_with_counter(bcb, &covspan, &counter_kind);

let code_region =
make_code_region(source_map, file_name, &self.source_file, span, body_span);
let code_region = make_code_region(source_map, file_name, span, body_span);

inject_statement(
self.mir_body,
@@ -510,40 +509,36 @@ fn inject_intermediate_expression(mir_body: &mut mir::Body<'_>, expression: Cove
fn make_code_region(
source_map: &SourceMap,
file_name: Symbol,
source_file: &Lrc<SourceFile>,
span: Span,
body_span: Span,
) -> CodeRegion {
debug!(
"Called make_code_region(file_name={}, source_file={:?}, span={}, body_span={})",
"Called make_code_region(file_name={}, span={}, body_span={})",
file_name,
source_file,
source_map.span_to_diagnostic_string(span),
source_map.span_to_diagnostic_string(body_span)
);

let (start_line, mut start_col) = source_file.lookup_file_pos(span.lo());
let (end_line, end_col) = if span.hi() == span.lo() {
let (end_line, mut end_col) = (start_line, start_col);
let (file, mut start_line, mut start_col, mut end_line, mut end_col) =
source_map.span_to_location_info(span);
if span.hi() == span.lo() {
// Extend an empty span by one character so the region will be counted.
let CharPos(char_pos) = start_col;
if span.hi() == body_span.hi() {
start_col = CharPos(char_pos.saturating_sub(1));
start_col = start_col.saturating_sub(1);
} else {
end_col = CharPos(char_pos + 1);
end_col = start_col + 1;
}
(end_line, end_col)
} else {
source_file.lookup_file_pos(span.hi())
};
let start_line = source_map.doctest_offset_line(&source_file.name, start_line);
let end_line = source_map.doctest_offset_line(&source_file.name, end_line);
if let Some(file) = file {
start_line = source_map.doctest_offset_line(&file.name, start_line);
end_line = source_map.doctest_offset_line(&file.name, end_line);
}
CodeRegion {
file_name,
start_line: start_line as u32,
start_col: start_col.to_u32() + 1,
start_col: start_col as u32,
end_line: end_line as u32,
end_col: end_col.to_u32() + 1,
end_col: end_col as u32,
}
}

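The rewritten make_code_region above drops the SourceFile::lookup_file_pos path in favour of SourceMap::span_to_location_info and still widens an empty span by one column so that the region is counted. A minimal standalone sketch of that widening rule (the Region type here is illustrative, not the compiler's CodeRegion):

    #[derive(Debug, PartialEq)]
    struct Region { start_line: u32, start_col: u32, end_line: u32, end_col: u32 }

    // Widen a zero-width region by one column so it is still counted:
    // backwards when it sits at the very end of the body, forwards otherwise.
    fn widen_if_empty(mut r: Region, at_body_end: bool) -> Region {
        if (r.start_line, r.start_col) == (r.end_line, r.end_col) {
            if at_body_end {
                r.start_col = r.start_col.saturating_sub(1);
            } else {
                r.end_col += 1;
            }
        }
        r
    }

    fn main() {
        let r = widen_if_empty(Region { start_line: 3, start_col: 7, end_line: 3, end_col: 7 }, false);
        assert_eq!(r, Region { start_line: 3, start_col: 7, end_line: 3, end_col: 8 });
    }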
@ -401,7 +401,7 @@ impl<'mir, 'tcx>
|
||||
|
||||
fn visit_statement_before_primary_effect(
|
||||
&mut self,
|
||||
results: &Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
|
||||
results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
|
||||
state: &Self::FlowState,
|
||||
statement: &'mir Statement<'tcx>,
|
||||
location: Location,
|
||||
@ -417,7 +417,7 @@ impl<'mir, 'tcx>
|
||||
|
||||
fn visit_statement_after_primary_effect(
|
||||
&mut self,
|
||||
results: &Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
|
||||
results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
|
||||
state: &Self::FlowState,
|
||||
statement: &'mir Statement<'tcx>,
|
||||
location: Location,
|
||||
@ -443,7 +443,7 @@ impl<'mir, 'tcx>
|
||||
|
||||
fn visit_terminator_before_primary_effect(
|
||||
&mut self,
|
||||
results: &Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
|
||||
results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
|
||||
state: &Self::FlowState,
|
||||
terminator: &'mir Terminator<'tcx>,
|
||||
location: Location,
|
||||
|
@ -814,7 +814,7 @@ impl<'mir, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx, R>
|
||||
|
||||
fn visit_statement_before_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
state: &Self::FlowState,
|
||||
_statement: &'mir Statement<'tcx>,
|
||||
loc: Location,
|
||||
@ -824,7 +824,7 @@ impl<'mir, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx, R>
|
||||
|
||||
fn visit_terminator_before_primary_effect(
|
||||
&mut self,
|
||||
_results: &R,
|
||||
_results: &mut R,
|
||||
state: &Self::FlowState,
|
||||
_terminator: &'mir Terminator<'tcx>,
|
||||
loc: Location,
|
||||
|
@ -176,23 +176,22 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
|
||||
} else {
|
||||
span_bug!(terminator.source_info.span, "Only passing a local is supported");
|
||||
};
|
||||
// Add new statement at the end of the block that does the read, and patch
|
||||
// up the terminator.
|
||||
block.statements.push(Statement {
|
||||
source_info: terminator.source_info,
|
||||
kind: StatementKind::Assign(Box::new((
|
||||
*destination,
|
||||
Rvalue::Use(Operand::Copy(derefed_place)),
|
||||
))),
|
||||
});
|
||||
terminator.kind = match *target {
|
||||
None => {
|
||||
// No target means this read something uninhabited,
|
||||
// so it must be unreachable, and we don't need to
|
||||
// preserve the assignment either.
|
||||
// so it must be unreachable.
|
||||
TerminatorKind::Unreachable
|
||||
}
|
||||
Some(target) => {
|
||||
block.statements.push(Statement {
|
||||
source_info: terminator.source_info,
|
||||
kind: StatementKind::Assign(Box::new((
|
||||
*destination,
|
||||
Rvalue::Use(Operand::Copy(derefed_place)),
|
||||
))),
|
||||
});
|
||||
TerminatorKind::Goto { target }
|
||||
}
|
||||
Some(target) => TerminatorKind::Goto { target },
|
||||
}
|
||||
}
|
||||
sym::write_via_move => {
|
||||
|
@@ -14,6 +14,10 @@ monomorphize_large_assignments =
.label = value moved from here
.note = The current maximum size is {$limit}, but it can be customized with the move_size_limit attribute: `#![move_size_limit = "..."]`

monomorphize_no_optimized_mir =
missing optimized MIR for an item in the crate `{$crate_name}`
.note = missing optimized MIR for this item (was the crate `{$crate_name}` compiled with `--emit=metadata`?)

monomorphize_recursion_limit =
reached the recursion limit while instantiating `{$shrunk}`
.note = `{$def_path_str}` defined here

@ -179,8 +179,8 @@ use rustc_middle::query::TyCtxtAt;
|
||||
use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCoercion};
|
||||
use rustc_middle::ty::print::with_no_trimmed_paths;
|
||||
use rustc_middle::ty::{
|
||||
self, GenericParamDefKind, Instance, InstanceDef, Ty, TyCtxt, TypeFoldable, TypeVisitableExt,
|
||||
VtblEntry,
|
||||
self, AssocKind, GenericParamDefKind, Instance, InstanceDef, Ty, TyCtxt, TypeFoldable,
|
||||
TypeVisitableExt, VtblEntry,
|
||||
};
|
||||
use rustc_middle::ty::{GenericArgKind, GenericArgs};
|
||||
use rustc_middle::{middle::codegen_fn_attrs::CodegenFnAttrFlags, mir::visit::TyContext};
|
||||
@ -188,11 +188,13 @@ use rustc_session::config::EntryFnType;
|
||||
use rustc_session::lint::builtin::LARGE_ASSIGNMENTS;
|
||||
use rustc_session::Limit;
|
||||
use rustc_span::source_map::{dummy_spanned, respan, Span, Spanned, DUMMY_SP};
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_target::abi::Size;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::errors::{
|
||||
EncounteredErrorWhileInstantiating, LargeAssignmentsLint, RecursionLimit, TypeLengthLimit,
|
||||
EncounteredErrorWhileInstantiating, LargeAssignmentsLint, NoOptimizedMir, RecursionLimit,
|
||||
TypeLengthLimit,
|
||||
};
|
||||
|
||||
#[derive(PartialEq)]
|
||||
@ -431,7 +433,7 @@ fn collect_items_rec<'tcx>(
|
||||
hir::InlineAsmOperand::SymFn { anon_const } => {
|
||||
let fn_ty =
|
||||
tcx.typeck_body(anon_const.body).node_type(anon_const.hir_id);
|
||||
visit_fn_use(tcx, fn_ty, false, *op_sp, &mut used_items);
|
||||
visit_fn_use(tcx, fn_ty, false, *op_sp, &mut used_items, &[]);
|
||||
}
|
||||
hir::InlineAsmOperand::SymStatic { path: _, def_id } => {
|
||||
let instance = Instance::mono(tcx, *def_id);
|
||||
@ -592,6 +594,11 @@ struct MirUsedCollector<'a, 'tcx> {
|
||||
instance: Instance<'tcx>,
|
||||
/// Spans for move size lints already emitted. Helps avoid duplicate lints.
|
||||
move_size_spans: Vec<Span>,
|
||||
/// If true, we should temporarily skip move size checks, because we are
|
||||
/// processing an operand to a `skip_move_check_fns` function call.
|
||||
skip_move_size_check: bool,
|
||||
/// Set of functions for which it is OK to move large data into.
|
||||
skip_move_check_fns: Vec<DefId>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> MirUsedCollector<'a, 'tcx> {
|
||||
@ -690,7 +697,14 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> {
|
||||
) => {
|
||||
let fn_ty = operand.ty(self.body, self.tcx);
|
||||
let fn_ty = self.monomorphize(fn_ty);
|
||||
visit_fn_use(self.tcx, fn_ty, false, span, &mut self.output);
|
||||
visit_fn_use(
|
||||
self.tcx,
|
||||
fn_ty,
|
||||
false,
|
||||
span,
|
||||
&mut self.output,
|
||||
&self.skip_move_check_fns,
|
||||
);
|
||||
}
|
||||
mir::Rvalue::Cast(
|
||||
mir::CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_)),
|
||||
@ -789,7 +803,14 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> {
|
||||
mir::TerminatorKind::Call { ref func, .. } => {
|
||||
let callee_ty = func.ty(self.body, tcx);
|
||||
let callee_ty = self.monomorphize(callee_ty);
|
||||
visit_fn_use(self.tcx, callee_ty, true, source, &mut self.output)
|
||||
self.skip_move_size_check = visit_fn_use(
|
||||
self.tcx,
|
||||
callee_ty,
|
||||
true,
|
||||
source,
|
||||
&mut self.output,
|
||||
&self.skip_move_check_fns,
|
||||
)
|
||||
}
|
||||
mir::TerminatorKind::Drop { ref place, .. } => {
|
||||
let ty = place.ty(self.body, self.tcx).ty;
|
||||
@ -801,7 +822,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> {
|
||||
match *op {
|
||||
mir::InlineAsmOperand::SymFn { ref value } => {
|
||||
let fn_ty = self.monomorphize(value.literal.ty());
|
||||
visit_fn_use(self.tcx, fn_ty, false, source, &mut self.output);
|
||||
visit_fn_use(self.tcx, fn_ty, false, source, &mut self.output, &[]);
|
||||
}
|
||||
mir::InlineAsmOperand::SymStatic { def_id } => {
|
||||
let instance = Instance::mono(self.tcx, def_id);
|
||||
@ -840,12 +861,13 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> {
|
||||
}
|
||||
|
||||
self.super_terminator(terminator, location);
|
||||
self.skip_move_size_check = false;
|
||||
}
|
||||
|
||||
fn visit_operand(&mut self, operand: &mir::Operand<'tcx>, location: Location) {
|
||||
self.super_operand(operand, location);
|
||||
let move_size_limit = self.tcx.move_size_limit().0;
|
||||
if move_size_limit > 0 {
|
||||
if move_size_limit > 0 && !self.skip_move_size_check {
|
||||
self.check_move_size(move_size_limit, operand, location);
|
||||
}
|
||||
}
|
||||
@ -876,8 +898,11 @@ fn visit_fn_use<'tcx>(
|
||||
is_direct_call: bool,
|
||||
source: Span,
|
||||
output: &mut MonoItems<'tcx>,
|
||||
) {
|
||||
skip_move_check_fns: &[DefId],
|
||||
) -> bool {
|
||||
let mut skip_move_size_check = false;
|
||||
if let ty::FnDef(def_id, args) = *ty.kind() {
|
||||
skip_move_size_check = skip_move_check_fns.contains(&def_id);
|
||||
let instance = if is_direct_call {
|
||||
ty::Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args)
|
||||
} else {
|
||||
@ -888,6 +913,7 @@ fn visit_fn_use<'tcx>(
|
||||
};
|
||||
visit_instance_use(tcx, instance, is_direct_call, source, output);
|
||||
}
|
||||
skip_move_size_check
|
||||
}
|
||||
|
||||
fn visit_instance_use<'tcx>(
|
||||
@ -960,7 +986,10 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) ->
|
||||
}
|
||||
|
||||
if !tcx.is_mir_available(def_id) {
|
||||
bug!("no MIR available for {:?}", def_id);
|
||||
tcx.sess.emit_fatal(NoOptimizedMir {
|
||||
span: tcx.def_span(def_id),
|
||||
crate_name: tcx.crate_name(def_id.krate),
|
||||
});
|
||||
}
|
||||
|
||||
true
|
||||
@@ -1365,6 +1394,31 @@ fn collect_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIt
}
}

fn add_assoc_fn<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: Option<DefId>,
fn_ident: Ident,
skip_move_check_fns: &mut Vec<DefId>,
) {
if let Some(def_id) = def_id.and_then(|def_id| assoc_fn_of_type(tcx, def_id, fn_ident)) {
skip_move_check_fns.push(def_id);
}
}

fn assoc_fn_of_type<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, fn_ident: Ident) -> Option<DefId> {
for impl_def_id in tcx.inherent_impls(def_id) {
if let Some(new) = tcx.associated_items(impl_def_id).find_by_name_and_kind(
tcx,
fn_ident,
AssocKind::Fn,
def_id,
) {
return Some(new.def_id);
}
}
return None;
}

/// Scans the MIR in order to find function calls, closures, and drop-glue.
|
||||
#[instrument(skip(tcx, output), level = "debug")]
|
||||
fn collect_used_items<'tcx>(
|
||||
@ -1373,8 +1427,39 @@ fn collect_used_items<'tcx>(
|
||||
output: &mut MonoItems<'tcx>,
|
||||
) {
|
||||
let body = tcx.instance_mir(instance.def);
|
||||
MirUsedCollector { tcx, body: &body, output, instance, move_size_spans: vec![] }
|
||||
.visit_body(&body);
|
||||
|
||||
let mut skip_move_check_fns = vec![];
|
||||
if tcx.move_size_limit().0 > 0 {
|
||||
add_assoc_fn(
|
||||
tcx,
|
||||
tcx.lang_items().owned_box(),
|
||||
Ident::from_str("new"),
|
||||
&mut skip_move_check_fns,
|
||||
);
|
||||
add_assoc_fn(
|
||||
tcx,
|
||||
tcx.get_diagnostic_item(sym::Arc),
|
||||
Ident::from_str("new"),
|
||||
&mut skip_move_check_fns,
|
||||
);
|
||||
add_assoc_fn(
|
||||
tcx,
|
||||
tcx.get_diagnostic_item(sym::Rc),
|
||||
Ident::from_str("new"),
|
||||
&mut skip_move_check_fns,
|
||||
);
|
||||
}
|
||||
|
||||
MirUsedCollector {
|
||||
tcx,
|
||||
body: &body,
|
||||
output,
|
||||
instance,
|
||||
move_size_spans: vec![],
|
||||
skip_move_size_check: false,
|
||||
skip_move_check_fns,
|
||||
}
|
||||
.visit_body(&body);
|
||||
}
|
||||
|
||||
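The collector changes above thread a skip_move_check_fns list through visit_fn_use; when a move-size limit is configured, the list is seeded with the `new` associated functions of Box, Rc and Arc, so operands passed directly to those constructors are exempt from the large-move lint. A small standalone sketch of that gating logic (hypothetical names, not the collector's real types):

    // Decide whether a moved operand should be checked against the move-size limit.
    fn should_check_move_size(limit_bytes: u64, callee_is_exempt: bool) -> bool {
        // A limit of 0 disables the check entirely; calls to exempt constructors
        // (e.g. Box::new, Rc::new, Arc::new) temporarily suppress it as well.
        limit_bytes > 0 && !callee_is_exempt
    }

    fn main() {
        let exempt_callees = ["Box::new", "Rc::new", "Arc::new"];
        let callee = "Arc::new";
        let check = should_check_move_size(4096, exempt_callees.contains(&callee));
        assert!(!check); // moving a large value into Arc::new is not linted
    }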
#[instrument(skip(tcx, output), level = "debug")]
|
||||
|
@ -4,7 +4,7 @@ use crate::fluent_generated as fluent;
|
||||
use rustc_errors::ErrorGuaranteed;
|
||||
use rustc_errors::IntoDiagnostic;
|
||||
use rustc_macros::{Diagnostic, LintDiagnostic};
|
||||
use rustc_span::Span;
|
||||
use rustc_span::{Span, Symbol};
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(monomorphize_recursion_limit)]
|
||||
@ -33,6 +33,14 @@ pub struct TypeLengthLimit {
|
||||
pub type_length: usize,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(monomorphize_no_optimized_mir)]
|
||||
pub struct NoOptimizedMir {
|
||||
#[note]
|
||||
pub span: Span,
|
||||
pub crate_name: Symbol,
|
||||
}
|
||||
|
||||
pub struct UnusedGenericParamsHint {
|
||||
pub span: Span,
|
||||
pub param_spans: Vec<Span>,
|
||||
|
@ -132,7 +132,7 @@ fn maybe_source_file_to_parser(
|
||||
sess: &ParseSess,
|
||||
source_file: Lrc<SourceFile>,
|
||||
) -> Result<Parser<'_>, Vec<Diagnostic>> {
|
||||
let end_pos = source_file.end_pos;
|
||||
let end_pos = source_file.end_position();
|
||||
let stream = maybe_file_to_stream(sess, source_file, None)?;
|
||||
let mut parser = stream_to_parser(sess, stream, None);
|
||||
if parser.token == token::Eof {
|
||||
|
@ -2477,9 +2477,7 @@ impl<'a> Parser<'a> {
|
||||
} else {
|
||||
self.expect(&token::Eq)?;
|
||||
}
|
||||
let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
|
||||
this.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), None.into())
|
||||
})?;
|
||||
let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), None.into())?;
|
||||
let span = lo.to(expr.span);
|
||||
self.sess.gated_spans.gate(sym::let_chains, span);
|
||||
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span)))
|
||||
|
@ -9,7 +9,6 @@ edition = "2021"
|
||||
[dependencies]
|
||||
field-offset = "0.3.5"
|
||||
measureme = "10.0.0"
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||
rustc_errors = { path = "../rustc_errors" }
|
||||
rustc_hir = { path = "../rustc_hir" }
|
||||
|
@ -5,7 +5,7 @@ use crate::ich::StableHashingContext;
|
||||
|
||||
use rustc_ast as ast;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||
use rustc_span::{BytePos, NormalizedPos, SourceFile};
|
||||
use rustc_span::SourceFile;
|
||||
use std::assert_matches::assert_matches;
|
||||
|
||||
use smallvec::SmallVec;
|
||||
@ -67,8 +67,8 @@ impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
|
||||
src: _,
|
||||
ref src_hash,
|
||||
external_src: _,
|
||||
start_pos,
|
||||
end_pos: _,
|
||||
start_pos: _,
|
||||
source_len: _,
|
||||
lines: _,
|
||||
ref multibyte_chars,
|
||||
ref non_narrow_chars,
|
||||
@ -85,56 +85,30 @@ impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {
|
||||
// We only hash the relative position within this source_file
|
||||
lines.len().hash_stable(hcx, hasher);
|
||||
for &line in lines.iter() {
|
||||
stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
|
||||
line.hash_stable(hcx, hasher);
|
||||
}
|
||||
});
|
||||
|
||||
// We only hash the relative position within this source_file
|
||||
multibyte_chars.len().hash_stable(hcx, hasher);
|
||||
for &char_pos in multibyte_chars.iter() {
|
||||
stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
|
||||
char_pos.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
non_narrow_chars.len().hash_stable(hcx, hasher);
|
||||
for &char_pos in non_narrow_chars.iter() {
|
||||
stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
|
||||
char_pos.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
normalized_pos.len().hash_stable(hcx, hasher);
|
||||
for &char_pos in normalized_pos.iter() {
|
||||
stable_normalized_pos(char_pos, start_pos).hash_stable(hcx, hasher);
|
||||
char_pos.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
cnum.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
fn stable_byte_pos(pos: BytePos, source_file_start: BytePos) -> u32 {
|
||||
pos.0 - source_file_start.0
|
||||
}
|
||||
|
||||
fn stable_multibyte_char(mbc: rustc_span::MultiByteChar, source_file_start: BytePos) -> (u32, u32) {
|
||||
let rustc_span::MultiByteChar { pos, bytes } = mbc;
|
||||
|
||||
(pos.0 - source_file_start.0, bytes as u32)
|
||||
}
|
||||
|
||||
fn stable_non_narrow_char(
|
||||
swc: rustc_span::NonNarrowChar,
|
||||
source_file_start: BytePos,
|
||||
) -> (u32, u32) {
|
||||
let pos = swc.pos();
|
||||
let width = swc.width();
|
||||
|
||||
(pos.0 - source_file_start.0, width as u32)
|
||||
}
|
||||
|
||||
fn stable_normalized_pos(np: NormalizedPos, source_file_start: BytePos) -> (u32, u32) {
|
||||
let NormalizedPos { pos, diff } = np;
|
||||
|
||||
(pos.0 - source_file_start.0, diff)
|
||||
}
|
||||
|
||||
impl<'tcx> HashStable<StableHashingContext<'tcx>> for rustc_feature::Features {
|
||||
fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) {
|
||||
// Unfortunately we cannot exhaustively list fields here, since the
|
||||
|
@ -4,15 +4,17 @@
|
||||
//! until stable MIR is complete.
|
||||
|
||||
use std::fmt::Debug;
|
||||
use std::ops::Index;
|
||||
use std::ops::{ControlFlow, Index};
|
||||
|
||||
use crate::rustc_internal;
|
||||
use crate::stable_mir::CompilerError;
|
||||
use crate::{
|
||||
rustc_smir::Tables,
|
||||
stable_mir::{self, with},
|
||||
};
|
||||
use rustc_driver::{Callbacks, Compilation, RunCompiler};
|
||||
use rustc_interface::{interface, Queries};
|
||||
use rustc_middle::mir::interpret::AllocId;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_session::EarlyErrorHandler;
|
||||
pub use rustc_span::def_id::{CrateNum, DefId};
|
||||
@ -133,6 +135,10 @@ impl<'tcx> Tables<'tcx> {
|
||||
stable_mir::ty::ImplDef(self.create_def_id(did))
|
||||
}
|
||||
|
||||
pub fn prov(&mut self, aid: AllocId) -> stable_mir::ty::Prov {
|
||||
stable_mir::ty::Prov(self.create_alloc_id(aid))
|
||||
}
|
||||
|
||||
fn create_def_id(&mut self, did: DefId) -> stable_mir::DefId {
|
||||
// FIXME: this becomes inefficient when we have too many ids
|
||||
for (i, &d) in self.def_ids.iter().enumerate() {
|
||||
@ -144,6 +150,16 @@ impl<'tcx> Tables<'tcx> {
|
||||
self.def_ids.push(did);
|
||||
stable_mir::DefId(id)
|
||||
}
|
||||
|
||||
fn create_alloc_id(&mut self, aid: AllocId) -> stable_mir::AllocId {
|
||||
// FIXME: this becomes inefficient when we have too many ids
|
||||
if let Some(i) = self.alloc_ids.iter().position(|a| *a == aid) {
|
||||
return stable_mir::AllocId(i);
|
||||
};
|
||||
let id = self.def_ids.len();
|
||||
self.alloc_ids.push(aid);
|
||||
stable_mir::AllocId(id)
|
||||
}
|
||||
}
|
||||
|
||||
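create_def_id and create_alloc_id above intern ids in plain Vecs with a linear scan, which is why both carry a FIXME about efficiency with many ids. A self-contained sketch of that pattern (the generic Interner type is illustrative, not the Tables API):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Handle(usize);

    struct Interner<T: PartialEq> {
        items: Vec<T>,
    }

    impl<T: PartialEq> Interner<T> {
        fn new() -> Self {
            Interner { items: Vec::new() }
        }

        fn intern(&mut self, value: T) -> Handle {
            // Reuse an existing slot if the value was seen before (linear lookup).
            if let Some(i) = self.items.iter().position(|v| *v == value) {
                return Handle(i);
            }
            let id = self.items.len();
            self.items.push(value);
            Handle(id)
        }
    }

    fn main() {
        let mut ids = Interner::new();
        let a = ids.intern("alloc-1");
        let b = ids.intern("alloc-2");
        assert_eq!(a, ids.intern("alloc-1")); // same handle for the same value
        assert_ne!(a, b);
    }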
pub fn crate_num(item: &stable_mir::Crate) -> CrateNum {
|
||||
@ -151,7 +167,7 @@ pub fn crate_num(item: &stable_mir::Crate) -> CrateNum {
|
||||
}
|
||||
|
||||
pub fn run(tcx: TyCtxt<'_>, f: impl FnOnce()) {
|
||||
crate::stable_mir::run(Tables { tcx, def_ids: vec![], types: vec![] }, f);
|
||||
crate::stable_mir::run(Tables { tcx, def_ids: vec![], alloc_ids: vec![], types: vec![] }, f);
|
||||
}
|
||||
|
||||
/// A type that provides internal information but that can still be used for debug purpose.
|
||||
@ -174,27 +190,45 @@ pub(crate) fn opaque<T: Debug>(value: &T) -> Opaque {
|
||||
Opaque(format!("{value:?}"))
|
||||
}
|
||||
|
||||
pub struct StableMir {
|
||||
pub struct StableMir<B = (), C = ()>
|
||||
where
|
||||
B: Send,
|
||||
C: Send,
|
||||
{
|
||||
args: Vec<String>,
|
||||
callback: fn(TyCtxt<'_>),
|
||||
callback: fn(TyCtxt<'_>) -> ControlFlow<B, C>,
|
||||
result: Option<ControlFlow<B, C>>,
|
||||
}
|
||||
|
||||
impl StableMir {
|
||||
impl<B, C> StableMir<B, C>
|
||||
where
|
||||
B: Send,
|
||||
C: Send,
|
||||
{
|
||||
/// Creates a new `StableMir` instance, with given test_function and arguments.
|
||||
pub fn new(args: Vec<String>, callback: fn(TyCtxt<'_>)) -> Self {
|
||||
StableMir { args, callback }
|
||||
pub fn new(args: Vec<String>, callback: fn(TyCtxt<'_>) -> ControlFlow<B, C>) -> Self {
|
||||
StableMir { args, callback, result: None }
|
||||
}
|
||||
|
||||
/// Runs the compiler against given target and tests it with `test_function`
|
||||
pub fn run(&mut self) {
|
||||
rustc_driver::catch_fatal_errors(|| {
|
||||
RunCompiler::new(&self.args.clone(), self).run().unwrap();
|
||||
})
|
||||
.unwrap();
|
||||
pub fn run(&mut self) -> Result<C, CompilerError<B>> {
|
||||
let compiler_result =
|
||||
rustc_driver::catch_fatal_errors(|| RunCompiler::new(&self.args.clone(), self).run());
|
||||
match (compiler_result, self.result.take()) {
|
||||
(Ok(Ok(())), Some(ControlFlow::Continue(value))) => Ok(value),
|
||||
(Ok(Ok(())), Some(ControlFlow::Break(value))) => Err(CompilerError::Interrupted(value)),
|
||||
(Ok(Ok(_)), None) => Err(CompilerError::Skipped),
|
||||
(Ok(Err(_)), _) => Err(CompilerError::CompilationFailed),
|
||||
(Err(_), _) => Err(CompilerError::ICE),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Callbacks for StableMir {
|
||||
impl<B, C> Callbacks for StableMir<B, C>
|
||||
where
|
||||
B: Send,
|
||||
C: Send,
|
||||
{
|
||||
/// Called after analysis. Return value instructs the compiler whether to
|
||||
/// continue the compilation afterwards (defaults to `Compilation::Continue`)
|
||||
fn after_analysis<'tcx>(
|
||||
@ -204,9 +238,14 @@ impl Callbacks for StableMir {
|
||||
queries: &'tcx Queries<'tcx>,
|
||||
) -> Compilation {
|
||||
queries.global_ctxt().unwrap().enter(|tcx| {
|
||||
rustc_internal::run(tcx, || (self.callback)(tcx));
|
||||
});
|
||||
// No need to keep going.
|
||||
Compilation::Stop
|
||||
rustc_internal::run(tcx, || {
|
||||
self.result = Some((self.callback)(tcx));
|
||||
});
|
||||
if self.result.as_ref().is_some_and(|val| val.is_continue()) {
|
||||
Compilation::Continue
|
||||
} else {
|
||||
Compilation::Stop
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
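With this change StableMir::run returns Result<C, CompilerError<B>>: the callback's ControlFlow value is stored in self.result during after_analysis and combined with the driver outcome once the run finishes. A standalone sketch of that mapping (DriverError stands in for CompilerError, and the nested Results stand in for rustc_driver's return types):

    use std::ops::ControlFlow;

    #[derive(Debug, PartialEq)]
    enum DriverError<B> {
        Ice,
        CompilationFailed,
        Interrupted(B),
        Skipped,
    }

    // Combine the driver outcome with the callback's ControlFlow result,
    // mirroring the match in StableMir::run above.
    fn map_outcome<B, C>(
        driver: Result<Result<(), ()>, ()>,
        callback: Option<ControlFlow<B, C>>,
    ) -> Result<C, DriverError<B>> {
        match (driver, callback) {
            (Ok(Ok(())), Some(ControlFlow::Continue(value))) => Ok(value),
            (Ok(Ok(())), Some(ControlFlow::Break(value))) => Err(DriverError::Interrupted(value)),
            (Ok(Ok(())), None) => Err(DriverError::Skipped),
            (Ok(Err(())), _) => Err(DriverError::CompilationFailed),
            (Err(()), _) => Err(DriverError::Ice),
        }
    }

    fn main() {
        let ok: Result<i32, DriverError<&str>> = map_outcome(Ok(Ok(())), Some(ControlFlow::Continue(42)));
        assert_eq!(ok, Ok(42));
        let skipped: Result<i32, DriverError<&str>> = map_outcome(Ok(Ok(())), None);
        assert_eq!(skipped, Err(DriverError::Skipped));
    }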
@ -1,7 +1,6 @@
|
||||
use rustc_middle::mir::interpret::{alloc_range, AllocRange, ConstValue, Pointer};
|
||||
|
||||
use crate::{
|
||||
rustc_internal::opaque,
|
||||
rustc_smir::{Stable, Tables},
|
||||
stable_mir::mir::Mutability,
|
||||
stable_mir::ty::{Allocation, ProvenanceMap},
|
||||
@ -113,7 +112,7 @@ pub(super) fn allocation_filter<'tcx>(
|
||||
.iter()
|
||||
.filter(|a| a.0 >= alloc_range.start && a.0 <= alloc_range.end())
|
||||
{
|
||||
ptrs.push((offset.bytes_usize() - alloc_range.start.bytes_usize(), opaque(prov)));
|
||||
ptrs.push((offset.bytes_usize() - alloc_range.start.bytes_usize(), tables.prov(*prov)));
|
||||
}
|
||||
Allocation {
|
||||
bytes: bytes,
|
||||
|
@ -1,5 +1,5 @@
|
||||
//! Module that implements what will become the rustc side of Stable MIR.
|
||||
//!
|
||||
|
||||
//! This module is responsible for building Stable MIR components from internal components.
|
||||
//!
|
||||
//! This module is not intended to be invoked directly by users. It will eventually
|
||||
@ -10,12 +10,13 @@
|
||||
use crate::rustc_internal::{self, opaque};
|
||||
use crate::stable_mir::mir::{CopyNonOverlapping, UserTypeProjection, VariantIdx};
|
||||
use crate::stable_mir::ty::{FloatTy, GenericParamDef, IntTy, Movability, RigidTy, TyKind, UintTy};
|
||||
use crate::stable_mir::{self, Context};
|
||||
use crate::stable_mir::{self, CompilerError, Context};
|
||||
use rustc_hir as hir;
|
||||
use rustc_middle::mir::interpret::alloc_range;
|
||||
use rustc_middle::mir::interpret::{alloc_range, AllocId};
|
||||
use rustc_middle::mir::{self, ConstantKind};
|
||||
use rustc_middle::ty::{self, Ty, TyCtxt, Variance};
|
||||
use rustc_span::def_id::{CrateNum, DefId, LOCAL_CRATE};
|
||||
use rustc_span::ErrorGuaranteed;
|
||||
use rustc_target::abi::FieldIdx;
|
||||
use tracing::debug;
|
||||
|
||||
@ -37,9 +38,14 @@ impl<'tcx> Context for Tables<'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
fn name_of_def_id(&self, def_id: stable_mir::DefId) -> String {
|
||||
self.tcx.def_path_str(self[def_id])
|
||||
}
|
||||
|
||||
fn all_local_items(&mut self) -> stable_mir::CrateItems {
|
||||
self.tcx.mir_keys(()).iter().map(|item| self.crate_item(item.to_def_id())).collect()
|
||||
}
|
||||
|
||||
fn entry_fn(&mut self) -> Option<stable_mir::CrateItem> {
|
||||
Some(self.crate_item(self.tcx.entry_fn(())?.0))
|
||||
}
|
||||
@ -125,6 +131,7 @@ impl<'tcx> Context for Tables<'tcx> {
|
||||
pub struct Tables<'tcx> {
|
||||
pub tcx: TyCtxt<'tcx>,
|
||||
pub def_ids: Vec<DefId>,
|
||||
pub alloc_ids: Vec<AllocId>,
|
||||
pub types: Vec<Ty<'tcx>>,
|
||||
}
|
||||
|
||||
@ -1446,3 +1453,9 @@ impl<'tcx> Stable<'tcx> for rustc_span::Span {
|
||||
opaque(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> From<ErrorGuaranteed> for CompilerError<T> {
|
||||
fn from(_error: ErrorGuaranteed) -> Self {
|
||||
CompilerError::CompilationFailed
|
||||
}
|
||||
}
|
||||
|
@ -12,6 +12,8 @@
|
||||
//! If you need an internal construct, consider using `rustc_internal` or `rustc_smir`.
|
||||
|
||||
use std::cell::Cell;
|
||||
use std::fmt;
|
||||
use std::fmt::Debug;
|
||||
|
||||
use self::ty::{
|
||||
GenericPredicates, Generics, ImplDef, ImplTrait, Span, TraitDecl, TraitDef, Ty, TyKind,
|
||||
@ -29,9 +31,22 @@ pub type Symbol = String;
|
||||
pub type CrateNum = usize;
|
||||
|
||||
/// A unique identification number for each item accessible for the current compilation unit.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
pub struct DefId(pub(crate) usize);
|
||||
|
||||
impl Debug for DefId {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("DefId:")
|
||||
.field("id", &self.0)
|
||||
.field("name", &with(|cx| cx.name_of_def_id(*self)))
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// A unique identification number for each provenance
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub struct AllocId(pub(crate) usize);
|
||||
|
||||
/// A list of crate items.
|
||||
pub type CrateItems = Vec<CrateItem>;
|
||||
|
||||
@@ -41,6 +56,20 @@ pub type TraitDecls = Vec<TraitDef>;
/// A list of impl trait decls.
pub type ImplTraitDecls = Vec<ImplDef>;

/// An error type used to represent an error that has already been reported by the compiler.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CompilerError<T> {
/// Internal compiler error (I.e.: Compiler crashed).
ICE,
/// Compilation failed.
CompilationFailed,
/// Compilation was interrupted.
Interrupted(T),
/// Compilation skipped. This happens when users invoke rustc to retrieve information such as
/// --version.
Skipped,
}

/// Holds information about a crate.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Crate {
@ -123,6 +152,9 @@ pub trait Context {
|
||||
/// Find a crate with the given name.
|
||||
fn find_crate(&self, name: &str) -> Option<Crate>;
|
||||
|
||||
/// Prints the name of given `DefId`
|
||||
fn name_of_def_id(&self, def_id: DefId) -> String;
|
||||
|
||||
/// Obtain the representation of a type.
|
||||
fn ty_kind(&mut self, ty: Ty) -> TyKind;
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{mir::Mutability, mir::Safety, with, DefId};
|
||||
use super::{mir::Mutability, mir::Safety, with, AllocId, DefId};
|
||||
use crate::rustc_internal::Opaque;
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
@ -260,7 +260,9 @@ pub struct BoundTy {
|
||||
|
||||
pub type Bytes = Vec<Option<u8>>;
|
||||
pub type Size = usize;
|
||||
pub type Prov = Opaque;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub struct Prov(pub(crate) AllocId);
|
||||
pub type Align = u64;
|
||||
pub type Promoted = u32;
|
||||
pub type InitMaskMaterialized = Vec<u64>;
|
||||
|
@ -11,26 +11,19 @@ mod tests;
|
||||
/// is detected at runtime.
|
||||
pub fn analyze_source_file(
|
||||
src: &str,
|
||||
source_file_start_pos: BytePos,
|
||||
) -> (Vec<BytePos>, Vec<MultiByteChar>, Vec<NonNarrowChar>) {
|
||||
let mut lines = vec![source_file_start_pos];
|
||||
) -> (Vec<RelativeBytePos>, Vec<MultiByteChar>, Vec<NonNarrowChar>) {
|
||||
let mut lines = vec![RelativeBytePos::from_u32(0)];
|
||||
let mut multi_byte_chars = vec![];
|
||||
let mut non_narrow_chars = vec![];
|
||||
|
||||
// Calls the right implementation, depending on hardware support available.
|
||||
analyze_source_file_dispatch(
|
||||
src,
|
||||
source_file_start_pos,
|
||||
&mut lines,
|
||||
&mut multi_byte_chars,
|
||||
&mut non_narrow_chars,
|
||||
);
|
||||
analyze_source_file_dispatch(src, &mut lines, &mut multi_byte_chars, &mut non_narrow_chars);
|
||||
|
||||
// The code above optimistically registers a new line *after* each \n
|
||||
// it encounters. If that point is already outside the source_file, remove
|
||||
// it again.
|
||||
if let Some(&last_line_start) = lines.last() {
|
||||
let source_file_end = source_file_start_pos + BytePos::from_usize(src.len());
|
||||
let source_file_end = RelativeBytePos::from_usize(src.len());
|
||||
assert!(source_file_end >= last_line_start);
|
||||
if last_line_start == source_file_end {
|
||||
lines.pop();
|
||||
@ -43,14 +36,12 @@ pub fn analyze_source_file(
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
|
||||
fn analyze_source_file_dispatch(src: &str,
|
||||
source_file_start_pos: BytePos,
|
||||
lines: &mut Vec<BytePos>,
|
||||
lines: &mut Vec<RelativeBytePos>,
|
||||
multi_byte_chars: &mut Vec<MultiByteChar>,
|
||||
non_narrow_chars: &mut Vec<NonNarrowChar>) {
|
||||
if is_x86_feature_detected!("sse2") {
|
||||
unsafe {
|
||||
analyze_source_file_sse2(src,
|
||||
source_file_start_pos,
|
||||
lines,
|
||||
multi_byte_chars,
|
||||
non_narrow_chars);
|
||||
@ -58,7 +49,7 @@ cfg_if::cfg_if! {
|
||||
} else {
|
||||
analyze_source_file_generic(src,
|
||||
src.len(),
|
||||
source_file_start_pos,
|
||||
RelativeBytePos::from_u32(0),
|
||||
lines,
|
||||
multi_byte_chars,
|
||||
non_narrow_chars);
|
||||
@ -72,8 +63,7 @@ cfg_if::cfg_if! {
|
||||
/// SSE2 intrinsics to quickly find all newlines.
|
||||
#[target_feature(enable = "sse2")]
|
||||
unsafe fn analyze_source_file_sse2(src: &str,
|
||||
output_offset: BytePos,
|
||||
lines: &mut Vec<BytePos>,
|
||||
lines: &mut Vec<RelativeBytePos>,
|
||||
multi_byte_chars: &mut Vec<MultiByteChar>,
|
||||
non_narrow_chars: &mut Vec<NonNarrowChar>) {
|
||||
#[cfg(target_arch = "x86")]
|
||||
@ -129,8 +119,7 @@ cfg_if::cfg_if! {
|
||||
if control_char_mask == newlines_mask {
|
||||
// All control characters are newlines, record them
|
||||
let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
|
||||
let output_offset = output_offset +
|
||||
BytePos::from_usize(chunk_index * CHUNK_SIZE + 1);
|
||||
let output_offset = RelativeBytePos::from_usize(chunk_index * CHUNK_SIZE + 1);
|
||||
|
||||
loop {
|
||||
let index = newlines_mask.trailing_zeros();
|
||||
@ -140,7 +129,7 @@ cfg_if::cfg_if! {
|
||||
break
|
||||
}
|
||||
|
||||
lines.push(BytePos(index) + output_offset);
|
||||
lines.push(RelativeBytePos(index) + output_offset);
|
||||
|
||||
// Clear the bit, so we can find the next one.
|
||||
newlines_mask &= (!1) << index;
|
||||
@ -165,7 +154,7 @@ cfg_if::cfg_if! {
|
||||
intra_chunk_offset = analyze_source_file_generic(
|
||||
&src[scan_start .. ],
|
||||
CHUNK_SIZE - intra_chunk_offset,
|
||||
BytePos::from_usize(scan_start) + output_offset,
|
||||
RelativeBytePos::from_usize(scan_start),
|
||||
lines,
|
||||
multi_byte_chars,
|
||||
non_narrow_chars
|
||||
@ -177,7 +166,7 @@ cfg_if::cfg_if! {
|
||||
if tail_start < src.len() {
|
||||
analyze_source_file_generic(&src[tail_start ..],
|
||||
src.len() - tail_start,
|
||||
output_offset + BytePos::from_usize(tail_start),
|
||||
RelativeBytePos::from_usize(tail_start),
|
||||
lines,
|
||||
multi_byte_chars,
|
||||
non_narrow_chars);
|
||||
@ -187,13 +176,12 @@ cfg_if::cfg_if! {
|
||||
|
||||
// The target (or compiler version) does not support SSE2 ...
|
||||
fn analyze_source_file_dispatch(src: &str,
|
||||
source_file_start_pos: BytePos,
|
||||
lines: &mut Vec<BytePos>,
|
||||
lines: &mut Vec<RelativeBytePos>,
|
||||
multi_byte_chars: &mut Vec<MultiByteChar>,
|
||||
non_narrow_chars: &mut Vec<NonNarrowChar>) {
|
||||
analyze_source_file_generic(src,
|
||||
src.len(),
|
||||
source_file_start_pos,
|
||||
RelativeBytePos::from_u32(0),
|
||||
lines,
|
||||
multi_byte_chars,
|
||||
non_narrow_chars);
|
||||
@ -207,8 +195,8 @@ cfg_if::cfg_if! {
|
||||
fn analyze_source_file_generic(
|
||||
src: &str,
|
||||
scan_len: usize,
|
||||
output_offset: BytePos,
|
||||
lines: &mut Vec<BytePos>,
|
||||
output_offset: RelativeBytePos,
|
||||
lines: &mut Vec<RelativeBytePos>,
|
||||
multi_byte_chars: &mut Vec<MultiByteChar>,
|
||||
non_narrow_chars: &mut Vec<NonNarrowChar>,
|
||||
) -> usize {
|
||||
@ -230,11 +218,11 @@ fn analyze_source_file_generic(
|
||||
// This is an ASCII control character, it could be one of the cases
|
||||
// that are interesting to us.
|
||||
|
||||
let pos = BytePos::from_usize(i) + output_offset;
|
||||
let pos = RelativeBytePos::from_usize(i) + output_offset;
|
||||
|
||||
match byte {
|
||||
b'\n' => {
|
||||
lines.push(pos + BytePos(1));
|
||||
lines.push(pos + RelativeBytePos(1));
|
||||
}
|
||||
b'\t' => {
|
||||
non_narrow_chars.push(NonNarrowChar::Tab(pos));
|
||||
@ -250,7 +238,7 @@ fn analyze_source_file_generic(
|
||||
let c = src[i..].chars().next().unwrap();
|
||||
char_len = c.len_utf8();
|
||||
|
||||
let pos = BytePos::from_usize(i) + output_offset;
|
||||
let pos = RelativeBytePos::from_usize(i) + output_offset;
|
||||
|
||||
if char_len > 1 {
|
||||
assert!((2..=4).contains(&char_len));
|
||||
|
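analyze_source_file now records everything as RelativeBytePos offsets from the start of the file, which is why the source_file_start_pos parameter disappears throughout this hunk. A simplified sketch of the line-start scan in relative terms (plain u32 offsets instead of RelativeBytePos, and no SSE2 fast path):

    // Record the relative byte offset of each line start, mirroring the
    // "optimistic newline" behaviour described in the code above.
    fn relative_line_starts(src: &str) -> Vec<u32> {
        let mut lines = vec![0u32];
        for (i, b) in src.bytes().enumerate() {
            if b == b'\n' {
                // Optimistically register a line start after every '\n' ...
                lines.push(i as u32 + 1);
            }
        }
        // ... and drop it again if it falls exactly at the end of the file.
        if lines.last() == Some(&(src.len() as u32)) {
            lines.pop();
        }
        lines
    }

    fn main() {
        assert_eq!(relative_line_starts("a\nc"), vec![0, 2]);
        assert_eq!(relative_line_starts("012345678\nabcdef012345678\na"), vec![0, 10, 26]);
        assert_eq!(relative_line_starts(""), Vec::<u32>::new());
    }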
@ -3,29 +3,28 @@ use super::*;
|
||||
macro_rules! test {
|
||||
(case: $test_name:ident,
|
||||
text: $text:expr,
|
||||
source_file_start_pos: $source_file_start_pos:expr,
|
||||
lines: $lines:expr,
|
||||
multi_byte_chars: $multi_byte_chars:expr,
|
||||
non_narrow_chars: $non_narrow_chars:expr,) => {
|
||||
#[test]
|
||||
fn $test_name() {
|
||||
let (lines, multi_byte_chars, non_narrow_chars) =
|
||||
analyze_source_file($text, BytePos($source_file_start_pos));
|
||||
let (lines, multi_byte_chars, non_narrow_chars) = analyze_source_file($text);
|
||||
|
||||
let expected_lines: Vec<BytePos> = $lines.into_iter().map(BytePos).collect();
|
||||
let expected_lines: Vec<RelativeBytePos> =
|
||||
$lines.into_iter().map(RelativeBytePos).collect();
|
||||
|
||||
assert_eq!(lines, expected_lines);
|
||||
|
||||
let expected_mbcs: Vec<MultiByteChar> = $multi_byte_chars
|
||||
.into_iter()
|
||||
.map(|(pos, bytes)| MultiByteChar { pos: BytePos(pos), bytes })
|
||||
.map(|(pos, bytes)| MultiByteChar { pos: RelativeBytePos(pos), bytes })
|
||||
.collect();
|
||||
|
||||
assert_eq!(multi_byte_chars, expected_mbcs);
|
||||
|
||||
let expected_nncs: Vec<NonNarrowChar> = $non_narrow_chars
|
||||
.into_iter()
|
||||
.map(|(pos, width)| NonNarrowChar::new(BytePos(pos), width))
|
||||
.map(|(pos, width)| NonNarrowChar::new(RelativeBytePos(pos), width))
|
||||
.collect();
|
||||
|
||||
assert_eq!(non_narrow_chars, expected_nncs);
|
||||
@ -36,7 +35,6 @@ macro_rules! test {
|
||||
test!(
|
||||
case: empty_text,
|
||||
text: "",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![],
|
||||
multi_byte_chars: vec![],
|
||||
non_narrow_chars: vec![],
|
||||
@ -45,7 +43,6 @@ test!(
|
||||
test!(
|
||||
case: newlines_short,
|
||||
text: "a\nc",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0, 2],
|
||||
multi_byte_chars: vec![],
|
||||
non_narrow_chars: vec![],
|
||||
@ -54,7 +51,6 @@ test!(
|
||||
test!(
|
||||
case: newlines_long,
|
||||
text: "012345678\nabcdef012345678\na",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0, 10, 26],
|
||||
multi_byte_chars: vec![],
|
||||
non_narrow_chars: vec![],
|
||||
@ -63,7 +59,6 @@ test!(
|
||||
test!(
|
||||
case: newline_and_multi_byte_char_in_same_chunk,
|
||||
text: "01234β789\nbcdef0123456789abcdef",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0, 11],
|
||||
multi_byte_chars: vec![(5, 2)],
|
||||
non_narrow_chars: vec![],
|
||||
@ -72,7 +67,6 @@ test!(
|
||||
test!(
|
||||
case: newline_and_control_char_in_same_chunk,
|
||||
text: "01234\u{07}6789\nbcdef0123456789abcdef",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0, 11],
|
||||
multi_byte_chars: vec![],
|
||||
non_narrow_chars: vec![(5, 0)],
|
||||
@ -81,7 +75,6 @@ test!(
|
||||
test!(
|
||||
case: multi_byte_char_short,
|
||||
text: "aβc",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0],
|
||||
multi_byte_chars: vec![(1, 2)],
|
||||
non_narrow_chars: vec![],
|
||||
@ -90,7 +83,6 @@ test!(
|
||||
test!(
|
||||
case: multi_byte_char_long,
|
||||
text: "0123456789abcΔf012345β",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0],
|
||||
multi_byte_chars: vec![(13, 2), (22, 2)],
|
||||
non_narrow_chars: vec![],
|
||||
@ -99,7 +91,6 @@ test!(
|
||||
test!(
|
||||
case: multi_byte_char_across_chunk_boundary,
|
||||
text: "0123456789abcdeΔ123456789abcdef01234",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0],
|
||||
multi_byte_chars: vec![(15, 2)],
|
||||
non_narrow_chars: vec![],
|
||||
@ -108,7 +99,6 @@ test!(
|
||||
test!(
|
||||
case: multi_byte_char_across_chunk_boundary_tail,
|
||||
text: "0123456789abcdeΔ....",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0],
|
||||
multi_byte_chars: vec![(15, 2)],
|
||||
non_narrow_chars: vec![],
|
||||
@ -117,7 +107,6 @@ test!(
|
||||
test!(
|
||||
case: non_narrow_short,
|
||||
text: "0\t2",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0],
|
||||
multi_byte_chars: vec![],
|
||||
non_narrow_chars: vec![(1, 4)],
|
||||
@ -126,7 +115,6 @@ test!(
|
||||
test!(
|
||||
case: non_narrow_long,
|
||||
text: "01\t3456789abcdef01234567\u{07}9",
|
||||
source_file_start_pos: 0,
|
||||
lines: vec![0],
|
||||
multi_byte_chars: vec![],
|
||||
non_narrow_chars: vec![(2, 4), (24, 0)],
|
||||
@ -135,8 +123,7 @@ test!(
|
||||
test!(
|
||||
case: output_offset_all,
|
||||
text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
|
||||
source_file_start_pos: 1000,
|
||||
lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
|
||||
multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
|
||||
non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
|
||||
lines: vec![0, 7, 27],
|
||||
multi_byte_chars: vec![(13, 2), (29, 2)],
|
||||
non_narrow_chars: vec![(2, 4), (24, 0)],
|
||||
);
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::source_map::SourceMap;
|
||||
use crate::{BytePos, SourceFile, SpanData};
|
||||
use crate::{BytePos, Pos, RelativeBytePos, SourceFile, SpanData};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use std::ops::Range;
|
||||
|
||||
@ -37,6 +37,7 @@ impl CacheEntry {
|
||||
self.file_index = file_idx;
|
||||
}
|
||||
|
||||
let pos = self.file.relative_position(pos);
|
||||
let line_index = self.file.lookup_line(pos).unwrap();
|
||||
let line_bounds = self.file.line_bounds(line_index);
|
||||
self.line_number = line_index + 1;
|
||||
@ -79,7 +80,7 @@ impl<'sm> CachingSourceMapView<'sm> {
|
||||
pub fn byte_pos_to_line_and_col(
|
||||
&mut self,
|
||||
pos: BytePos,
|
||||
) -> Option<(Lrc<SourceFile>, usize, BytePos)> {
|
||||
) -> Option<(Lrc<SourceFile>, usize, RelativeBytePos)> {
|
||||
self.time_stamp += 1;
|
||||
|
||||
// Check if the position is in one of the cached lines
|
||||
@ -88,11 +89,8 @@ impl<'sm> CachingSourceMapView<'sm> {
|
||||
let cache_entry = &mut self.line_cache[cache_idx as usize];
|
||||
cache_entry.touch(self.time_stamp);
|
||||
|
||||
return Some((
|
||||
cache_entry.file.clone(),
|
||||
cache_entry.line_number,
|
||||
pos - cache_entry.line.start,
|
||||
));
|
||||
let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
|
||||
return Some((cache_entry.file.clone(), cache_entry.line_number, col));
|
||||
}
|
||||
|
||||
// No cache hit ...
|
||||
@ -108,7 +106,8 @@ impl<'sm> CachingSourceMapView<'sm> {
|
||||
let cache_entry = &mut self.line_cache[oldest];
|
||||
cache_entry.update(new_file_and_idx, pos, self.time_stamp);
|
||||
|
||||
Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line.start))
|
||||
let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
|
||||
Some((cache_entry.file.clone(), cache_entry.line_number, col))
|
||||
}
|
||||
|
||||
pub fn span_data_to_lines_and_cols(
|
||||
|
@ -1107,27 +1107,27 @@ impl fmt::Debug for SpanData {
|
||||
}
|
||||
|
||||
/// Identifies an offset of a multi-byte character in a `SourceFile`.
|
||||
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)]
|
||||
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug, HashStable_Generic)]
|
||||
pub struct MultiByteChar {
|
||||
/// The absolute offset of the character in the `SourceMap`.
|
||||
pub pos: BytePos,
|
||||
/// The relative offset of the character in the `SourceFile`.
|
||||
pub pos: RelativeBytePos,
|
||||
/// The number of bytes, `>= 2`.
|
||||
pub bytes: u8,
|
||||
}
|
||||
|
||||
/// Identifies an offset of a non-narrow character in a `SourceFile`.
|
||||
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)]
|
||||
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug, HashStable_Generic)]
|
||||
pub enum NonNarrowChar {
|
||||
/// Represents a zero-width character.
|
||||
ZeroWidth(BytePos),
|
||||
ZeroWidth(RelativeBytePos),
|
||||
/// Represents a wide (full-width) character.
|
||||
Wide(BytePos),
|
||||
Wide(RelativeBytePos),
|
||||
/// Represents a tab character, represented visually with a width of 4 characters.
|
||||
Tab(BytePos),
|
||||
Tab(RelativeBytePos),
|
||||
}
|
||||
|
||||
impl NonNarrowChar {
|
||||
fn new(pos: BytePos, width: usize) -> Self {
|
||||
fn new(pos: RelativeBytePos, width: usize) -> Self {
|
||||
match width {
|
||||
0 => NonNarrowChar::ZeroWidth(pos),
|
||||
2 => NonNarrowChar::Wide(pos),
|
||||
@ -1136,8 +1136,8 @@ impl NonNarrowChar {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the absolute offset of the character in the `SourceMap`.
|
||||
pub fn pos(&self) -> BytePos {
|
||||
/// Returns the relative offset of the character in the `SourceFile`.
|
||||
pub fn pos(&self) -> RelativeBytePos {
|
||||
match *self {
|
||||
NonNarrowChar::ZeroWidth(p) | NonNarrowChar::Wide(p) | NonNarrowChar::Tab(p) => p,
|
||||
}
|
||||
@ -1153,10 +1153,10 @@ impl NonNarrowChar {
|
||||
}
|
||||
}
|
||||
|
||||
impl Add<BytePos> for NonNarrowChar {
|
||||
impl Add<RelativeBytePos> for NonNarrowChar {
|
||||
type Output = Self;
|
||||
|
||||
fn add(self, rhs: BytePos) -> Self {
|
||||
fn add(self, rhs: RelativeBytePos) -> Self {
|
||||
match self {
|
||||
NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs),
|
||||
NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs),
|
||||
@ -1165,10 +1165,10 @@ impl Add<BytePos> for NonNarrowChar {
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub<BytePos> for NonNarrowChar {
|
||||
impl Sub<RelativeBytePos> for NonNarrowChar {
|
||||
type Output = Self;
|
||||
|
||||
fn sub(self, rhs: BytePos) -> Self {
|
||||
fn sub(self, rhs: RelativeBytePos) -> Self {
|
||||
match self {
|
||||
NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs),
|
||||
NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs),
|
||||
@ -1178,10 +1178,10 @@ impl Sub<BytePos> for NonNarrowChar {
|
||||
}
|
||||
|
||||
/// Identifies an offset of a character that was normalized away from `SourceFile`.
|
||||
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)]
|
||||
#[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug, HashStable_Generic)]
|
||||
pub struct NormalizedPos {
|
||||
/// The absolute offset of the character in the `SourceMap`.
|
||||
pub pos: BytePos,
|
||||
/// The relative offset of the character in the `SourceFile`.
|
||||
pub pos: RelativeBytePos,
|
||||
/// The difference between original and normalized string at position.
|
||||
pub diff: u32,
|
||||
}
|
||||
@ -1293,7 +1293,7 @@ impl SourceFileHash {
|
||||
#[derive(Clone)]
|
||||
pub enum SourceFileLines {
|
||||
/// The source file lines, in decoded (random-access) form.
|
||||
Lines(Vec<BytePos>),
|
||||
Lines(Vec<RelativeBytePos>),
|
||||
|
||||
/// The source file lines, in undecoded difference list form.
|
||||
Diffs(SourceFileDiffs),
|
||||
@ -1314,11 +1314,6 @@ impl SourceFileLines {
|
||||
/// small crates where very little of `std`'s metadata is used.
|
||||
#[derive(Clone)]
|
||||
pub struct SourceFileDiffs {
|
||||
/// Position of the first line. Note that this is always encoded as a
|
||||
/// `BytePos` because it is often much larger than any of the
|
||||
/// differences.
|
||||
line_start: BytePos,
|
||||
|
||||
/// Always 1, 2, or 4. Always as small as possible, while being big
|
||||
/// enough to hold the length of the longest line in the source file.
|
||||
/// The 1 case is by far the most common.
|
||||
@ -1351,8 +1346,8 @@ pub struct SourceFile {
|
||||
pub external_src: Lock<ExternalSource>,
|
||||
/// The start position of this source in the `SourceMap`.
|
||||
pub start_pos: BytePos,
|
||||
/// The end position of this source in the `SourceMap`.
|
||||
pub end_pos: BytePos,
|
||||
/// The byte length of this source.
|
||||
pub source_len: RelativeBytePos,
|
||||
/// Locations of lines beginnings in the source code.
|
||||
pub lines: Lock<SourceFileLines>,
|
||||
/// Locations of multi-byte characters in the source code.
|
||||
@ -1375,7 +1370,7 @@ impl Clone for SourceFile {
|
||||
src_hash: self.src_hash,
|
||||
external_src: Lock::new(self.external_src.borrow().clone()),
|
||||
start_pos: self.start_pos,
|
||||
end_pos: self.end_pos,
|
||||
source_len: self.source_len,
|
||||
lines: Lock::new(self.lines.borrow().clone()),
|
||||
multibyte_chars: self.multibyte_chars.clone(),
|
||||
non_narrow_chars: self.non_narrow_chars.clone(),
|
||||
@ -1390,8 +1385,8 @@ impl<S: Encoder> Encodable<S> for SourceFile {
|
||||
fn encode(&self, s: &mut S) {
|
||||
self.name.encode(s);
|
||||
self.src_hash.encode(s);
|
||||
self.start_pos.encode(s);
|
||||
self.end_pos.encode(s);
|
||||
// Do not encode `start_pos` as it's global state for this session.
|
||||
self.source_len.encode(s);
|
||||
|
||||
// We are always in `Lines` form by the time we reach here.
|
||||
assert!(self.lines.borrow().is_lines());
|
||||
@ -1422,7 +1417,7 @@ impl<S: Encoder> Encodable<S> for SourceFile {
|
||||
s.emit_u8(bytes_per_diff as u8);
|
||||
|
||||
// Encode the first element.
|
||||
lines[0].encode(s);
|
||||
assert_eq!(lines[0], RelativeBytePos(0));
|
||||
|
||||
// Encode the difference list.
|
||||
let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
|
||||
@ -1465,26 +1460,17 @@ impl<D: Decoder> Decodable<D> for SourceFile {
|
||||
fn decode(d: &mut D) -> SourceFile {
|
||||
let name: FileName = Decodable::decode(d);
|
||||
let src_hash: SourceFileHash = Decodable::decode(d);
|
||||
let start_pos: BytePos = Decodable::decode(d);
|
||||
let end_pos: BytePos = Decodable::decode(d);
|
||||
let source_len: RelativeBytePos = Decodable::decode(d);
|
||||
let lines = {
|
||||
let num_lines: u32 = Decodable::decode(d);
|
||||
if num_lines > 0 {
|
||||
// Read the number of bytes used per diff.
|
||||
let bytes_per_diff = d.read_u8() as usize;
|
||||
|
||||
// Read the first element.
|
||||
let line_start: BytePos = Decodable::decode(d);
|
||||
|
||||
// Read the difference list.
|
||||
let num_diffs = num_lines as usize - 1;
|
||||
let raw_diffs = d.read_raw_bytes(bytes_per_diff * num_diffs).to_vec();
|
||||
SourceFileLines::Diffs(SourceFileDiffs {
|
||||
line_start,
|
||||
bytes_per_diff,
|
||||
num_diffs,
|
||||
raw_diffs,
|
||||
})
|
||||
SourceFileLines::Diffs(SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs })
|
||||
} else {
|
||||
SourceFileLines::Lines(vec![])
|
||||
}
|
||||
@ -1496,8 +1482,8 @@ impl<D: Decoder> Decodable<D> for SourceFile {
|
||||
let cnum: CrateNum = Decodable::decode(d);
|
||||
SourceFile {
|
||||
name,
|
||||
start_pos,
|
||||
end_pos,
|
||||
start_pos: BytePos::from_u32(0),
|
||||
source_len,
|
||||
src: None,
|
||||
src_hash,
|
||||
// Unused - the metadata decoder will construct
|
||||
@ -1523,63 +1509,58 @@ impl SourceFile {
|
||||
pub fn new(
|
||||
name: FileName,
|
||||
mut src: String,
|
||||
start_pos: BytePos,
|
||||
hash_kind: SourceFileHashAlgorithm,
|
||||
) -> Self {
|
||||
) -> Result<Self, OffsetOverflowError> {
|
||||
// Compute the file hash before any normalization.
|
||||
let src_hash = SourceFileHash::new(hash_kind, &src);
|
||||
let normalized_pos = normalize_src(&mut src, start_pos);
|
||||
let normalized_pos = normalize_src(&mut src);
|
||||
|
||||
let name_hash = {
|
||||
let mut hasher: StableHasher = StableHasher::new();
|
||||
name.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
};
|
||||
let end_pos = start_pos.to_usize() + src.len();
|
||||
assert!(end_pos <= u32::MAX as usize);
|
||||
let source_len = src.len();
|
||||
let source_len = u32::try_from(source_len).map_err(|_| OffsetOverflowError)?;
|
||||
|
||||
let (lines, multibyte_chars, non_narrow_chars) =
|
||||
analyze_source_file::analyze_source_file(&src, start_pos);
|
||||
analyze_source_file::analyze_source_file(&src);
|
||||
|
||||
SourceFile {
|
||||
Ok(SourceFile {
|
||||
name,
|
||||
src: Some(Lrc::new(src)),
|
||||
src_hash,
|
||||
external_src: Lock::new(ExternalSource::Unneeded),
|
||||
start_pos,
|
||||
end_pos: Pos::from_usize(end_pos),
|
||||
start_pos: BytePos::from_u32(0),
|
||||
source_len: RelativeBytePos::from_u32(source_len),
|
||||
lines: Lock::new(SourceFileLines::Lines(lines)),
|
||||
multibyte_chars,
|
||||
non_narrow_chars,
|
||||
normalized_pos,
|
||||
name_hash,
|
||||
cnum: LOCAL_CRATE,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn lines<F, R>(&self, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&[BytePos]) -> R,
|
||||
F: FnOnce(&[RelativeBytePos]) -> R,
|
||||
{
|
||||
let mut guard = self.lines.borrow_mut();
|
||||
match &*guard {
|
||||
SourceFileLines::Lines(lines) => f(lines),
|
||||
SourceFileLines::Diffs(SourceFileDiffs {
|
||||
mut line_start,
|
||||
bytes_per_diff,
|
||||
num_diffs,
|
||||
raw_diffs,
|
||||
}) => {
|
||||
SourceFileLines::Diffs(SourceFileDiffs { bytes_per_diff, num_diffs, raw_diffs }) => {
|
||||
// Convert from "diffs" form to "lines" form.
|
||||
let num_lines = num_diffs + 1;
|
||||
let mut lines = Vec::with_capacity(num_lines);
|
||||
let mut line_start = RelativeBytePos(0);
|
||||
lines.push(line_start);
|
||||
|
||||
assert_eq!(*num_diffs, raw_diffs.len() / bytes_per_diff);
|
||||
match bytes_per_diff {
|
||||
1 => {
|
||||
lines.extend(raw_diffs.into_iter().map(|&diff| {
|
||||
line_start = line_start + BytePos(diff as u32);
|
||||
line_start = line_start + RelativeBytePos(diff as u32);
|
||||
line_start
|
||||
}));
|
||||
}
|
||||
@ -1588,7 +1569,7 @@ impl SourceFile {
|
||||
let pos = bytes_per_diff * i;
|
||||
let bytes = [raw_diffs[pos], raw_diffs[pos + 1]];
|
||||
let diff = u16::from_le_bytes(bytes);
|
||||
line_start = line_start + BytePos(diff as u32);
|
||||
line_start = line_start + RelativeBytePos(diff as u32);
|
||||
line_start
|
||||
}));
|
||||
}
|
||||
@ -1602,7 +1583,7 @@ impl SourceFile {
|
||||
raw_diffs[pos + 3],
|
||||
];
|
||||
let diff = u32::from_le_bytes(bytes);
|
||||
line_start = line_start + BytePos(diff);
|
||||
line_start = line_start + RelativeBytePos(diff);
|
||||
line_start
|
||||
}));
|
||||
}
|
||||
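Line starts are serialized as a difference list: only the gaps between consecutive RelativeBytePos values are stored (1, 2 or 4 bytes per gap), and the first element is now always relative offset 0, which is why the explicit line_start field could be dropped from SourceFileDiffs. A sketch of the 1-byte case (assumes every gap fits in a u8; the real code also handles 2- and 4-byte diffs):

    fn encode_diffs(line_starts: &[u32]) -> Vec<u8> {
        // Store only the gap between consecutive line starts.
        line_starts.windows(2).map(|w| (w[1] - w[0]) as u8).collect()
    }

    fn decode_diffs(raw_diffs: &[u8]) -> Vec<u32> {
        // Rebuild the line starts with a running sum starting at relative offset 0.
        let mut line_start = 0u32;
        let mut lines = vec![line_start];
        lines.extend(raw_diffs.iter().map(|&diff| {
            line_start += diff as u32;
            line_start
        }));
        lines
    }

    fn main() {
        let lines = vec![0, 10, 26, 27];
        let raw = encode_diffs(&lines);
        assert_eq!(decode_diffs(&raw), lines);
    }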
@ -1617,8 +1598,10 @@ impl SourceFile {
|
||||
|
||||
/// Returns the `BytePos` of the beginning of the current line.
|
||||
pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
|
||||
let pos = self.relative_position(pos);
|
||||
let line_index = self.lookup_line(pos).unwrap();
|
||||
self.lines(|lines| lines[line_index])
|
||||
let line_start_pos = self.lines(|lines| lines[line_index]);
|
||||
self.absolute_position(line_start_pos)
|
||||
}
|
||||
|
||||
/// Add externally loaded source.
|
||||
@ -1643,7 +1626,7 @@ impl SourceFile {
|
||||
if let Some(mut src) = src {
|
||||
// The src_hash needs to be computed on the pre-normalized src.
|
||||
if self.src_hash.matches(&src) {
|
||||
normalize_src(&mut src, BytePos::from_usize(0));
|
||||
normalize_src(&mut src);
|
||||
*src_kind = ExternalSourceKind::Present(Lrc::new(src));
|
||||
return true;
|
||||
}
|
||||
@ -1676,8 +1659,7 @@ impl SourceFile {
|
||||
|
||||
let begin = {
|
||||
let line = self.lines(|lines| lines.get(line_number).copied())?;
|
||||
let begin: BytePos = line - self.start_pos;
|
||||
begin.to_usize()
|
||||
line.to_usize()
|
||||
};
|
||||
|
||||
if let Some(ref src) = self.src {
|
||||
@ -1703,25 +1685,41 @@ impl SourceFile {
|
||||
self.lines(|lines| lines.len())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn absolute_position(&self, pos: RelativeBytePos) -> BytePos {
|
||||
BytePos::from_u32(pos.to_u32() + self.start_pos.to_u32())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn relative_position(&self, pos: BytePos) -> RelativeBytePos {
|
||||
RelativeBytePos::from_u32(pos.to_u32() - self.start_pos.to_u32())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn end_position(&self) -> BytePos {
|
||||
self.absolute_position(self.source_len)
|
||||
}
|
||||
|
||||
/// Finds the line containing the given position. The return value is the
|
||||
/// index into the `lines` array of this `SourceFile`, not the 1-based line
|
||||
/// number. If the source_file is empty or the position is located before the
|
||||
/// first line, `None` is returned.
|
||||
pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
|
||||
pub fn lookup_line(&self, pos: RelativeBytePos) -> Option<usize> {
|
||||
self.lines(|lines| lines.partition_point(|x| x <= &pos).checked_sub(1))
|
||||
}
|
||||
|
||||
pub fn line_bounds(&self, line_index: usize) -> Range<BytePos> {
|
||||
if self.is_empty() {
|
||||
return self.start_pos..self.end_pos;
|
||||
return self.start_pos..self.start_pos;
|
||||
}
|
||||
|
||||
self.lines(|lines| {
|
||||
assert!(line_index < lines.len());
|
||||
if line_index == (lines.len() - 1) {
|
||||
lines[line_index]..self.end_pos
|
||||
self.absolute_position(lines[line_index])..self.end_position()
|
||||
} else {
|
||||
lines[line_index]..lines[line_index + 1]
|
||||
self.absolute_position(lines[line_index])
|
||||
..self.absolute_position(lines[line_index + 1])
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -1732,17 +1730,19 @@ impl SourceFile {
|
||||
/// returns true still contain one byte position according to this function.
|
||||
#[inline]
|
||||
pub fn contains(&self, byte_pos: BytePos) -> bool {
|
||||
byte_pos >= self.start_pos && byte_pos <= self.end_pos
|
||||
byte_pos >= self.start_pos && byte_pos <= self.end_position()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.start_pos == self.end_pos
|
||||
self.source_len.to_u32() == 0
|
||||
}
|
||||
|
||||
/// Calculates the original byte position relative to the start of the file
|
||||
/// based on the given byte position.
|
||||
pub fn original_relative_byte_pos(&self, pos: BytePos) -> BytePos {
|
||||
pub fn original_relative_byte_pos(&self, pos: BytePos) -> RelativeBytePos {
|
||||
let pos = self.relative_position(pos);
|
||||
|
||||
// Diff before any records is 0. Otherwise use the previously recorded
|
||||
// diff as that applies to the following characters until a new diff
|
||||
// is recorded.
|
||||
@ -1752,7 +1752,7 @@ impl SourceFile {
|
||||
Err(i) => self.normalized_pos[i - 1].diff,
|
||||
};
|
||||
|
||||
BytePos::from_u32(pos.0 - self.start_pos.0 + diff)
|
||||
RelativeBytePos::from_u32(pos.0 + diff)
|
||||
}
|
||||
|
||||
/// Calculates a normalized byte position from a byte offset relative to the
|
||||
@ -1777,8 +1777,8 @@ impl SourceFile {
|
||||
BytePos::from_u32(self.start_pos.0 + offset - diff)
|
||||
}
|
||||
|
||||
/// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`.
|
||||
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
|
||||
/// Converts an relative `RelativeBytePos` to a `CharPos` relative to the `SourceFile`.
|
||||
fn bytepos_to_file_charpos(&self, bpos: RelativeBytePos) -> CharPos {
|
||||
// The number of extra bytes due to multibyte chars in the `SourceFile`.
|
||||
let mut total_extra_bytes = 0;
|
||||
|
||||
@ -1796,13 +1796,13 @@ impl SourceFile {
|
||||
}
|
||||
}
|
||||
|
||||
assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
|
||||
CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize)
|
||||
assert!(total_extra_bytes <= bpos.to_u32());
|
||||
CharPos(bpos.to_usize() - total_extra_bytes as usize)
|
||||
}
|
||||
|
||||
/// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a
|
||||
/// given `BytePos`.
|
||||
pub fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) {
|
||||
/// given `RelativeBytePos`.
|
||||
fn lookup_file_pos(&self, pos: RelativeBytePos) -> (usize, CharPos) {
|
||||
let chpos = self.bytepos_to_file_charpos(pos);
|
||||
match self.lookup_line(pos) {
|
||||
Some(a) => {
|
||||
@ -1823,6 +1823,7 @@ impl SourceFile {
|
||||
/// Looks up the file's (1-based) line number, (0-based `CharPos`) column offset, and (0-based)
|
||||
/// column offset when displayed, for a given `BytePos`.
|
||||
pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) {
|
||||
let pos = self.relative_position(pos);
|
||||
let (line, col_or_chpos) = self.lookup_file_pos(pos);
|
||||
if line > 0 {
|
||||
let col = col_or_chpos;
|
||||
@ -1861,16 +1862,10 @@ impl SourceFile {
|
||||
}
|
||||
|
||||
/// Normalizes the source code and records the normalizations.
|
||||
fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec<NormalizedPos> {
|
||||
fn normalize_src(src: &mut String) -> Vec<NormalizedPos> {
|
||||
let mut normalized_pos = vec![];
|
||||
remove_bom(src, &mut normalized_pos);
|
||||
normalize_newlines(src, &mut normalized_pos);
|
||||
|
||||
// Offset all the positions by start_pos to match the final file positions.
|
||||
for np in &mut normalized_pos {
|
||||
np.pos.0 += start_pos.0;
|
||||
}
|
||||
|
||||
normalized_pos
|
||||
}
|
||||
|
||||
@ -1878,7 +1873,7 @@ fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec<NormalizedPos> {
|
||||
fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
|
||||
if src.starts_with('\u{feff}') {
|
||||
src.drain(..3);
|
||||
normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 });
|
||||
normalized_pos.push(NormalizedPos { pos: RelativeBytePos(0), diff: 3 });
|
||||
}
|
||||
}
|
||||
|
||||
@ -1913,7 +1908,7 @@ fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>)
|
||||
cursor += idx - gap_len;
|
||||
gap_len += 1;
|
||||
normalized_pos.push(NormalizedPos {
|
||||
pos: BytePos::from_usize(cursor + 1),
|
||||
pos: RelativeBytePos::from_usize(cursor + 1),
|
||||
diff: original_gap + gap_len as u32,
|
||||
});
|
||||
}
|
||||
@ -2015,6 +2010,10 @@ impl_pos! {
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
|
||||
pub struct BytePos(pub u32);
|
||||
|
||||
/// A byte offset relative to file beginning.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
|
||||
pub struct RelativeBytePos(pub u32);
|
||||
|
||||
/// A character offset.
|
||||
///
|
||||
/// Because of multibyte UTF-8 characters, a byte offset
|
||||
@ -2036,6 +2035,24 @@ impl<D: Decoder> Decodable<D> for BytePos {
|
||||
}
|
||||
}
|
||||
|
||||
impl<H: HashStableContext> HashStable<H> for RelativeBytePos {
|
||||
fn hash_stable(&self, hcx: &mut H, hasher: &mut StableHasher) {
|
||||
self.0.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Encoder> Encodable<S> for RelativeBytePos {
|
||||
fn encode(&self, s: &mut S) {
|
||||
s.emit_u32(self.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl<D: Decoder> Decodable<D> for RelativeBytePos {
|
||||
fn decode(d: &mut D) -> RelativeBytePos {
|
||||
RelativeBytePos(d.read_u32())
|
||||
}
|
||||
}
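
The hunks above split source-file offsets into two types: an absolute `BytePos` within the whole source map, and a `RelativeBytePos` measured from the start of one `SourceFile`, with `absolute_position`/`relative_position` converting by adding or subtracting the file's `start_pos`. A minimal standalone sketch of that relationship (simplified newtypes, not the actual rustc_span definitions):

// A simplified stand-in for rustc_span's SourceFile offsets; this only
// illustrates the start_pos arithmetic shown in the diff above.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct BytePos(u32);
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct RelativeBytePos(u32);

struct FileOffsets {
    start_pos: BytePos,
}

impl FileOffsets {
    fn absolute_position(&self, pos: RelativeBytePos) -> BytePos {
        BytePos(self.start_pos.0 + pos.0)
    }
    fn relative_position(&self, pos: BytePos) -> RelativeBytePos {
        RelativeBytePos(pos.0 - self.start_pos.0)
    }
}

fn main() {
    // A file registered at absolute offset 100 in the source map.
    let f = FileOffsets { start_pos: BytePos(100) };
    assert_eq!(f.absolute_position(RelativeBytePos(7)), BytePos(107));
    assert_eq!(f.relative_position(BytePos(107)), RelativeBytePos(7));
}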

// _____________________________________________________________________________
// Loc, SourceFileAndLine, SourceFileAndBytePos
//

@ -14,13 +14,10 @@ pub use crate::*;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{Hash128, Hash64, StableHasher};
use rustc_data_structures::sync::{
AtomicU32, IntoDynSyncSend, Lrc, MappedReadGuard, ReadGuard, RwLock,
};
use rustc_data_structures::sync::{IntoDynSyncSend, Lrc, MappedReadGuard, ReadGuard, RwLock};
use std::cmp;
use std::hash::Hash;
use std::path::{self, Path, PathBuf};
use std::sync::atomic::Ordering;

use std::fs;
use std::io;
@ -187,9 +184,6 @@ pub(super) struct SourceMapFiles {
}

pub struct SourceMap {
/// The address space below this value is currently used by the files in the source map.
used_address_space: AtomicU32,

files: RwLock<SourceMapFiles>,
file_loader: IntoDynSyncSend<Box<dyn FileLoader + Sync + Send>>,
// This is used to apply the file path remapping as specified via
@ -215,7 +209,6 @@ impl SourceMap {
hash_kind: SourceFileHashAlgorithm,
) -> SourceMap {
SourceMap {
used_address_space: AtomicU32::new(0),
files: Default::default(),
file_loader: IntoDynSyncSend(file_loader),
path_mapping,
@ -267,26 +260,26 @@ impl SourceMap {
self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned()
}

fn allocate_address_space(&self, size: usize) -> Result<usize, OffsetOverflowError> {
let size = u32::try_from(size).map_err(|_| OffsetOverflowError)?;
fn register_source_file(
&self,
file_id: StableSourceFileId,
mut file: SourceFile,
) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
let mut files = self.files.borrow_mut();

loop {
let current = self.used_address_space.load(Ordering::Relaxed);
let next = current
.checked_add(size)
// Add one so there is some space between files. This lets us distinguish
// positions in the `SourceMap`, even in the presence of zero-length files.
.and_then(|next| next.checked_add(1))
.ok_or(OffsetOverflowError)?;
file.start_pos = BytePos(if let Some(last_file) = files.source_files.last() {
// Add one so there is some space between files. This lets us distinguish
// positions in the `SourceMap`, even in the presence of zero-length files.
last_file.end_position().0.checked_add(1).ok_or(OffsetOverflowError)?
} else {
0
});

if self
.used_address_space
.compare_exchange(current, next, Ordering::Relaxed, Ordering::Relaxed)
.is_ok()
{
return Ok(usize::try_from(current).unwrap());
}
}
let file = Lrc::new(file);
files.source_files.push(file.clone());
files.stable_id_to_source_file.insert(file_id, file.clone());

Ok(file)
}
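
With `register_source_file`, a file's `start_pos` is no longer carved out of a separate address-space counter; it is simply one byte past the previous file's `end_position()`, so positions stay unique even for zero-length files. A rough illustration of that layout rule (hypothetical helper names, not the actual API, and the real code works on `SourceFile`s with checked `u32` arithmetic):

// Assign each file a start offset one byte past the previous file's end,
// mirroring the layout used by `register_source_file` above.
fn assign_start_positions(lens: &[u32]) -> Vec<u32> {
    let mut starts = Vec::with_capacity(lens.len());
    let mut next = 0u32;
    for &len in lens {
        starts.push(next);
        // The one-byte gap keeps even zero-length files distinguishable.
        next = next + len + 1;
    }
    starts
}

fn main() {
    // Files of length 10, 0 and 20 start at offsets 0, 11 and 12 respectively.
    assert_eq!(assign_start_positions(&[10, 0, 20]), vec![0, 11, 12]);
}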

/// Creates a new `SourceFile`.
@ -310,32 +303,18 @@ impl SourceMap {
let (filename, _) = self.path_mapping.map_filename_prefix(&filename);

let file_id = StableSourceFileId::new_from_name(&filename, LOCAL_CRATE);

let lrc_sf = match self.source_file_by_stable_id(file_id) {
Some(lrc_sf) => lrc_sf,
match self.source_file_by_stable_id(file_id) {
Some(lrc_sf) => Ok(lrc_sf),
None => {
let start_pos = self.allocate_address_space(src.len())?;

let source_file = Lrc::new(SourceFile::new(
filename,
src,
Pos::from_usize(start_pos),
self.hash_kind,
));
let source_file = SourceFile::new(filename, src, self.hash_kind)?;

// Let's make sure the file_id we generated above actually matches
// the ID we generate for the SourceFile we just created.
debug_assert_eq!(StableSourceFileId::new(&source_file), file_id);

let mut files = self.files.borrow_mut();

files.source_files.push(source_file.clone());
files.stable_id_to_source_file.insert(file_id, source_file.clone());

source_file
self.register_source_file(file_id, source_file)
}
};
Ok(lrc_sf)
}
}

/// Allocates a new `SourceFile` representing a source file from an external
@ -347,53 +326,17 @@ impl SourceMap {
filename: FileName,
src_hash: SourceFileHash,
name_hash: Hash128,
source_len: usize,
source_len: u32,
cnum: CrateNum,
file_local_lines: Lock<SourceFileLines>,
mut file_local_multibyte_chars: Vec<MultiByteChar>,
mut file_local_non_narrow_chars: Vec<NonNarrowChar>,
mut file_local_normalized_pos: Vec<NormalizedPos>,
original_start_pos: BytePos,
multibyte_chars: Vec<MultiByteChar>,
non_narrow_chars: Vec<NonNarrowChar>,
normalized_pos: Vec<NormalizedPos>,
metadata_index: u32,
) -> Lrc<SourceFile> {
let start_pos = self
.allocate_address_space(source_len)
.expect("not enough address space for imported source file");
let source_len = RelativeBytePos::from_u32(source_len);

let end_pos = Pos::from_usize(start_pos + source_len);
let start_pos = Pos::from_usize(start_pos);

// Translate these positions into the new global frame of reference,
// now that the offset of the SourceFile is known.
//
// These are all unsigned values. `original_start_pos` may be larger or
// smaller than `start_pos`, but `pos` is always larger than both.
// Therefore, `(pos - original_start_pos) + start_pos` won't overflow
// but `start_pos - original_start_pos` might. So we use the former
// form rather than pre-computing the offset into a local variable. The
// compiler backend can optimize away the repeated computations in a
// way that won't trigger overflow checks.
match &mut *file_local_lines.borrow_mut() {
SourceFileLines::Lines(lines) => {
for pos in lines {
*pos = (*pos - original_start_pos) + start_pos;
}
}
SourceFileLines::Diffs(SourceFileDiffs { line_start, .. }) => {
*line_start = (*line_start - original_start_pos) + start_pos;
}
}
for mbc in &mut file_local_multibyte_chars {
mbc.pos = (mbc.pos - original_start_pos) + start_pos;
}
for swc in &mut file_local_non_narrow_chars {
*swc = (*swc - original_start_pos) + start_pos;
}
for nc in &mut file_local_normalized_pos {
nc.pos = (nc.pos - original_start_pos) + start_pos;
}

let source_file = Lrc::new(SourceFile {
let source_file = SourceFile {
name: filename,
src: None,
src_hash,
@ -401,24 +344,19 @@ impl SourceMap {
kind: ExternalSourceKind::AbsentOk,
metadata_index,
}),
start_pos,
end_pos,
start_pos: BytePos(0),
source_len,
lines: file_local_lines,
multibyte_chars: file_local_multibyte_chars,
non_narrow_chars: file_local_non_narrow_chars,
normalized_pos: file_local_normalized_pos,
multibyte_chars,
non_narrow_chars,
normalized_pos,
name_hash,
cnum,
});
};

let mut files = self.files.borrow_mut();

files.source_files.push(source_file.clone());
files
.stable_id_to_source_file
.insert(StableSourceFileId::new(&source_file), source_file.clone());

source_file
let file_id = StableSourceFileId::new(&source_file);
self.register_source_file(file_id, source_file)
.expect("not enough address space for imported source file")
}

/// If there is a doctest offset, applies it to the line.
@ -452,6 +390,7 @@ impl SourceMap {
pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
let f = self.lookup_source_file(pos);

let pos = f.relative_position(pos);
match f.lookup_line(pos) {
Some(line) => Ok(SourceFileAndLine { sf: f, line }),
None => Err(f),
@ -547,7 +486,9 @@ impl SourceMap {
return true;
}
let f = (*self.files.borrow().source_files)[lo].clone();
f.lookup_line(sp.lo()) != f.lookup_line(sp.hi())
let lo = f.relative_position(sp.lo());
let hi = f.relative_position(sp.hi());
f.lookup_line(lo) != f.lookup_line(hi)
}

#[instrument(skip(self), level = "trace")]
@ -627,7 +568,7 @@ impl SourceMap {

let start_index = local_begin.pos.to_usize();
let end_index = local_end.pos.to_usize();
let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
let source_len = local_begin.sf.source_len.to_usize();

if start_index > end_index || end_index > source_len {
return Err(SpanSnippetError::MalformedForSourcemap(MalformedSourceMapPositions {
@ -1034,7 +975,7 @@ impl SourceMap {
return 1;
}

let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
let source_len = local_begin.sf.source_len.to_usize();
debug!("source_len=`{:?}`", source_len);
// Ensure indexes are also not malformed.
if start_index > end_index || end_index > source_len - 1 {

@ -50,6 +50,7 @@ impl SourceMap {
fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
let idx = self.lookup_source_file_idx(bpos);
let sf = &(*self.files.borrow().source_files)[idx];
let bpos = sf.relative_position(bpos);
sf.bytepos_to_file_charpos(bpos)
}
}
@ -230,8 +231,7 @@ fn t10() {
let SourceFile {
name,
src_hash,
start_pos,
end_pos,
source_len,
lines,
multibyte_chars,
non_narrow_chars,
@ -244,13 +244,12 @@ fn t10() {
name,
src_hash,
name_hash,
(end_pos - start_pos).to_usize(),
source_len.to_u32(),
CrateNum::new(0),
lines,
multibyte_chars,
non_narrow_chars,
normalized_pos,
start_pos,
0,
);

@ -3,24 +3,23 @@ use super::*;
#[test]
fn test_lookup_line() {
let source = "abcdefghijklm\nabcdefghij\n...".to_owned();
let sf = SourceFile::new(
FileName::Anon(Hash64::ZERO),
source,
BytePos(3),
SourceFileHashAlgorithm::Sha256,
);
sf.lines(|lines| assert_eq!(lines, &[BytePos(3), BytePos(17), BytePos(28)]));
let mut sf =
SourceFile::new(FileName::Anon(Hash64::ZERO), source, SourceFileHashAlgorithm::Sha256)
.unwrap();
sf.start_pos = BytePos(3);
sf.lines(|lines| {
assert_eq!(lines, &[RelativeBytePos(0), RelativeBytePos(14), RelativeBytePos(25)])
});

assert_eq!(sf.lookup_line(BytePos(0)), None);
assert_eq!(sf.lookup_line(BytePos(3)), Some(0));
assert_eq!(sf.lookup_line(BytePos(4)), Some(0));
assert_eq!(sf.lookup_line(RelativeBytePos(0)), Some(0));
assert_eq!(sf.lookup_line(RelativeBytePos(1)), Some(0));

assert_eq!(sf.lookup_line(BytePos(16)), Some(0));
assert_eq!(sf.lookup_line(BytePos(17)), Some(1));
assert_eq!(sf.lookup_line(BytePos(18)), Some(1));
assert_eq!(sf.lookup_line(RelativeBytePos(13)), Some(0));
assert_eq!(sf.lookup_line(RelativeBytePos(14)), Some(1));
assert_eq!(sf.lookup_line(RelativeBytePos(15)), Some(1));

assert_eq!(sf.lookup_line(BytePos(28)), Some(2));
assert_eq!(sf.lookup_line(BytePos(29)), Some(2));
assert_eq!(sf.lookup_line(RelativeBytePos(25)), Some(2));
assert_eq!(sf.lookup_line(RelativeBytePos(26)), Some(2));
}

#[test]

@ -354,6 +354,12 @@ pub(in crate::solve) fn predicates_for_object_candidate<'tcx>(
// FIXME(associated_const_equality): Also add associated consts to
// the requirements here.
if item.kind == ty::AssocKind::Type {
// associated types that require `Self: Sized` do not show up in the built-in
// implementation of `Trait for dyn Trait`, and can be dropped here.
if tcx.generics_require_sized_self(item.def_id) {
continue;
}

requirements
.extend(tcx.item_bounds(item.def_id).iter_instantiated(tcx, trait_ref.args));
}

@ -244,7 +244,21 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
// Finally we construct the actual value of the associated type.
let term = match assoc_def.item.kind {
ty::AssocKind::Type => tcx.type_of(assoc_def.item.def_id).map_bound(|ty| ty.into()),
ty::AssocKind::Const => bug!("associated const projection is not supported yet"),
ty::AssocKind::Const => {
if tcx.features().associated_const_equality {
bug!("associated const projection is not supported yet")
} else {
ty::EarlyBinder::bind(
ty::Const::new_error_with_message(
tcx,
tcx.type_of(assoc_def.item.def_id).instantiate_identity(),
DUMMY_SP,
"associated const projection is not supported yet",
)
.into(),
)
}
}
ty::AssocKind::Fn => unreachable!("we should never project to a fn"),
};

@ -2920,6 +2920,16 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
rustc_transmute::Reason::DstIsTooBig => {
format!("The size of `{src}` is smaller than the size of `{dst}`")
}
rustc_transmute::Reason::SrcSizeOverflow => {
format!(
"values of the type `{src}` are too big for the current architecture"
)
}
rustc_transmute::Reason::DstSizeOverflow => {
format!(
"values of the type `{dst}` are too big for the current architecture"
)
}
rustc_transmute::Reason::DstHasStricterAlignment {
src_min_align,
dst_min_align,

@ -57,16 +57,12 @@ impl<'a, 'tcx: 'a> InferCtxtExt<'a, 'tcx> for InferCtxt<'tcx> {
let ty = OpportunisticRegionResolver::new(self).fold_ty(ty);

// We do not expect existential variables in implied bounds.
// We may however encounter unconstrained lifetime variables in invalid
// code. See #110161 for context.
// We may however encounter unconstrained lifetime variables
// in very rare cases.
//
// See `ui/implied-bounds/implied-bounds-unconstrained-2.rs` for
// an example.
assert!(!ty.has_non_region_infer());
if ty.has_infer() {
self.tcx.sess.delay_span_bug(
self.tcx.def_span(body_id),
"skipped implied_outlives_bounds due to unconstrained lifetimes",
);
return vec![];
}

let mut canonical_var_values = OriginalQueryValues::default();
let canonical_ty =

@ -535,6 +535,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let assoc_types: Vec<_> = tcx
.associated_items(trait_predicate.def_id())
.in_definition_order()
// Associated types that require `Self: Sized` do not show up in the built-in
// implementation of `Trait for dyn Trait`, and can be dropped here.
.filter(|item| !tcx.generics_require_sized_self(item.def_id))
.filter_map(
|item| if item.kind == ty::AssocKind::Type { Some(item.def_id) } else { None },
)

@ -8,9 +8,6 @@ tracing = "0.1"
rustc_middle = { path = "../rustc_middle" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_hir = { path = "../rustc_hir" }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
rustc_infer = { path = "../rustc_infer" }
rustc_trait_selection = { path = "../rustc_trait_selection" }

@ -189,6 +189,8 @@ pub(crate) mod rustc {
Unspecified,
/// This error will be surfaced elsewhere by rustc, so don't surface it.
UnknownLayout,
/// Overflow size
SizeOverflow,
TypeError(ErrorGuaranteed),
}

@ -196,6 +198,7 @@ pub(crate) mod rustc {
fn from(err: &LayoutError<'tcx>) -> Self {
match err {
LayoutError::Unknown(..) | LayoutError::ReferencesError(..) => Self::UnknownLayout,
LayoutError::SizeOverflow(..) => Self::SizeOverflow,
err => unimplemented!("{:?}", err),
}
}

@ -64,6 +64,10 @@ pub enum Reason {
SrcLayoutUnknown,
/// The layout of dst is unknown
DstLayoutUnknown,
/// The size of src is overflow
SrcSizeOverflow,
/// The size of dst is overflow
DstSizeOverflow,
}

#[cfg(feature = "rustc")]

@ -85,6 +85,8 @@ mod rustc {
(_, Err(Err::UnknownLayout)) => Answer::No(Reason::DstLayoutUnknown),
(Err(Err::Unspecified), _) => Answer::No(Reason::SrcIsUnspecified),
(_, Err(Err::Unspecified)) => Answer::No(Reason::DstIsUnspecified),
(Err(Err::SizeOverflow), _) => Answer::No(Reason::SrcSizeOverflow),
(_, Err(Err::SizeOverflow)) => Answer::No(Reason::DstSizeOverflow),
(Ok(src), Ok(dst)) => {
MaybeTransmutableQuery { src, dst, scope, assume, context }.answer()
}

@ -753,6 +753,22 @@ impl Display for BorrowMutError {
}
}

// This ensures the panicking code is outlined from `borrow_mut` for `RefCell`.
#[inline(never)]
#[track_caller]
#[cold]
fn panic_already_borrowed(err: BorrowMutError) -> ! {
panic!("already borrowed: {:?}", err)
}

// This ensures the panicking code is outlined from `borrow` for `RefCell`.
#[inline(never)]
#[track_caller]
#[cold]
fn panic_already_mutably_borrowed(err: BorrowError) -> ! {
panic!("already mutably borrowed: {:?}", err)
}

// Positive values represent the number of `Ref` active. Negative values
// represent the number of `RefMut` active. Multiple `RefMut`s can only be
// active at a time if they refer to distinct, nonoverlapping components of a
@ -934,7 +950,10 @@ impl<T: ?Sized> RefCell<T> {
#[inline]
#[track_caller]
pub fn borrow(&self) -> Ref<'_, T> {
self.try_borrow().expect("already mutably borrowed")
match self.try_borrow() {
Ok(b) => b,
Err(err) => panic_already_mutably_borrowed(err),
}
}

/// Immutably borrows the wrapped value, returning an error if the value is currently mutably
@ -1027,7 +1046,10 @@ impl<T: ?Sized> RefCell<T> {
#[inline]
#[track_caller]
pub fn borrow_mut(&self) -> RefMut<'_, T> {
self.try_borrow_mut().expect("already borrowed")
match self.try_borrow_mut() {
Ok(b) => b,
Err(err) => panic_already_borrowed(err),
}
}

/// Mutably borrows the wrapped value, returning an error if the value is currently borrowed.
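
The two `#[cold]` `#[inline(never)]` helpers above keep the panic formatting machinery out of the hot `borrow`/`borrow_mut` bodies so those stay cheap to inline. A minimal sketch of the same outlining pattern applied to an ordinary checked accessor (illustrative only, not the `RefCell` internals):

// Keep the cold failure path out of the (inlinable) happy path.
#[cold]
#[inline(never)]
#[track_caller]
fn fail_out_of_range(idx: usize, len: usize) -> ! {
    panic!("index {idx} out of range for length {len}")
}

#[inline]
fn get_checked(data: &[u32], idx: usize) -> u32 {
    match data.get(idx) {
        Some(&v) => v,
        None => fail_out_of_range(idx, data.len()),
    }
}

fn main() {
    let data = [10, 20, 30];
    assert_eq!(get_checked(&data, 1), 20);
}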

@ -1130,7 +1130,10 @@ extern "rust-intrinsic" {
/// may lead to unexpected and unstable compilation results. This makes `transmute` **incredibly
/// unsafe**. `transmute` should be the absolute last resort.
///
/// Transmuting pointers to integers in a `const` context is [undefined behavior][ub].
/// Transmuting pointers *to* integers in a `const` context is [undefined behavior][ub],
/// unless the pointer was originally created *from* an integer.
/// (That includes this function specifically, integer-to-pointer casts, and helpers like [`invalid`][crate::ptr::invalid],
/// but also semantically-equivalent conversions such as punning through `repr(C)` union fields.)
/// Any attempt to use the resulting value for integer operations will abort const-evaluation.
/// (And even outside `const`, such transmutation is touching on many unspecified aspects of the
/// Rust memory model and should be avoided. See below for alternatives.)
@ -2704,9 +2707,13 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: us
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `src` must be [valid] for reads of `count * size_of::<T>()` bytes.
/// * `src` must be [valid] for reads of `count * size_of::<T>()` bytes, and must remain valid even
/// when `dst` is written for `count * size_of::<T>()` bytes. (This means if the memory ranges
/// overlap, the two pointers must not be subject to aliasing restrictions relative to each
/// other.)
///
/// * `dst` must be [valid] for writes of `count * size_of::<T>()` bytes.
/// * `dst` must be [valid] for writes of `count * size_of::<T>()` bytes, and must remain valid even
/// when `src` is read for `count * size_of::<T>()` bytes.
///
/// * Both `src` and `dst` must be properly aligned.
///

@ -12,7 +12,8 @@
//!
//! Typical usage will look like this:
//!
//! ```rust
#![cfg_attr(bootstrap, doc = "```rust,ignore")]
#![cfg_attr(not(bootstrap), doc = "```rust")]
//! #![feature(core_intrinsics, custom_mir)]
//! #![allow(internal_features)]
//!
@ -62,7 +63,8 @@
//!
//! # Examples
//!
//! ```rust
#![cfg_attr(bootstrap, doc = "```rust,ignore")]
#![cfg_attr(not(bootstrap), doc = "```rust")]
//! #![feature(core_intrinsics, custom_mir)]
//! #![allow(internal_features)]
//!
@ -317,7 +319,8 @@ define!(
///
/// # Examples
///
/// ```rust
#[cfg_attr(bootstrap, doc = "```rust,ignore")]
#[cfg_attr(not(bootstrap), doc = "```rust")]
/// #![allow(internal_features)]
/// #![feature(custom_mir, core_intrinsics)]
///
@ -361,6 +364,11 @@ define!(
#[doc(hidden)]
fn __internal_make_place<T>(place: T) -> *mut T
);
define!(
"mir_debuginfo",
#[doc(hidden)]
fn __debuginfo<T>(name: &'static str, s: T)
);

/// Macro for generating custom MIR.
///
@ -371,6 +379,7 @@ pub macro mir {
(
$(type RET = $ret_ty:ty ;)?
$(let $local_decl:ident $(: $local_decl_ty:ty)? ;)*
$(debug $dbg_name:ident => $dbg_data:expr ;)*

{
$($entry:tt)*
@ -394,26 +403,32 @@ pub macro mir {
$(
let $local_decl $(: $local_decl_ty)? ;
)*

::core::intrinsics::mir::__internal_extract_let!($($entry)*);
$(
::core::intrinsics::mir::__internal_extract_let!($($block)*);
)*

{
// Finally, the contents of the basic blocks
::core::intrinsics::mir::__internal_remove_let!({
{}
{ $($entry)* }
});
// Now debuginfo
$(
::core::intrinsics::mir::__internal_remove_let!({
{}
{ $($block)* }
});
__debuginfo(stringify!($dbg_name), $dbg_data);
)*

RET
{
// Finally, the contents of the basic blocks
::core::intrinsics::mir::__internal_remove_let!({
{}
{ $($entry)* }
});
$(
::core::intrinsics::mir::__internal_remove_let!({
{}
{ $($block)* }
});
)*

RET
}
}
}
}}

@ -20,11 +20,19 @@
// FIXME: Fill me in with more detail when the interface settles
//! This library is built on the assumption of a few existing symbols:
//!
//! * `memcpy`, `memcmp`, `memset`, `strlen` - These are core memory routines which are
//! often generated by LLVM. Additionally, this library can make explicit
//! calls to these functions. Their signatures are the same as found in C.
//! These functions are often provided by the system libc, but can also be
//! provided by the [compiler-builtins crate](https://crates.io/crates/compiler_builtins).
//! * `memcpy`, `memmove`, `memset`, `memcmp`, `bcmp`, `strlen` - These are core memory routines
//! which are generated by Rust codegen backends. Additionally, this library can make explicit
//! calls to `strlen`. Their signatures are the same as found in C, but there are extra
//! assumptions about their semantics: For `memcpy`, `memmove`, `memset`, `memcmp`, and `bcmp`, if
//! the `n` parameter is 0, the function is assumed to not be UB. Furthermore, for `memcpy`, if
//! source and target pointer are equal, the function is assumed to not be UB.
//! (Note that these are [standard assumptions](https://reviews.llvm.org/D86993) among compilers.)
//! These functions are often provided by the system libc, but can also be provided by the
//! [compiler-builtins crate](https://crates.io/crates/compiler_builtins).
//! Note that the library does not guarantee that it will always make these assumptions, so Rust
//! user code directly calling the C functions should follow the C specification! The advice for
//! Rust user code is to call the functions provided by this library instead (such as
//! `ptr::copy`).
//!
//! * `rust_begin_panic` - This function takes four arguments, a
//! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments
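
The rewritten paragraph above advises user code to go through the library wrappers (such as `ptr::copy`) rather than calling the C memory routines directly, since the extra semantic assumptions only apply to compiler-generated calls. A small sketch of what that looks like in practice (using `copy_nonoverlapping` on two distinct buffers):

fn main() {
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 4];
    // Equivalent in spirit to memcpy(dst, src, 4), but goes through the
    // documented Rust API rather than relying on the C symbol's semantics.
    unsafe { std::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), 4) };
    assert_eq!(dst, src);
}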

@ -8,6 +8,17 @@ pub use fpu_precision::set_precision;
// round to 80 bits causing double rounding to happen when values are eventually represented as
// 32/64 bit float values. To overcome this, the FPU control word can be set so that the
// computations are performed in the desired precision.
//
// Note that normally, it is Undefined Behavior to alter the FPU control word while Rust code runs.
// The compiler assumes that the control word is always in its default state. However, in this
// particular case the semantics with the altered control word are actually *more faithful*
// to Rust semantics than the default -- arguably it is all the code that runs *outside* of the scope
// of a `set_precision` guard that is wrong.
// In other words, we are only using this to work around <https://github.com/rust-lang/rust/issues/114479>.
// Sometimes killing UB with UB actually works...
// (If this is used to set 32bit precision, there is still a risk that the compiler moves some 64bit
// operation into the scope of the `set_precision` guard. So it's not like this is totally sound.
// But it's not really any less sound than the default state of 80bit precision...)
#[cfg(all(target_arch = "x86", not(target_feature = "sse2")))]
mod fpu_precision {
use core::arch::asm;

@ -51,6 +51,7 @@ impl Number {
/// There is an exception: disguised fast-path cases, where we can shift
/// powers-of-10 from the exponent to the significant digits.
pub fn try_fast_path<F: RawFloat>(&self) -> Option<F> {
// Here we need to work around <https://github.com/rust-lang/rust/issues/114479>.
// The fast path crucially depends on arithmetic being rounded to the correct number of bits
// without any intermediate rounding. On x86 (without SSE or SSE2) this requires the precision
// of the x87 FPU stack to be changed so that it directly rounds to 64/32 bit.

@ -11,7 +11,7 @@ use crate::hash::Hash;
/// The `..` syntax is a `RangeFull`:
///
/// ```
/// assert_eq!((..), std::ops::RangeFull);
/// assert_eq!(.., std::ops::RangeFull);
/// ```
///
/// It does not have an [`IntoIterator`] implementation, so you can't use it in

@ -795,7 +795,9 @@ pub const fn slice_from_raw_parts_mut<T>(data: *mut T, len: usize) -> *mut [T] {
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * Both `x` and `y` must be [valid] for both reads and writes.
/// * Both `x` and `y` must be [valid] for both reads and writes. They must remain valid even when the
/// other pointer is written. (This means if the memory ranges overlap, the two pointers must not
/// be subject to aliasing restrictions relative to each other.)
///
/// * Both `x` and `y` must be properly aligned.
///
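
The added wording spells out that `swap`'s two pointers must stay valid while the other one is written, which is what makes overlapping ranges legal for it (unlike the `_nonoverlapping` variants). A small usage sketch with two distinct elements of one buffer:

fn main() {
    let mut data = [1, 2, 3, 4];
    let p = data.as_mut_ptr();
    // Swap the first and last element through raw pointers. `ptr::swap` would
    // also be allowed if the two regions overlapped, which is why the docs
    // above spell out the extra validity wording.
    unsafe { std::ptr::swap(p, p.add(3)) };
    assert_eq!(data, [4, 2, 3, 1]);
}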

@ -237,7 +237,7 @@ impl<W: ?Sized + Write> BufWriter<W> {
));
}
Ok(n) => guard.consume(n),
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
Err(e) => return Err(e),
}
}

@ -1,4 +1,4 @@
use super::{BorrowedBuf, BufReader, BufWriter, ErrorKind, Read, Result, Write, DEFAULT_BUF_SIZE};
use super::{BorrowedBuf, BufReader, BufWriter, Read, Result, Write, DEFAULT_BUF_SIZE};
use crate::alloc::Allocator;
use crate::cmp;
use crate::collections::VecDeque;
@ -30,6 +30,7 @@ mod tests;
///
/// [`read`]: Read::read
/// [`write`]: Write::write
/// [`ErrorKind::Interrupted`]: crate::io::ErrorKind::Interrupted
///
/// # Examples
///
@ -163,7 +164,7 @@ where
// from adding I: Read
match self.read(&mut []) {
Ok(_) => {}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) if e.is_interrupted() => continue,
Err(e) => return Err(e),
}
let buf = self.buffer();
@ -243,7 +244,7 @@ impl<I: Write + ?Sized> BufferedWriterSpec for BufWriter<I> {
// Read again if the buffer still has enough capacity, as BufWriter itself would do
// This will occur if the reader returns short reads
}
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
Err(e) => return Err(e),
}
} else {
@ -275,7 +276,7 @@ impl<A: Allocator> BufferedWriterSpec for Vec<u8, A> {
let mut buf: BorrowedBuf<'_> = self.spare_capacity_mut().into();
match reader.read_buf(buf.unfilled()) {
Ok(()) => {}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) if e.is_interrupted() => continue,
Err(e) => return Err(e),
};

@ -307,7 +308,7 @@ fn stack_buffer_copy<R: Read + ?Sized, W: Write + ?Sized>(
loop {
match reader.read_buf(buf.unfilled()) {
Ok(()) => {}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) if e.is_interrupted() => continue,
Err(e) => return Err(e),
};
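
These hunks mechanically swap the `e.kind() == ErrorKind::Interrupted` checks for the internal `is_interrupted()` helper; the retry loops themselves are unchanged. Outside the standard library, where only the public API is available, the equivalent pattern looks roughly like this:

use std::io::{self, Read};

// Keep retrying a read that was interrupted by a signal (EINTR).
fn read_retrying(r: &mut impl Read, buf: &mut [u8]) -> io::Result<usize> {
    loop {
        match r.read(buf) {
            Ok(n) => return Ok(n),
            Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue,
            Err(e) => return Err(e),
        }
    }
}

fn main() -> io::Result<()> {
    let mut src: &[u8] = b"hello";
    let mut buf = [0u8; 5];
    let n = read_retrying(&mut src, &mut buf)?;
    assert_eq!(&buf[..n], b"hello");
    Ok(())
}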

@ -102,7 +102,7 @@ enum ErrorData<C> {
///
/// [`into`]: Into::into
#[unstable(feature = "raw_os_error_ty", issue = "107792")]
pub type RawOsError = i32;
pub type RawOsError = sys::RawOsError;

// `#[repr(align(4))]` is probably redundant, it should have that value or
// higher already. We include it just because repr_bitpacked.rs's encoding

@ -374,9 +374,6 @@ static_assert!((TAG_MASK + 1).is_power_of_two());
static_assert!(align_of::<SimpleMessage>() >= TAG_MASK + 1);
static_assert!(align_of::<Custom>() >= TAG_MASK + 1);

// `RawOsError` must be an alias for `i32`.
const _: fn(RawOsError) -> i32 = |os| os;

static_assert!(@usize_eq: TAG_MASK & TAG_SIMPLE_MESSAGE, TAG_SIMPLE_MESSAGE);
static_assert!(@usize_eq: TAG_MASK & TAG_CUSTOM, TAG_CUSTOM);
static_assert!(@usize_eq: TAG_MASK & TAG_OS, TAG_OS);

@ -1647,7 +1647,7 @@ pub trait Write {
));
}
Ok(n) => IoSlice::advance_slices(&mut bufs, n),
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
Err(e) => return Err(e),
}
}

@ -123,7 +123,7 @@ pub trait FileExt {
buf = &mut tmp[n..];
offset += n as u64;
}
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
Err(e) => return Err(e),
}
}
@ -258,7 +258,7 @@ pub trait FileExt {
buf = &buf[n..];
offset += n as u64
}
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
Err(e) => return Err(e),
}
}

@ -82,7 +82,7 @@ pub trait FileExt {
buf = &mut tmp[n..];
offset += n as u64;
}
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
Err(e) => return Err(e),
}
}
@ -162,7 +162,7 @@ pub trait FileExt {
buf = &buf[n..];
offset += n as u64
}
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
Err(e) => return Err(e),
}
}

@ -200,7 +200,7 @@
{
loop {
match cvt(f()) {
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
Err(ref e) if e.is_interrupted() => {}
other => return other,
}
}

@ -102,7 +102,7 @@ impl Socket {
match unsafe { netc::poll(&mut pollfd, 1, timeout) } {
-1 => {
let err = io::Error::last_os_error();
if err.kind() != io::ErrorKind::Interrupted {
if !err.is_interrupted() {
return Err(err);
}
}

@ -110,3 +110,5 @@ pub fn log_wrapper<F: Fn(f64) -> f64>(n: f64, log_fn: F) -> f64 {
pub fn log_wrapper<F: Fn(f64) -> f64>(n: f64, log_fn: F) -> f64 {
log_fn(n)
}

pub type RawOsError = i32;

@ -792,7 +792,7 @@ impl Drop for Dir {
fn drop(&mut self) {
let r = unsafe { libc::closedir(self.0) };
assert!(
r == 0 || crate::io::Error::last_os_error().kind() == crate::io::ErrorKind::Interrupted,
r == 0 || crate::io::Error::last_os_error().is_interrupted(),
"unexpected error during closedir: {:?}",
crate::io::Error::last_os_error()
);
Some files were not shown because too many files have changed in this diff.