Auto merge of #119648 - compiler-errors:rollup-42inxd8, r=compiler-errors

Rollup of 9 pull requests

Successful merges:

 - #119208 (coverage: Hoist some complex code out of the main span refinement loop)
 - #119216 (Use diagnostic namespace in stdlib)
 - #119414 (bootstrap: Move -Clto= setting from Rustc::run to rustc_cargo)
 - #119420 (Handle ForeignItem as TAIT scope.)
 - #119468 (rustdoc-search: tighter encoding for f index)
 - #119628 (remove duplicate test)
 - #119638 (fix cycle error when suggesting to use associated function instead of constructor)
 - #119640 (library: Fix warnings in rtstartup)
 - #119642 (library: Fix a symlink test failing on Windows)

r? `@ghost`
`@rustbot` modify labels: rollup
Committed by bors on 2024-01-06 06:00:27 +00:00 (commit aa7e9f21e9)
37 changed files with 658 additions and 418 deletions


@ -69,6 +69,7 @@ pub(super) fn find_opaque_ty_constraints_for_tait(tcx: TyCtxt<'_>, def_id: Local
Node::Item(it) => locator.visit_item(it),
Node::ImplItem(it) => locator.visit_impl_item(it),
Node::TraitItem(it) => locator.visit_trait_item(it),
Node::ForeignItem(it) => locator.visit_foreign_item(it),
other => bug!("{:?} is not a valid scope for an opaque type item", other),
}
}
@ -240,6 +241,12 @@ fn visit_trait_item(&mut self, it: &'tcx TraitItem<'tcx>) {
self.check(it.owner_id.def_id);
intravisit::walk_trait_item(self, it);
}
fn visit_foreign_item(&mut self, it: &'tcx hir::ForeignItem<'tcx>) {
trace!(?it.owner_id);
assert_ne!(it.owner_id.def_id, self.def_id);
// No need to call `check`, as we do not run borrowck on foreign items.
intravisit::walk_foreign_item(self, it);
}
}
pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>(


@ -293,12 +293,16 @@ pub fn has_polymorphic_mir_body(&self) -> bool {
fn fmt_instance(
f: &mut fmt::Formatter<'_>,
instance: &Instance<'_>,
type_length: rustc_session::Limit,
type_length: Option<rustc_session::Limit>,
) -> fmt::Result {
ty::tls::with(|tcx| {
let args = tcx.lift(instance.args).expect("could not lift for printing");
let mut cx = FmtPrinter::new_with_limit(tcx, Namespace::ValueNS, type_length);
let mut cx = if let Some(type_length) = type_length {
FmtPrinter::new_with_limit(tcx, Namespace::ValueNS, type_length)
} else {
FmtPrinter::new(tcx, Namespace::ValueNS)
};
cx.print_def_path(instance.def_id(), args)?;
let s = cx.into_buffer();
f.write_str(&s)
@ -324,13 +328,13 @@ fn fmt_instance(
impl<'a, 'tcx> fmt::Display for ShortInstance<'a, 'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt_instance(f, self.0, rustc_session::Limit(self.1))
fmt_instance(f, self.0, Some(rustc_session::Limit(self.1)))
}
}
impl<'tcx> fmt::Display for Instance<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
ty::tls::with(|tcx| fmt_instance(f, self, tcx.type_length_limit()))
fmt_instance(f, self, None)
}
}


@ -1,11 +1,9 @@
use std::cell::OnceCell;
use rustc_data_structures::graph::WithNumNodes;
use rustc_index::IndexVec;
use rustc_middle::mir;
use rustc_span::{BytePos, ExpnKind, MacroKind, Span, Symbol, DUMMY_SP};
use rustc_span::{BytePos, Span, DUMMY_SP};
use super::graph::{BasicCoverageBlock, CoverageGraph, START_BCB};
use super::graph::{BasicCoverageBlock, CoverageGraph};
use crate::coverage::ExtractedHirInfo;
mod from_mir;
@ -70,35 +68,17 @@ pub(super) fn bcbs_with_coverage_spans(
/// `dominates()` the `BasicBlock`s in this `CoverageSpan`.
#[derive(Debug, Clone)]
struct CoverageSpan {
pub span: Span,
pub expn_span: Span,
pub current_macro_or_none: OnceCell<Option<Symbol>>,
pub bcb: BasicCoverageBlock,
span: Span,
bcb: BasicCoverageBlock,
/// List of all the original spans from MIR that have been merged into this
/// span. Mainly used to precisely skip over gaps when truncating a span.
pub merged_spans: Vec<Span>,
pub is_closure: bool,
merged_spans: Vec<Span>,
is_closure: bool,
}
impl CoverageSpan {
pub fn for_fn_sig(fn_sig_span: Span) -> Self {
Self::new(fn_sig_span, fn_sig_span, START_BCB, false)
}
pub(super) fn new(
span: Span,
expn_span: Span,
bcb: BasicCoverageBlock,
is_closure: bool,
) -> Self {
Self {
span,
expn_span,
current_macro_or_none: Default::default(),
bcb,
merged_spans: vec![span],
is_closure,
}
fn new(span: Span, bcb: BasicCoverageBlock, is_closure: bool) -> Self {
Self { span, bcb, merged_spans: vec![span], is_closure }
}
pub fn merge_from(&mut self, other: &Self) {
@ -123,37 +103,6 @@ pub fn is_mergeable(&self, other: &Self) -> bool {
pub fn is_in_same_bcb(&self, other: &Self) -> bool {
self.bcb == other.bcb
}
/// If the span is part of a macro, returns the macro name symbol.
pub fn current_macro(&self) -> Option<Symbol> {
self.current_macro_or_none
.get_or_init(|| {
if let ExpnKind::Macro(MacroKind::Bang, current_macro) =
self.expn_span.ctxt().outer_expn_data().kind
{
return Some(current_macro);
}
None
})
.map(|symbol| symbol)
}
/// If the span is part of a macro, and the macro is visible (expands directly to the given
/// body_span), returns the macro name symbol.
pub fn visible_macro(&self, body_span: Span) -> Option<Symbol> {
let current_macro = self.current_macro()?;
let parent_callsite = self.expn_span.parent_callsite()?;
// In addition to matching the context of the body span, the parent callsite
// must also be the source callsite, i.e. the parent must have no parent.
let is_visible_macro =
parent_callsite.parent_callsite().is_none() && parent_callsite.eq_ctxt(body_span);
is_visible_macro.then_some(current_macro)
}
pub fn is_macro_expansion(&self) -> bool {
self.current_macro().is_some()
}
}
/// Converts the initial set of `CoverageSpan`s (one per MIR `Statement` or `Terminator`) into a
@ -164,10 +113,6 @@ pub fn is_macro_expansion(&self) -> bool {
/// execution
/// * Carve out (leave uncovered) any span that will be counted by another MIR (notably, closures)
struct CoverageSpansGenerator<'a> {
/// A `Span` covering the function body of the MIR (typically from left curly brace to right
/// curly brace).
body_span: Span,
/// The BasicCoverageBlock Control Flow Graph (BCB CFG).
basic_coverage_blocks: &'a CoverageGraph,
@ -244,7 +189,6 @@ pub(super) fn generate_coverage_spans(
);
let coverage_spans = Self {
body_span: hir_info.body_span,
basic_coverage_blocks,
sorted_spans_iter: sorted_spans.into_iter(),
some_curr: None,
@ -266,7 +210,6 @@ fn to_refined_spans(mut self) -> Vec<CoverageSpan> {
// span-processing steps don't make sense yet.
if self.some_prev.is_none() {
debug!(" initial span");
self.maybe_push_macro_name_span();
continue;
}
@ -278,7 +221,6 @@ fn to_refined_spans(mut self) -> Vec<CoverageSpan> {
debug!(" same bcb (and neither is a closure), merge with prev={prev:?}");
let prev = self.take_prev();
self.curr_mut().merge_from(&prev);
self.maybe_push_macro_name_span();
// Note that curr.span may now differ from curr_original_span
} else if prev.span.hi() <= curr.span.lo() {
debug!(
@ -286,7 +228,6 @@ fn to_refined_spans(mut self) -> Vec<CoverageSpan> {
);
let prev = self.take_prev();
self.refined_spans.push(prev);
self.maybe_push_macro_name_span();
} else if prev.is_closure {
// drop any equal or overlapping span (`curr`) and keep `prev` to test again in the
// next iter
@ -297,35 +238,11 @@ fn to_refined_spans(mut self) -> Vec<CoverageSpan> {
} else if curr.is_closure {
self.carve_out_span_for_closure();
} else if self.prev_original_span == curr.span {
// Note that this compares the new (`curr`) span to `prev_original_span`.
// In this branch, the actual span byte range of `prev_original_span` is not
// important. What is important is knowing whether the new `curr` span was
// **originally** the same as the original span of `prev()`. The original spans
// reflect their original sort order, and for equal spans, conveys a partial
// ordering based on CFG dominator priority.
if prev.is_macro_expansion() && curr.is_macro_expansion() {
// Macros that expand to include branching (such as
// `assert_eq!()`, `assert_ne!()`, `info!()`, `debug!()`, or
// `trace!()`) typically generate callee spans with identical
// ranges (typically the full span of the macro) for all
// `BasicBlocks`. This makes it impossible to distinguish
// the condition (`if val1 != val2`) from the optional
// branched statements (such as the call to `panic!()` on
// assert failure). In this case it is better (or less
// worse) to drop the optional branch bcbs and keep the
// non-conditional statements, to count when reached.
debug!(
" curr and prev are part of a macro expansion, and curr has the same span \
as prev, but is in a different bcb. Drop curr and keep prev for next iter. \
prev={prev:?}",
);
self.take_curr(); // Discards curr.
} else {
self.update_pending_dups();
}
// `prev` and `curr` have the same span, or would have had the
// same span before `prev` was modified by other spans.
self.update_pending_dups();
} else {
self.cutoff_prev_at_overlapping_curr();
self.maybe_push_macro_name_span();
}
}
@ -360,41 +277,6 @@ fn to_refined_spans(mut self) -> Vec<CoverageSpan> {
self.refined_spans
}
/// If `curr` is part of a new macro expansion, carve out and push a separate
/// span that ends just after the macro name and its subsequent `!`.
fn maybe_push_macro_name_span(&mut self) {
let curr = self.curr();
let Some(visible_macro) = curr.visible_macro(self.body_span) else { return };
if let Some(prev) = &self.some_prev
&& prev.expn_span.eq_ctxt(curr.expn_span)
{
return;
}
// The split point is relative to `curr_original_span`,
// because `curr.span` may have been merged with preceding spans.
let split_point_after_macro_bang = self.curr_original_span.lo()
+ BytePos(visible_macro.as_str().len() as u32)
+ BytePos(1); // add 1 for the `!`
debug_assert!(split_point_after_macro_bang <= curr.span.hi());
if split_point_after_macro_bang > curr.span.hi() {
// Something is wrong with the macro name span;
// return now to avoid emitting malformed mappings (e.g. #117788).
return;
}
let mut macro_name_cov = curr.clone();
macro_name_cov.span = macro_name_cov.span.with_hi(split_point_after_macro_bang);
self.curr_mut().span = curr.span.with_lo(split_point_after_macro_bang);
debug!(
" and curr starts a new macro expansion, so add a new span just for \
the macro `{visible_macro}!`, new span={macro_name_cov:?}",
);
self.refined_spans.push(macro_name_cov);
}
#[track_caller]
fn curr(&self) -> &CoverageSpan {
self.some_curr.as_ref().unwrap_or_else(|| bug!("some_curr is None (curr)"))


@ -1,11 +1,14 @@
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::mir::{
self, AggregateKind, FakeReadCause, Rvalue, Statement, StatementKind, Terminator,
TerminatorKind,
};
use rustc_span::Span;
use rustc_span::{ExpnKind, MacroKind, Span, Symbol};
use crate::coverage::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph};
use crate::coverage::graph::{
BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph, START_BCB,
};
use crate::coverage::spans::CoverageSpan;
use crate::coverage::ExtractedHirInfo;
@ -15,26 +18,29 @@ pub(super) fn mir_to_initial_sorted_coverage_spans(
basic_coverage_blocks: &CoverageGraph,
) -> Vec<CoverageSpan> {
let &ExtractedHirInfo { is_async_fn, fn_sig_span, body_span, .. } = hir_info;
let mut initial_spans = vec![SpanFromMir::for_fn_sig(fn_sig_span)];
if is_async_fn {
// An async function desugars into a function that returns a future,
// with the user code wrapped in a closure. Any spans in the desugared
// outer function will be unhelpful, so just produce a single span
// associating the function signature with its entry BCB.
return vec![CoverageSpan::for_fn_sig(fn_sig_span)];
// outer function will be unhelpful, so just keep the signature span
// and ignore all of the spans in the MIR body.
} else {
for (bcb, bcb_data) in basic_coverage_blocks.iter_enumerated() {
initial_spans.extend(bcb_to_initial_coverage_spans(mir_body, body_span, bcb, bcb_data));
}
// If no spans were extracted from the body, discard the signature span.
// FIXME: This preserves existing behavior; consider getting rid of it.
if initial_spans.len() == 1 {
initial_spans.clear();
}
}
let mut initial_spans = Vec::with_capacity(mir_body.basic_blocks.len() * 2);
for (bcb, bcb_data) in basic_coverage_blocks.iter_enumerated() {
initial_spans.extend(bcb_to_initial_coverage_spans(mir_body, body_span, bcb, bcb_data));
}
if initial_spans.is_empty() {
// This can happen if, for example, the function is unreachable (contains only
// `BasicBlock`s with an `Unreachable` terminator).
return initial_spans;
}
initial_spans.push(CoverageSpan::for_fn_sig(fn_sig_span));
initial_spans.sort_by(|a, b| basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb));
remove_unwanted_macro_spans(&mut initial_spans);
split_visible_macro_spans(&mut initial_spans);
initial_spans.sort_by(|a, b| {
// First sort by span start.
@ -53,7 +59,62 @@ pub(super) fn mir_to_initial_sorted_coverage_spans(
.then_with(|| Ord::cmp(&a.is_closure, &b.is_closure).reverse())
});
initial_spans
initial_spans.into_iter().map(SpanFromMir::into_coverage_span).collect::<Vec<_>>()
}
/// Macros that expand into branches (e.g. `assert!`, `trace!`) tend to generate
/// multiple condition/consequent blocks that have the span of the whole macro
/// invocation, which is unhelpful. Keeping only the first such span seems to
/// give better mappings, so remove the others.
///
/// (The input spans should be sorted in BCB dominator order, so that the
/// retained "first" span is likely to dominate the others.)
fn remove_unwanted_macro_spans(initial_spans: &mut Vec<SpanFromMir>) {
let mut seen_macro_spans = FxHashSet::default();
initial_spans.retain(|covspan| {
// Ignore (retain) closure spans and non-macro-expansion spans.
if covspan.is_closure || covspan.visible_macro.is_none() {
return true;
}
// Retain only the first macro-expanded covspan with this span.
seen_macro_spans.insert(covspan.span)
});
}
/// When a span corresponds to a macro invocation that is visible from the
/// function body, split it into two parts. The first part covers just the
/// macro name plus `!`, and the second part covers the rest of the macro
/// invocation. This seems to give better results for code that uses macros.
fn split_visible_macro_spans(initial_spans: &mut Vec<SpanFromMir>) {
let mut extra_spans = vec![];
initial_spans.retain(|covspan| {
if covspan.is_closure {
return true;
}
let Some(visible_macro) = covspan.visible_macro else { return true };
let split_len = visible_macro.as_str().len() as u32 + 1;
let (before, after) = covspan.span.split_at(split_len);
if !covspan.span.contains(before) || !covspan.span.contains(after) {
// Something is unexpectedly wrong with the split point.
// The debug assertion in `split_at` will have already caught this,
// but in release builds it's safer to do nothing and maybe get a
// bug report for unexpected coverage, rather than risk an ICE.
return true;
}
assert!(!covspan.is_closure);
extra_spans.push(SpanFromMir::new(before, covspan.visible_macro, covspan.bcb, false));
extra_spans.push(SpanFromMir::new(after, covspan.visible_macro, covspan.bcb, false));
false // Discard the original covspan that we just split.
});
// The newly-split spans are added at the end, so any previous sorting
// is not preserved.
initial_spans.extend(extra_spans);
}
// Generate a set of `CoverageSpan`s from the filtered set of `Statement`s and `Terminator`s of
@ -66,22 +127,24 @@ fn bcb_to_initial_coverage_spans<'a, 'tcx>(
body_span: Span,
bcb: BasicCoverageBlock,
bcb_data: &'a BasicCoverageBlockData,
) -> impl Iterator<Item = CoverageSpan> + Captures<'a> + Captures<'tcx> {
) -> impl Iterator<Item = SpanFromMir> + Captures<'a> + Captures<'tcx> {
bcb_data.basic_blocks.iter().flat_map(move |&bb| {
let data = &mir_body[bb];
let statement_spans = data.statements.iter().filter_map(move |statement| {
let expn_span = filtered_statement_span(statement)?;
let span = unexpand_into_body_span(expn_span, body_span)?;
let (span, visible_macro) =
unexpand_into_body_span_with_visible_macro(expn_span, body_span)?;
Some(CoverageSpan::new(span, expn_span, bcb, is_closure_or_coroutine(statement)))
Some(SpanFromMir::new(span, visible_macro, bcb, is_closure_or_coroutine(statement)))
});
let terminator_span = Some(data.terminator()).into_iter().filter_map(move |terminator| {
let expn_span = filtered_terminator_span(terminator)?;
let span = unexpand_into_body_span(expn_span, body_span)?;
let (span, visible_macro) =
unexpand_into_body_span_with_visible_macro(expn_span, body_span)?;
Some(CoverageSpan::new(span, expn_span, bcb, false))
Some(SpanFromMir::new(span, visible_macro, bcb, false))
});
statement_spans.chain(terminator_span)
@ -202,7 +265,83 @@ fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option<Span> {
///
/// [^1]Expansions result from Rust syntax including macros, syntactic sugar,
/// etc.).
#[inline]
fn unexpand_into_body_span(span: Span, body_span: Span) -> Option<Span> {
span.find_ancestor_inside_same_ctxt(body_span)
fn unexpand_into_body_span_with_visible_macro(
original_span: Span,
body_span: Span,
) -> Option<(Span, Option<Symbol>)> {
let (span, prev) = unexpand_into_body_span_with_prev(original_span, body_span)?;
let visible_macro = prev
.map(|prev| match prev.ctxt().outer_expn_data().kind {
ExpnKind::Macro(MacroKind::Bang, name) => Some(name),
_ => None,
})
.flatten();
Some((span, visible_macro))
}
/// Walks through the expansion ancestors of `original_span` to find a span that
/// is contained in `body_span` and has the same [`SyntaxContext`] as `body_span`.
/// The ancestor that was traversed just before the matching span (if any) is
/// also returned.
///
/// For example, a return value of `Some((ancestor, Some(prev)))` means that:
/// - `ancestor == original_span.find_ancestor_inside_same_ctxt(body_span)`
/// - `ancestor == prev.parent_callsite()`
///
/// [`SyntaxContext`]: rustc_span::SyntaxContext
fn unexpand_into_body_span_with_prev(
original_span: Span,
body_span: Span,
) -> Option<(Span, Option<Span>)> {
let mut prev = None;
let mut curr = original_span;
while !body_span.contains(curr) || !curr.eq_ctxt(body_span) {
prev = Some(curr);
curr = curr.parent_callsite()?;
}
debug_assert_eq!(Some(curr), original_span.find_ancestor_in_same_ctxt(body_span));
if let Some(prev) = prev {
debug_assert_eq!(Some(curr), prev.parent_callsite());
}
Some((curr, prev))
}
#[derive(Debug)]
struct SpanFromMir {
/// A span that has been extracted from MIR and then "un-expanded" back to
/// within the current function's `body_span`. After various intermediate
/// processing steps, this span is emitted as part of the final coverage
/// mappings.
///
/// With the exception of `fn_sig_span`, this should always be contained
/// within `body_span`.
span: Span,
visible_macro: Option<Symbol>,
bcb: BasicCoverageBlock,
is_closure: bool,
}
impl SpanFromMir {
fn for_fn_sig(fn_sig_span: Span) -> Self {
Self::new(fn_sig_span, None, START_BCB, false)
}
fn new(
span: Span,
visible_macro: Option<Symbol>,
bcb: BasicCoverageBlock,
is_closure: bool,
) -> Self {
Self { span, visible_macro, bcb, is_closure }
}
fn into_coverage_span(self) -> CoverageSpan {
let Self { span, visible_macro: _, bcb, is_closure } = self;
CoverageSpan::new(span, bcb, is_closure)
}
}
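
The split performed by `split_visible_macro_spans` above happens at `visible_macro.as_str().len() + 1`, i.e. just past the macro name and its `!`. A minimal standalone sketch of that arithmetic, using plain byte offsets instead of rustc `Span`s (the `split_at` helper and the `info!` example are illustrative assumptions, not the compiler API):

```rust
// Byte-offset stand-in for the Span splitting done by `split_visible_macro_spans`.
fn split_at(lo: usize, hi: usize, split_len: usize) -> ((usize, usize), (usize, usize)) {
    let mid = lo + split_len;
    assert!(mid <= hi, "split point must stay inside the original span");
    ((lo, mid), (mid, hi))
}

fn main() {
    let invocation = r#"info!("hello")"#;
    let visible_macro = "info";
    // Split just after the macro name and the `!`, as in the code above.
    let (before, after) = split_at(0, invocation.len(), visible_macro.len() + 1);
    assert_eq!(&invocation[before.0..before.1], "info!");
    assert_eq!(&invocation[after.0..after.1], r#"("hello")"#);
}
```

Both halves keep the same BCB and `is_closure = false`, matching the two `SpanFromMir::new` calls pushed onto `extra_spans`.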


@ -1755,11 +1755,8 @@ fn suggest_alternative_construction_methods(
.filter_map(|item| {
// Only assoc fns that return `Self`
let fn_sig = self.r.tcx.fn_sig(item.def_id).skip_binder();
let ret_ty = fn_sig.output();
let ret_ty = self
.r
.tcx
.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), ret_ty);
// Don't normalize the return type, because that can cause cycle errors.
let ret_ty = fn_sig.output().skip_binder();
let ty::Adt(def, _args) = ret_ty.kind() else {
return None;
};


@ -521,7 +521,7 @@ fn parse(
pub fn of_item(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<Option<Self>, ErrorGuaranteed> {
if let Some(attr) = tcx.get_attr(item_def_id, sym::rustc_on_unimplemented) {
return Self::parse_attribute(attr, false, tcx, item_def_id);
} else if tcx.features().diagnostic_namespace {
} else {
tcx.get_attrs_by_path(item_def_id, &[sym::diagnostic, sym::on_unimplemented])
.filter_map(|attr| Self::parse_attribute(attr, true, tcx, item_def_id).transpose())
.try_fold(None, |aggr: Option<Self>, directive| {
@ -592,8 +592,6 @@ pub fn of_item(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<Option<Self>, Er
Ok(Some(directive))
}
})
} else {
Ok(None)
}
}


@ -28,7 +28,7 @@
#[must_use = "futures do nothing unless you `.await` or poll them"]
#[stable(feature = "futures_api", since = "1.36.0")]
#[lang = "future_trait"]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
label = "`{Self}` is not a future",
message = "`{Self}` is not a future",
note = "{Self} must be a future or must implement `IntoFuture` to be awaited"


@ -217,6 +217,7 @@
#![feature(const_trait_impl)]
#![feature(decl_macro)]
#![feature(deprecated_suggestion)]
#![feature(diagnostic_namespace)]
#![feature(doc_cfg)]
#![feature(doc_cfg_hide)]
#![feature(doc_notable_trait)]


@ -75,7 +75,7 @@
/// [ub]: ../../reference/behavior-considered-undefined.html
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Send")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "`{Self}` cannot be sent between threads safely",
label = "`{Self}` cannot be sent between threads safely"
)]
@ -134,7 +134,7 @@ unsafe impl<T: Sync + ?Sized> Send for &T {}
#[doc(alias = "?", alias = "?Sized")]
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "sized"]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "the size for values of type `{Self}` cannot be known at compilation time",
label = "doesn't have a size known at compile-time"
)]
@ -205,7 +205,7 @@ pub trait Unsize<T: ?Sized> {
/// [RFC1445]: https://github.com/rust-lang/rfcs/blob/master/text/1445-restrict-constants-in-patterns.md
/// [issue 63438]: https://github.com/rust-lang/rust/issues/63438
#[unstable(feature = "structural_match", issue = "31434")]
#[rustc_on_unimplemented(message = "the type `{Self}` does not `#[derive(PartialEq)]`")]
#[diagnostic::on_unimplemented(message = "the type `{Self}` does not `#[derive(PartialEq)]`")]
#[lang = "structural_peq"]
pub trait StructuralPartialEq {
// Empty.
@ -273,7 +273,7 @@ pub trait StructuralPartialEq {
/// of the two derives (`#[derive(PartialEq)]` and `#[derive(Eq)]`) and check
/// that both of them are present as part of structural-match checking.
#[unstable(feature = "structural_match", issue = "31434")]
#[rustc_on_unimplemented(message = "the type `{Self}` does not `#[derive(Eq)]`")]
#[diagnostic::on_unimplemented(message = "the type `{Self}` does not `#[derive(Eq)]`")]
#[lang = "structural_teq"]
pub trait StructuralEq {
// Empty.
@ -941,7 +941,7 @@ impl<T: ?Sized> !Freeze for UnsafeCell<T> {}
/// [Pin]: crate::pin::Pin
/// [`pin` module]: crate::pin
#[stable(feature = "pin", since = "1.33.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
note = "consider using the `pin!` macro\nconsider using `Box::pin` if you need to access the pinned value outside of the current scope",
message = "`{Self}` cannot be unpinned"
)]
@ -989,7 +989,7 @@ pub trait Destruct {}
/// for any user type.
#[unstable(feature = "tuple_trait", issue = "none")]
#[lang = "tuple_trait"]
#[rustc_on_unimplemented(message = "`{Self}` is not a tuple")]
#[diagnostic::on_unimplemented(message = "`{Self}` is not a tuple")]
#[rustc_deny_explicit_impl(implement_via_object = false)]
pub trait Tuple {}
@ -999,7 +999,7 @@ pub trait Tuple {}
/// `*const ()` automatically implement this trait.
#[unstable(feature = "pointer_like_trait", issue = "none")]
#[lang = "pointer_like"]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "`{Self}` needs to have the same ABI as a pointer",
label = "`{Self}` needs to be a pointer-like type"
)]
@ -1013,7 +1013,7 @@ pub trait PointerLike {}
/// are `StructuralPartialEq`.
#[lang = "const_param_ty"]
#[unstable(feature = "adt_const_params", issue = "95174")]
#[rustc_on_unimplemented(message = "`{Self}` can't be used as a const parameter type")]
#[diagnostic::on_unimplemented(message = "`{Self}` can't be used as a const parameter type")]
#[allow(multiple_supertrait_upcastable)]
pub trait ConstParamTy: StructuralEq + StructuralPartialEq + Eq {}


@ -307,7 +307,7 @@ fn sub(self, other: $t) -> $t { self - other }
/// ```
#[lang = "mul"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot multiply `{Self}` by `{Rhs}`",
label = "no implementation for `{Self} * {Rhs}`"
)]
@ -441,7 +441,7 @@ fn mul(self, other: $t) -> $t { self * other }
/// ```
#[lang = "div"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot divide `{Self}` by `{Rhs}`",
label = "no implementation for `{Self} / {Rhs}`"
)]
@ -543,7 +543,7 @@ fn div(self, other: $t) -> $t { self / other }
/// ```
#[lang = "rem"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot calculate the remainder of `{Self}` divided by `{Rhs}`",
label = "no implementation for `{Self} % {Rhs}`"
)]
@ -729,7 +729,7 @@ fn neg(self) -> $t { -self }
/// ```
#[lang = "add_assign"]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot add-assign `{Rhs}` to `{Self}`",
label = "no implementation for `{Self} += {Rhs}`"
)]
@ -796,7 +796,7 @@ fn add_assign(&mut self, other: $t) { *self += other }
/// ```
#[lang = "sub_assign"]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot subtract-assign `{Rhs}` from `{Self}`",
label = "no implementation for `{Self} -= {Rhs}`"
)]
@ -854,7 +854,7 @@ fn sub_assign(&mut self, other: $t) { *self -= other }
/// ```
#[lang = "mul_assign"]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot multiply-assign `{Self}` by `{Rhs}`",
label = "no implementation for `{Self} *= {Rhs}`"
)]
@ -912,7 +912,7 @@ fn mul_assign(&mut self, other: $t) { *self *= other }
/// ```
#[lang = "div_assign"]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot divide-assign `{Self}` by `{Rhs}`",
label = "no implementation for `{Self} /= {Rhs}`"
)]
@ -973,7 +973,7 @@ fn div_assign(&mut self, other: $t) { *self /= other }
/// ```
#[lang = "rem_assign"]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "cannot calculate and assign the remainder of `{Self}` divided by `{Rhs}`",
label = "no implementation for `{Self} %= {Rhs}`"
)]


@ -137,7 +137,7 @@ fn not(self) -> ! {
#[lang = "bitand"]
#[doc(alias = "&")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} & {Rhs}`",
label = "no implementation for `{Self} & {Rhs}`"
)]
@ -237,7 +237,7 @@ fn bitand(self, rhs: $t) -> $t { self & rhs }
#[lang = "bitor"]
#[doc(alias = "|")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} | {Rhs}`",
label = "no implementation for `{Self} | {Rhs}`"
)]
@ -337,7 +337,7 @@ fn bitor(self, rhs: $t) -> $t { self | rhs }
#[lang = "bitxor"]
#[doc(alias = "^")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} ^ {Rhs}`",
label = "no implementation for `{Self} ^ {Rhs}`"
)]
@ -436,7 +436,7 @@ fn bitxor(self, other: $t) -> $t { self ^ other }
#[lang = "shl"]
#[doc(alias = "<<")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} << {Rhs}`",
label = "no implementation for `{Self} << {Rhs}`"
)]
@ -554,7 +554,7 @@ macro_rules! shl_impl_all {
#[lang = "shr"]
#[doc(alias = ">>")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} >> {Rhs}`",
label = "no implementation for `{Self} >> {Rhs}`"
)]
@ -681,7 +681,7 @@ macro_rules! shr_impl_all {
#[lang = "bitand_assign"]
#[doc(alias = "&=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} &= {Rhs}`",
label = "no implementation for `{Self} &= {Rhs}`"
)]
@ -752,7 +752,7 @@ fn bitand_assign(&mut self, other: $t) { *self &= other }
#[lang = "bitor_assign"]
#[doc(alias = "|=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} |= {Rhs}`",
label = "no implementation for `{Self} |= {Rhs}`"
)]
@ -823,7 +823,7 @@ fn bitor_assign(&mut self, other: $t) { *self |= other }
#[lang = "bitxor_assign"]
#[doc(alias = "^=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} ^= {Rhs}`",
label = "no implementation for `{Self} ^= {Rhs}`"
)]
@ -892,7 +892,7 @@ fn bitxor_assign(&mut self, other: $t) { *self ^= other }
#[lang = "shl_assign"]
#[doc(alias = "<<=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} <<= {Rhs}`",
label = "no implementation for `{Self} <<= {Rhs}`"
)]
@ -974,7 +974,7 @@ macro_rules! shl_assign_impl_all {
#[lang = "shr_assign"]
#[doc(alias = ">>=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "no implementation for `{Self} >>= {Rhs}`",
label = "no implementation for `{Self} >>= {Rhs}`"
)]


@ -47,7 +47,7 @@
/// assert_eq!(nucleotide_count[Nucleotide::T], 12);
/// ```
#[lang = "index"]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "the type `{Self}` cannot be indexed by `{Idx}`",
label = "`{Self}` cannot be indexed by `{Idx}`"
)]


@ -83,7 +83,7 @@
/// implemented for any closed over variables passed to `catch_unwind`.
#[stable(feature = "catch_unwind", since = "1.9.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "unwind_safe_trait")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "the type `{Self}` may not be safely transferred across an unwind boundary",
label = "`{Self}` may not be safely transferred across an unwind boundary"
)]
@ -99,7 +99,7 @@
/// [`UnwindSafe`] trait, for more information see that documentation.
#[stable(feature = "catch_unwind", since = "1.9.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "ref_unwind_safe_trait")]
#[rustc_on_unimplemented(
#[diagnostic::on_unimplemented(
message = "the type `{Self}` may contain interior mutability and a reference may not be safely \
transferrable across a catch_unwind boundary",
label = "`{Self}` may contain interior mutability and a reference may not be safely \


@ -18,6 +18,7 @@
#![crate_type = "rlib"]
#![no_core]
#![allow(non_camel_case_types)]
#![allow(internal_features)]
#[lang = "sized"]
trait Sized {}


@ -5,6 +5,7 @@
#![feature(auto_traits)]
#![crate_type = "rlib"]
#![no_core]
#![allow(internal_features)]
#[lang = "sized"]
trait Sized {}


@ -936,8 +936,10 @@ fn read_link() {
}
// Check that readlink works with non-drive paths on Windows.
let link = tmpdir.join("link_unc");
check!(symlink_dir(r"\\localhost\c$\", &link));
assert_eq!(check!(fs::read_link(&link)), Path::new(r"\\localhost\c$\"));
if got_symlink_permission(&tmpdir) {
check!(symlink_dir(r"\\localhost\c$\", &link));
assert_eq!(check!(fs::read_link(&link)), Path::new(r"\\localhost\c$\"));
};
}
let link = tmpdir.join("link");
if !got_symlink_permission(&tmpdir) {


@ -905,34 +905,6 @@ fn run(self, builder: &Builder<'_>) {
));
}
// We currently don't support cross-crate LTO in stage0. This also isn't hugely necessary
// and may just be a time sink.
if compiler.stage != 0 {
match builder.config.rust_lto {
RustcLto::Thin | RustcLto::Fat => {
// Since using LTO for optimizing dylibs is currently experimental,
// we need to pass -Zdylib-lto.
cargo.rustflag("-Zdylib-lto");
// Cargo by default passes `-Cembed-bitcode=no` and doesn't pass `-Clto` when
// compiling dylibs (and their dependencies), even when LTO is enabled for the
// crate. Therefore, we need to override `-Clto` and `-Cembed-bitcode` here.
let lto_type = match builder.config.rust_lto {
RustcLto::Thin => "thin",
RustcLto::Fat => "fat",
_ => unreachable!(),
};
cargo.rustflag(&format!("-Clto={lto_type}"));
cargo.rustflag("-Cembed-bitcode=yes");
}
RustcLto::ThinLocal => { /* Do nothing, this is the default */ }
RustcLto::Off => {
cargo.rustflag("-Clto=off");
}
}
} else if builder.config.rust_lto == RustcLto::Off {
cargo.rustflag("-Clto=off");
}
for krate in &*self.crates {
cargo.arg("-p").arg(krate);
}
@ -989,6 +961,34 @@ pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelec
cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)");
// We currently don't support cross-crate LTO in stage0. This also isn't hugely necessary
// and may just be a time sink.
if stage != 0 {
match builder.config.rust_lto {
RustcLto::Thin | RustcLto::Fat => {
// Since using LTO for optimizing dylibs is currently experimental,
// we need to pass -Zdylib-lto.
cargo.rustflag("-Zdylib-lto");
// Cargo by default passes `-Cembed-bitcode=no` and doesn't pass `-Clto` when
// compiling dylibs (and their dependencies), even when LTO is enabled for the
// crate. Therefore, we need to override `-Clto` and `-Cembed-bitcode` here.
let lto_type = match builder.config.rust_lto {
RustcLto::Thin => "thin",
RustcLto::Fat => "fat",
_ => unreachable!(),
};
cargo.rustflag(&format!("-Clto={lto_type}"));
cargo.rustflag("-Cembed-bitcode=yes");
}
RustcLto::ThinLocal => { /* Do nothing, this is the default */ }
RustcLto::Off => {
cargo.rustflag("-Clto=off");
}
}
} else if builder.config.rust_lto == RustcLto::Off {
cargo.rustflag("-Clto=off");
}
rustc_cargo_env(builder, cargo, target, stage);
}


@ -58,7 +58,7 @@
symbol::{sym, Symbol},
BytePos, FileName, RealFileName,
};
use serde::ser::{SerializeMap, SerializeSeq};
use serde::ser::SerializeMap;
use serde::{Serialize, Serializer};
use crate::clean::{self, ItemId, RenderedLink, SelfTy};
@ -123,44 +123,58 @@ pub(crate) struct IndexItem {
}
/// A type used for the search index.
#[derive(Debug)]
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct RenderType {
id: Option<RenderTypeId>,
generics: Option<Vec<RenderType>>,
bindings: Option<Vec<(RenderTypeId, Vec<RenderType>)>>,
}
impl Serialize for RenderType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let id = match &self.id {
impl RenderType {
// Types are rendered as lists of lists, because that's pretty compact.
// The contents of the lists are always integers in self-terminating hex
// form, handled by `RenderTypeId::write_to_string`, so no commas are
// needed to separate the items.
pub fn write_to_string(&self, string: &mut String) {
fn write_optional_id(id: Option<RenderTypeId>, string: &mut String) {
// 0 is a sentinel, everything else is one-indexed
None => 0,
// concrete type
Some(RenderTypeId::Index(idx)) if *idx >= 0 => idx + 1,
// generic type parameter
Some(RenderTypeId::Index(idx)) => *idx,
_ => panic!("must convert render types to indexes before serializing"),
};
if self.generics.is_some() || self.bindings.is_some() {
let mut seq = serializer.serialize_seq(None)?;
seq.serialize_element(&id)?;
seq.serialize_element(self.generics.as_ref().map(Vec::as_slice).unwrap_or_default())?;
if self.bindings.is_some() {
seq.serialize_element(
self.bindings.as_ref().map(Vec::as_slice).unwrap_or_default(),
)?;
match id {
Some(id) => id.write_to_string(string),
None => string.push('`'),
}
seq.end()
}
// Either just the type id, or `{type, generics, bindings?}`
// where generics is a list of types,
// and bindings is a list of `{id, typelist}` pairs.
if self.generics.is_some() || self.bindings.is_some() {
string.push('{');
write_optional_id(self.id, string);
string.push('{');
for generic in &self.generics.as_ref().map(Vec::as_slice).unwrap_or_default()[..] {
generic.write_to_string(string);
}
string.push('}');
if self.bindings.is_some() {
string.push('{');
for binding in &self.bindings.as_ref().map(Vec::as_slice).unwrap_or_default()[..] {
string.push('{');
binding.0.write_to_string(string);
string.push('{');
for constraint in &binding.1[..] {
constraint.write_to_string(string);
}
string.push_str("}}");
}
string.push('}');
}
string.push('}');
} else {
id.serialize(serializer)
write_optional_id(self.id, string);
}
}
}
#[derive(Clone, Copy, Debug)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum RenderTypeId {
DefId(DefId),
Primitive(clean::PrimitiveType),
@ -168,70 +182,122 @@ pub(crate) enum RenderTypeId {
Index(isize),
}
impl Serialize for RenderTypeId {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let id = match &self {
impl RenderTypeId {
pub fn write_to_string(&self, string: &mut String) {
// (sign, value)
let (sign, id): (bool, u32) = match &self {
// 0 is a sentinel, everything else is one-indexed
// concrete type
RenderTypeId::Index(idx) if *idx >= 0 => idx + 1,
RenderTypeId::Index(idx) if *idx >= 0 => (false, (idx + 1isize).try_into().unwrap()),
// generic type parameter
RenderTypeId::Index(idx) => *idx,
RenderTypeId::Index(idx) => (true, (-*idx).try_into().unwrap()),
_ => panic!("must convert render types to indexes before serializing"),
};
id.serialize(serializer)
// zig-zag encoding
let value: u32 = (id << 1) | (if sign { 1 } else { 0 });
// Self-terminating hex uses capital letters for everything but the
// least significant digit, which is lowercase. For example, decimal 17
// would be `` Aa `` if zig-zag encoding weren't used.
//
// Zig-zag encoding, however, stores the sign bit as the last bit.
// This means, in the last hexit, 1 is actually `c`, -1 is `b`
// (`a` is the imaginary -0), and, because all the bits are shifted
// by one, `` A` `` is actually 8 and `` Aa `` is -8.
//
// https://rust-lang.github.io/rustc-dev-guide/rustdoc-internals/search.html
// describes the encoding in more detail.
let mut shift: u32 = 28;
let mut mask: u32 = 0xF0_00_00_00;
while shift < 32 {
let hexit = (value & mask) >> shift;
if hexit != 0 || shift == 0 {
let hex =
char::try_from(if shift == 0 { '`' } else { '@' } as u32 + hexit).unwrap();
string.push(hex);
}
shift = shift.wrapping_sub(4);
mask = mask >> 4;
}
}
}
/// Full type of functions/methods in the search index.
#[derive(Debug)]
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct IndexItemFunctionType {
inputs: Vec<RenderType>,
output: Vec<RenderType>,
where_clause: Vec<Vec<RenderType>>,
}
impl Serialize for IndexItemFunctionType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
// If we couldn't figure out a type, just write `0`.
impl IndexItemFunctionType {
pub fn write_to_string<'a>(
&'a self,
string: &mut String,
backref_queue: &mut VecDeque<&'a IndexItemFunctionType>,
) {
assert!(backref_queue.len() <= 16);
// If we couldn't figure out a type, just write 0,
// which is encoded as `` ` `` (see RenderTypeId::write_to_string).
let has_missing = self
.inputs
.iter()
.chain(self.output.iter())
.any(|i| i.id.is_none() && i.generics.is_none());
if has_missing {
0.serialize(serializer)
string.push('`');
} else if let Some(idx) = backref_queue.iter().position(|other| *other == self) {
// The backref queue has 16 items, so backrefs use
// a single hexit, disjoint from the ones used for numbers.
string.push(
char::try_from('0' as u32 + u32::try_from(idx).unwrap())
.expect("last possible value is '?'"),
);
} else {
let mut seq = serializer.serialize_seq(None)?;
backref_queue.push_front(self);
if backref_queue.len() > 16 {
backref_queue.pop_back();
}
string.push('{');
match &self.inputs[..] {
[one] if one.generics.is_none() && one.bindings.is_none() => {
seq.serialize_element(one)?
one.write_to_string(string);
}
_ => {
string.push('{');
for item in &self.inputs[..] {
item.write_to_string(string);
}
string.push('}');
}
_ => seq.serialize_element(&self.inputs)?,
}
match &self.output[..] {
[] if self.where_clause.is_empty() => {}
[one] if one.generics.is_none() && one.bindings.is_none() => {
seq.serialize_element(one)?
one.write_to_string(string);
}
_ => {
string.push('{');
for item in &self.output[..] {
item.write_to_string(string);
}
string.push('}');
}
_ => seq.serialize_element(&self.output)?,
}
for constraint in &self.where_clause {
if let [one] = &constraint[..]
&& one.generics.is_none()
&& one.bindings.is_none()
{
seq.serialize_element(one)?;
one.write_to_string(string);
} else {
seq.serialize_element(constraint)?;
string.push('{');
for item in &constraint[..] {
item.write_to_string(string);
}
string.push('}');
}
}
seq.end()
string.push('}');
}
}
}
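
The comments in `RenderTypeId::write_to_string` above describe the encoding in prose; the following is a hedged, self-contained sketch of the same scheme (an illustration, not rustdoc's actual API), with a matching reader that mirrors `decode()` in search.js further down:

```rust
// Self-terminating hex: every hexit except the last is written as '@' + hexit
// (the capital range), the last as '`' + hexit (the lowercase range), so
// numbers need no separators between them.
fn write_u32(value: u32, out: &mut String) {
    let mut shift: u32 = 28;
    let mut mask: u32 = 0xF0_00_00_00;
    while shift < 32 {
        let hexit = (value & mask) >> shift;
        if hexit != 0 || shift == 0 {
            let base = if shift == 0 { b'`' } else { b'@' };
            out.push(char::from(base + hexit as u8));
        }
        shift = shift.wrapping_sub(4);
        mask >>= 4;
    }
}

// Zig-zag step applied before writing: non-negative ids get sign bit 0,
// negative (generic parameter) ids store their magnitude with sign bit 1.
fn zigzag(id: i64) -> u32 {
    if id >= 0 { (id as u32) << 1 } else { (((-id) as u32) << 1) | 1 }
}

// Reader mirroring `decode()` in search.js: accumulate hexits while the
// character is in the capital range, stop after the first lowercase one.
fn read_i64(bytes: &mut impl Iterator<Item = u8>) -> i64 {
    let mut n: u32 = 0;
    loop {
        let b = bytes.next().expect("truncated number");
        n = (n << 4) | u32::from(b & 0xF);
        if b >= b'`' {
            break;
        }
    }
    let (sign, value) = (n & 1, n >> 1);
    if sign == 1 { -i64::from(value) } else { i64::from(value) }
}

fn main() {
    let mut s = String::new();
    // The one-indexed id 8 (a concrete type) encodes as "A`"; the generic
    // parameter id -8 encodes as "Aa", matching the comments above.
    write_u32(zigzag(8), &mut s);
    write_u32(zigzag(-8), &mut s);
    assert_eq!(s, "A`Aa");
    let mut it = s.bytes();
    assert_eq!(read_i64(&mut it), 8);
    assert_eq!(read_i64(&mut it), -8);
}
```

A small id takes just one lowercase character: for example, id 1 encodes as `b`, so a function whose lone input and lone output both carry that id comes out of `IndexItemFunctionType::write_to_string` as `{bb}` in the `f` string.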


@ -1,5 +1,5 @@
use std::collections::hash_map::Entry;
use std::collections::BTreeMap;
use std::collections::{BTreeMap, VecDeque};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_middle::ty::TyCtxt;
@ -409,9 +409,11 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
let mut full_paths = Vec::with_capacity(self.items.len());
let mut descriptions = Vec::with_capacity(self.items.len());
let mut parents = Vec::with_capacity(self.items.len());
let mut functions = Vec::with_capacity(self.items.len());
let mut functions = String::with_capacity(self.items.len());
let mut deprecated = Vec::with_capacity(self.items.len());
let mut backref_queue = VecDeque::new();
for (index, item) in self.items.iter().enumerate() {
let n = item.ty as u8;
let c = char::try_from(n + b'A').expect("item types must fit in ASCII");
@ -434,27 +436,10 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
full_paths.push((index, &item.path));
}
// Fake option to get `0` out as a sentinel instead of `null`.
// We want to use `0` because it's three less bytes.
enum FunctionOption<'a> {
Function(&'a IndexItemFunctionType),
None,
match &item.search_type {
Some(ty) => ty.write_to_string(&mut functions, &mut backref_queue),
None => functions.push('`'),
}
impl<'a> Serialize for FunctionOption<'a> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
FunctionOption::None => 0.serialize(serializer),
FunctionOption::Function(ty) => ty.serialize(serializer),
}
}
}
functions.push(match &item.search_type {
Some(ty) => FunctionOption::Function(ty),
None => FunctionOption::None,
});
if item.deprecation.is_some() {
deprecated.push(index);

View File

@ -200,3 +200,59 @@ let FunctionSearchType;
* }}
*/
let FunctionType;
/**
* The raw search data for a given crate. `n`, `t`, `d`, `i`, and `f`
* are arrays with the same length. `q`, `a`, and `c` use a sparse
* representation for compactness.
*
* `n[i]` contains the name of an item.
*
* `t[i]` contains the type of that item
* (as a string of characters that represent an offset in `itemTypes`).
*
* `d[i]` contains the description of that item.
*
* `q` contains the full paths of the items. For compactness, it is a set of
* (index, path) pairs used to create a map. If a given index `i` is
* not present, this indicates "same as the last index present".
*
* `i[i]` contains an item's parent, usually a module. For compactness,
* it is a set of indexes into the `p` array.
*
* `f` contains function signatures, or `0` if the item isn't a function.
* More information on how they're encoded can be found in rustc-dev-guide
*
* Functions are themselves encoded as arrays. The first item is a list of
* types representing the function's inputs, and the second list item is a list
* of types representing the function's output. Tuples are flattened.
* Types are also represented as arrays; the first item is an index into the `p`
* array, while the second is a list of types representing any generic parameters.
*
* b[i] contains an item's impl disambiguator. This is only present if an item
* is defined in an impl block and the impl block's type has more than one associated
* item with the same name.
*
* `a` defines aliases with an Array of pairs: [name, offset], where `offset`
* points into the n/t/d/q/i/f arrays.
*
* `doc` contains the description of the crate.
*
* `p` is a list of path/type pairs. It is used for parents and function parameters.
*
* `c` is an array of item indices that are deprecated.
* @typedef {{
* doc: string,
* a: Object,
* n: Array<string>,
* t: String,
* d: Array<string>,
* q: Array<[Number, string]>,
* i: Array<Number>,
* f: string,
* p: Array<Object>,
* b: Array<[Number, String]>,
* c: Array<Number>
* }}
*/
let RawSearchIndexCrate;


@ -2767,19 +2767,65 @@ ${item.displayPath}<span class="${type}">${name}</span>\
* The raw function search type format is generated using serde in
* librustdoc/html/render/mod.rs: impl Serialize for IndexItemFunctionType
*
* @param {RawFunctionSearchType} functionSearchType
* @param {{
* string: string,
* offset: number,
* backrefQueue: FunctionSearchType[]
* }} itemFunctionDecoder
* @param {Array<{name: string, ty: number}>} lowercasePaths
* @param {Map<string, integer>}
*
* @return {null|FunctionSearchType}
*/
function buildFunctionSearchType(functionSearchType, lowercasePaths) {
const INPUTS_DATA = 0;
const OUTPUT_DATA = 1;
// `0` is used as a sentinel because it's fewer bytes than `null`
if (functionSearchType === 0) {
function buildFunctionSearchType(itemFunctionDecoder, lowercasePaths) {
const c = itemFunctionDecoder.string.charCodeAt(itemFunctionDecoder.offset);
itemFunctionDecoder.offset += 1;
const [zero, ua, la, ob, cb] = ["0", "@", "`", "{", "}"].map(c => c.charCodeAt(0));
// `` ` `` is used as a sentinel because it's fewer bytes than `null`, and decodes to zero
// `0` is a backref
if (c === la) {
return null;
}
// sixteen characters after "0" are backref
if (c >= zero && c < ua) {
return itemFunctionDecoder.backrefQueue[c - zero];
}
if (c !== ob) {
throw ["Unexpected ", c, " in function: expected ", "{", "; this is a bug"];
}
// call after consuming `{`
function decodeList() {
let c = itemFunctionDecoder.string.charCodeAt(itemFunctionDecoder.offset);
const ret = [];
while (c !== cb) {
ret.push(decode());
c = itemFunctionDecoder.string.charCodeAt(itemFunctionDecoder.offset);
}
itemFunctionDecoder.offset += 1; // eat cb
return ret;
}
// consumes and returns a list or integer
function decode() {
let n = 0;
let c = itemFunctionDecoder.string.charCodeAt(itemFunctionDecoder.offset);
if (c === ob) {
itemFunctionDecoder.offset += 1;
return decodeList();
}
while (c < la) {
n = (n << 4) | (c & 0xF);
itemFunctionDecoder.offset += 1;
c = itemFunctionDecoder.string.charCodeAt(itemFunctionDecoder.offset);
}
// last character >= la
n = (n << 4) | (c & 0xF);
const [sign, value] = [n & 1, n >> 1];
itemFunctionDecoder.offset += 1;
return sign ? -value : value;
}
const functionSearchType = decodeList();
const INPUTS_DATA = 0;
const OUTPUT_DATA = 1;
let inputs, output;
if (typeof functionSearchType[INPUTS_DATA] === "number") {
inputs = [buildItemSearchType(functionSearchType[INPUTS_DATA], lowercasePaths)];
@ -2808,9 +2854,14 @@ ${item.displayPath}<span class="${type}">${name}</span>\
? [buildItemSearchType(functionSearchType[i], lowercasePaths)]
: buildItemSearchTypeAll(functionSearchType[i], lowercasePaths));
}
return {
const ret = {
inputs, output, where_clause,
};
itemFunctionDecoder.backrefQueue.unshift(ret);
if (itemFunctionDecoder.backrefQueue.length > 16) {
itemFunctionDecoder.backrefQueue.pop();
}
return ret;
}
/**
@ -2924,6 +2975,11 @@ ${item.displayPath}<span class="${type}">${name}</span>\
return functionTypeFingerprint[(fullId * 4) + 3];
}
/**
* Convert raw search index into in-memory search index.
*
* @param {[string, RawSearchIndexCrate][]} rawSearchIndex
*/
function buildIndex(rawSearchIndex) {
searchIndex = [];
typeNameIdMap = new Map();
@ -2950,59 +3006,7 @@ ${item.displayPath}<span class="${type}">${name}</span>\
// This loop actually generates the search item indexes, including
// normalized names, type signature objects and fingerprints, and aliases.
id = 0;
/**
* The raw search data for a given crate. `n`, `t`, `d`, `i`, and `f`
* are arrays with the same length. `q`, `a`, and `c` use a sparse
* representation for compactness.
*
* `n[i]` contains the name of an item.
*
* `t[i]` contains the type of that item
* (as a string of characters that represent an offset in `itemTypes`).
*
* `d[i]` contains the description of that item.
*
* `q` contains the full paths of the items. For compactness, it is a set of
* (index, path) pairs used to create a map. If a given index `i` is
* not present, this indicates "same as the last index present".
*
* `i[i]` contains an item's parent, usually a module. For compactness,
* it is a set of indexes into the `p` array.
*
* `f[i]` contains function signatures, or `0` if the item isn't a function.
* Functions are themselves encoded as arrays. The first item is a list of
* types representing the function's inputs, and the second list item is a list
* of types representing the function's output. Tuples are flattened.
* Types are also represented as arrays; the first item is an index into the `p`
* array, while the second is a list of types representing any generic parameters.
*
* b[i] contains an item's impl disambiguator. This is only present if an item
* is defined in an impl block and, the impl block's type has more than one associated
* item with the same name.
*
* `a` defines aliases with an Array of pairs: [name, offset], where `offset`
* points into the n/t/d/q/i/f arrays.
*
* `doc` contains the description of the crate.
*
* `p` is a list of path/type pairs. It is used for parents and function parameters.
*
* `c` is an array of item indices that are deprecated.
*
* @type {{
* doc: string,
* a: Object,
* n: Array<string>,
* t: String,
* d: Array<string>,
* q: Array<[Number, string]>,
* i: Array<Number>,
* f: Array<RawFunctionSearchType>,
* p: Array<Object>,
* b: Array<[Number, String]>,
* c: Array<Number>
* }}
*/
for (const [crate, crateCorpus] of rawSearchIndex) {
// This object should have exactly the same set of fields as the "row"
// object defined below. Your JavaScript runtime will thank you.
@ -3039,8 +3043,12 @@ ${item.displayPath}<span class="${type}">${name}</span>\
const itemDescs = crateCorpus.d;
// an array of (Number) the parent path index + 1 to `paths`, or 0 if none
const itemParentIdxs = crateCorpus.i;
// an array of (Object | null) the type of the function, if any
const itemFunctionSearchTypes = crateCorpus.f;
// a string representing the list of function types
const itemFunctionDecoder = {
string: crateCorpus.f,
offset: 0,
backrefQueue: [],
};
// an array of (Number) indices for the deprecated items
const deprecatedItems = new Set(crateCorpus.c);
// an array of (Number) indices for the deprecated items
@ -3088,12 +3096,8 @@ ${item.displayPath}<span class="${type}">${name}</span>\
word = itemNames[i].toLowerCase();
}
const path = itemPaths.has(i) ? itemPaths.get(i) : lastPath;
let type = null;
if (itemFunctionSearchTypes[i] !== 0) {
type = buildFunctionSearchType(
itemFunctionSearchTypes[i],
lowercasePaths
);
const type = buildFunctionSearchType(itemFunctionDecoder, lowercasePaths);
if (type !== null) {
if (type) {
const fp = functionTypeFingerprint.subarray(id * 4, (id + 1) * 4);
const fps = new Set();
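
One more piece shared by the two sides shown above is the 16-entry backref queue: `write_to_string` pushes each newly written signature onto `backref_queue` and emits a repeat as the single character `'0' + index`, while `buildFunctionSearchType` keeps the mirror-image `backrefQueue`. A minimal sketch of that idea, in which the plain-string "signatures" and the `push_with_backrefs` helper are illustrative stand-ins rather than rustdoc's real types:

```rust
use std::collections::VecDeque;

// Recently emitted items live in a 16-slot queue; a repeat within that window
// is written as one character ('0' + queue position, so at most '?') instead
// of being re-encoded in full.
fn push_with_backrefs<'a>(
    item: &'a str, // stand-in for an already-encoded function signature
    out: &mut String,
    queue: &mut VecDeque<&'a str>,
) {
    if let Some(idx) = queue.iter().position(|other| *other == item) {
        out.push(char::from(b'0' + idx as u8));
    } else {
        queue.push_front(item);
        if queue.len() > 16 {
            queue.pop_back();
        }
        out.push('{');
        out.push_str(item);
        out.push('}');
    }
}

fn main() {
    let mut out = String::new();
    let mut queue = VecDeque::new();
    for sig in ["bb", "cc", "bb"] {
        push_with_backrefs(sig, &mut out, &mut queue);
    }
    // "bb" and "cc" are written in full; the second "bb" becomes the backref
    // '1', because it sits one slot behind the most recent entry "cc".
    assert_eq!(out, "{bb}{cc}1");
}
```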


@ -81,21 +81,18 @@ Number of file 0 mappings: 1
- Code(Zero) at (prev + 171, 13) to (start + 2, 14)
Function name: closure::main::{closure#14}
Raw bytes (36): 0x[01, 01, 03, 05, 0a, 01, 05, 01, 05, 05, 03, b2, 01, 0d, 00, 15, 01, 01, 11, 01, 1b, 05, 01, 1e, 00, 25, 0a, 00, 2f, 00, 33, 03, 01, 0d, 00, 0e]
Raw bytes (29): 0x[01, 01, 02, 01, 05, 05, 02, 04, 01, b2, 01, 0d, 02, 1b, 05, 02, 1e, 00, 25, 02, 00, 2f, 00, 33, 07, 01, 0d, 00, 0e]
Number of files: 1
- file 0 => global file 1
Number of expressions: 3
- expression 0 operands: lhs = Counter(1), rhs = Expression(2, Sub)
- expression 1 operands: lhs = Counter(0), rhs = Counter(1)
- expression 2 operands: lhs = Counter(0), rhs = Counter(1)
Number of file 0 mappings: 5
- Code(Expression(0, Add)) at (prev + 178, 13) to (start + 0, 21)
= (c1 + (c0 - c1))
- Code(Counter(0)) at (prev + 1, 17) to (start + 1, 27)
- Code(Counter(1)) at (prev + 1, 30) to (start + 0, 37)
- Code(Expression(2, Sub)) at (prev + 0, 47) to (start + 0, 51)
Number of expressions: 2
- expression 0 operands: lhs = Counter(0), rhs = Counter(1)
- expression 1 operands: lhs = Counter(1), rhs = Expression(0, Sub)
Number of file 0 mappings: 4
- Code(Counter(0)) at (prev + 178, 13) to (start + 2, 27)
- Code(Counter(1)) at (prev + 2, 30) to (start + 0, 37)
- Code(Expression(0, Sub)) at (prev + 0, 47) to (start + 0, 51)
= (c0 - c1)
- Code(Expression(0, Add)) at (prev + 1, 13) to (start + 0, 14)
- Code(Expression(1, Add)) at (prev + 1, 13) to (start + 0, 14)
= (c1 + (c0 - c1))
Function name: closure::main::{closure#15}
@ -118,21 +115,18 @@ Number of file 0 mappings: 6
= (c1 + (c0 - c1))
Function name: closure::main::{closure#16}
Raw bytes (36): 0x[01, 01, 03, 05, 0a, 01, 05, 01, 05, 05, 03, c4, 01, 0d, 00, 15, 01, 01, 11, 01, 1b, 05, 01, 1e, 00, 25, 0a, 00, 2f, 00, 33, 03, 01, 0d, 00, 0e]
Raw bytes (29): 0x[01, 01, 02, 01, 05, 05, 02, 04, 01, c4, 01, 0d, 02, 1b, 05, 02, 1e, 00, 25, 02, 00, 2f, 00, 33, 07, 01, 0d, 00, 0e]
Number of files: 1
- file 0 => global file 1
Number of expressions: 3
- expression 0 operands: lhs = Counter(1), rhs = Expression(2, Sub)
- expression 1 operands: lhs = Counter(0), rhs = Counter(1)
- expression 2 operands: lhs = Counter(0), rhs = Counter(1)
Number of file 0 mappings: 5
- Code(Expression(0, Add)) at (prev + 196, 13) to (start + 0, 21)
= (c1 + (c0 - c1))
- Code(Counter(0)) at (prev + 1, 17) to (start + 1, 27)
- Code(Counter(1)) at (prev + 1, 30) to (start + 0, 37)
- Code(Expression(2, Sub)) at (prev + 0, 47) to (start + 0, 51)
Number of expressions: 2
- expression 0 operands: lhs = Counter(0), rhs = Counter(1)
- expression 1 operands: lhs = Counter(1), rhs = Expression(0, Sub)
Number of file 0 mappings: 4
- Code(Counter(0)) at (prev + 196, 13) to (start + 2, 27)
- Code(Counter(1)) at (prev + 2, 30) to (start + 0, 37)
- Code(Expression(0, Sub)) at (prev + 0, 47) to (start + 0, 51)
= (c0 - c1)
- Code(Expression(0, Add)) at (prev + 1, 13) to (start + 0, 14)
- Code(Expression(1, Add)) at (prev + 1, 13) to (start + 0, 14)
= (c1 + (c0 - c1))
Function name: closure::main::{closure#17}


@ -31,14 +31,14 @@ Number of file 0 mappings: 2
- Code(Counter(0)) at (prev + 7, 6) to (start + 2, 2)
Function name: inline_dead::main::{closure#0}
Raw bytes (23): 0x[01, 01, 02, 00, 06, 01, 00, 03, 01, 07, 17, 00, 18, 00, 02, 0d, 00, 0e, 03, 02, 05, 00, 06]
Raw bytes (23): 0x[01, 01, 02, 00, 06, 01, 00, 03, 01, 07, 17, 01, 16, 00, 02, 0d, 00, 0e, 03, 02, 05, 00, 06]
Number of files: 1
- file 0 => global file 1
Number of expressions: 2
- expression 0 operands: lhs = Zero, rhs = Expression(1, Sub)
- expression 1 operands: lhs = Counter(0), rhs = Zero
Number of file 0 mappings: 3
- Code(Counter(0)) at (prev + 7, 23) to (start + 0, 24)
- Code(Counter(0)) at (prev + 7, 23) to (start + 1, 22)
- Code(Zero) at (prev + 2, 13) to (start + 0, 14)
- Code(Expression(0, Add)) at (prev + 2, 5) to (start + 0, 6)
= (Zero + (c0 - Zero))


@ -5,7 +5,7 @@
LL| 1| println!("{}", live::<false>());
LL| 1|
LL| 1| let f = |x: bool| {
LL| | debug_assert!(
LL| 1| debug_assert!(
LL| 0| x
LL| | );
LL| 1| };


@ -1,3 +1,6 @@
// FIXME(fmease, #119216): Reenable this test!
// ignore-test
pub struct Inner<T> {
field: T,
}


@ -0,0 +1,8 @@
#![feature(diagnostic_namespace)]
#[diagnostic::on_unimplemented(
message = "Message",
note = "Note",
label = "label"
)]
pub trait Foo {}


@ -0,0 +1,12 @@
// aux-build:other.rs
extern crate other;
use other::Foo;
fn take_foo(_: impl Foo) {}
fn main() {
take_foo(());
//~^ERROR Message
}


@ -0,0 +1,19 @@
error[E0277]: Message
--> $DIR/error_is_shown_in_downstream_crates.rs:10:14
|
LL | take_foo(());
| -------- ^^ label
| |
| required by a bound introduced by this call
|
= help: the trait `Foo` is not implemented for `()`
= note: Note
note: required by a bound in `take_foo`
--> $DIR/error_is_shown_in_downstream_crates.rs:7:21
|
LL | fn take_foo(_: impl Foo) {}
| ^^^ required by this bound in `take_foo`
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0277`.


@ -1,9 +0,0 @@
// build-fail
// compile-flags: -C debug-assertions
#![deny(arithmetic_overflow)]
fn main() {
let _n = 1i64 >> [64][0];
//~^ ERROR: this arithmetic operation will overflow
}


@ -1,14 +0,0 @@
error: this arithmetic operation will overflow
--> $DIR/overflowing-rsh-6.rs:7:14
|
LL | let _n = 1i64 >> [64][0];
| ^^^^^^^^^^^^^^^ attempt to shift right by `64_i32`, which would overflow
|
note: the lint level is defined here
--> $DIR/overflowing-rsh-6.rs:4:9
|
LL | #![deny(arithmetic_overflow)]
| ^^^^^^^^^^^^^^^^^^^
error: aborting due to 1 previous error


@ -50,6 +50,9 @@ crate0::{{expn1}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt:
crate0::{{expn2}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "produce_it")
crate0::{{expn3}}: parent: crate0::{{expn2}}, call_site_ctxt: #3, def_site_ctxt: #0, kind: Macro(Bang, "meta_macro::print_def_site")
crate0::{{expn4}}: parent: crate0::{{expn3}}, call_site_ctxt: #4, def_site_ctxt: #0, kind: Macro(Bang, "$crate::dummy")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "diagnostic::on_unimplemented")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "diagnostic::on_unimplemented")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "diagnostic::on_unimplemented")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "include")


@ -73,6 +73,9 @@ crate0::{{expn1}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt:
crate0::{{expn2}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "outer")
crate0::{{expn3}}: parent: crate0::{{expn2}}, call_site_ctxt: #3, def_site_ctxt: #3, kind: Macro(Bang, "inner")
crate0::{{expn4}}: parent: crate0::{{expn3}}, call_site_ctxt: #5, def_site_ctxt: #0, kind: Macro(Bang, "print_bang")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "diagnostic::on_unimplemented")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "diagnostic::on_unimplemented")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "diagnostic::on_unimplemented")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive")
crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "include")


@ -0,0 +1,12 @@
mod m {
pub struct Uuid(());
impl Uuid {
pub fn encode_buffer() -> [u8; LENGTH] {
[]
}
}
const LENGTH: usize = 0;
}
pub use m::Uuid;


@ -0,0 +1,10 @@
// aux-build:suggest-constructor-cycle-error.rs
// Regression test for https://github.com/rust-lang/rust/issues/119625
extern crate suggest_constructor_cycle_error as a;
const CONST_NAME: a::Uuid = a::Uuid(());
//~^ ERROR: cannot initialize a tuple struct which contains private fields [E0423]
fn main() {}


@ -0,0 +1,15 @@
error[E0423]: cannot initialize a tuple struct which contains private fields
--> $DIR/suggest-constructor-cycle-error.rs:7:29
|
LL | const CONST_NAME: a::Uuid = a::Uuid(());
| ^^^^^^^
|
note: constructor is not visible here due to private fields
--> $DIR/auxiliary/suggest-constructor-cycle-error.rs:2:21
|
LL | pub struct Uuid(());
| ^^ private field
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0423`.


@ -0,0 +1,21 @@
// Regression test for issue #119295.
#![feature(type_alias_impl_trait)]
type Bar<T> = T;
type S<const A: usize> = [i32; A];
extern "C" {
pub fn lint_me(
x: Bar<
S<
{ //~ ERROR mismatched types
type B<Z> = impl Sized;
//~^ ERROR unconstrained opaque type
},
>,
>,
);
}
fn main() {}


@ -0,0 +1,20 @@
error: unconstrained opaque type
--> $DIR/nested-in-anon-const.rs:13:33
|
LL | type B<Z> = impl Sized;
| ^^^^^^^^^^
|
= note: `B` must be used in combination with a concrete type within the same item
error[E0308]: mismatched types
--> $DIR/nested-in-anon-const.rs:12:17
|
LL | / {
LL | | type B<Z> = impl Sized;
LL | |
LL | | },
| |_________________^ expected `usize`, found `()`
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0308`.