Use .into_diagnostic() less.

This commit replaces this pattern:
```
err.into_diagnostic(dcx)
```
with this pattern:
```
dcx.create_err(err)
```
in a lot of places.

It's a little shorter, makes the error level explicit, avoids some
`IntoDiagnostic` imports, and is a necessary prerequisite for the next
commit which will add a `level` arg to `into_diagnostic`.
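
As a rough before/after sketch (the `ExampleError` struct and its field are invented for illustration, not code from this commit), a typical call site changes like this:
```
// Before: the error value converts itself via the `IntoDiagnostic` trait,
// so the calling module needs the trait in scope and the level is implicit.
return Err(ExampleError { span }.into_diagnostic(dcx));

// After: the diagnostic context creates the error; `create_err` names the
// level and no `IntoDiagnostic` import is needed at the call site.
return Err(dcx.create_err(ExampleError { span }));
```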

This requires adding `track_caller` on `create_err` to avoid mucking up
the output of `tests/ui/track-diagnostics/track4.rs`. It probably should
have been there already.
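
For context, a minimal standalone sketch (plain Rust, not compiler internals) of what `#[track_caller]` does: it makes `Location::caller()` inside the attributed function report the call site rather than the function body, which is presumably the position that the track-diagnostics test output checks.
```
// Standalone illustration of #[track_caller]; not rustc code.
#[track_caller]
fn create_err(msg: &str) {
    // With #[track_caller], this reports the caller's file/line;
    // without it, it would report this line inside create_err.
    let loc = std::panic::Location::caller();
    println!("error: {msg} (created at {}:{})", loc.file(), loc.line());
}

fn main() {
    create_err("something went wrong");
}
```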
Nicholas Nethercote 2023-12-18 14:00:17 +11:00
parent cda4736f1e
commit cea683c08f
13 changed files with 109 additions and 130 deletions

@@ -1274,6 +1274,7 @@ impl DiagCtxt {
         self.create_err(err).emit()
     }

+    #[track_caller]
     pub fn create_err<'a>(
         &'a self,
         err: impl IntoDiagnostic<'a>,

@@ -1204,11 +1204,10 @@ pub fn resolve_path(
                 .expect("attempting to resolve a file path in an external file"),
             FileName::DocTest(path, _) => path,
             other => {
-                return Err(errors::ResolveRelativePath {
+                return Err(parse_sess.dcx.create_err(errors::ResolveRelativePath {
                     span,
                     path: parse_sess.source_map().filename_for_diagnostics(&other).to_string(),
-                }
-                .into_diagnostic(&parse_sess.dcx));
+                }));
             }
         };
         result.pop();

@@ -5,7 +5,6 @@ use crate::errors::{
 use crate::infer::error_reporting::TypeErrCtxt;
 use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
 use crate::infer::InferCtxt;
-use rustc_errors::IntoDiagnostic;
 use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed, IntoDiagnosticArg};
 use rustc_hir as hir;
 use rustc_hir::def::Res;
@@ -367,7 +366,7 @@ impl<'tcx> InferCtxt<'tcx> {
         let multi_suggestions = Vec::new();
         let bad_label = Some(arg_data.make_bad_error(span));
         match error_code {
-            TypeAnnotationNeeded::E0282 => AnnotationRequired {
+            TypeAnnotationNeeded::E0282 => self.tcx.sess.dcx().create_err(AnnotationRequired {
                 span,
                 source_kind,
                 source_name,
@@ -375,9 +374,8 @@ impl<'tcx> InferCtxt<'tcx> {
                 infer_subdiags,
                 multi_suggestions,
                 bad_label,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0283 => AmbiguousImpl {
+            }),
+            TypeAnnotationNeeded::E0283 => self.tcx.sess.dcx().create_err(AmbiguousImpl {
                 span,
                 source_kind,
                 source_name,
@@ -385,9 +383,8 @@ impl<'tcx> InferCtxt<'tcx> {
                 infer_subdiags,
                 multi_suggestions,
                 bad_label,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0284 => AmbiguousReturn {
+            }),
+            TypeAnnotationNeeded::E0284 => self.tcx.sess.dcx().create_err(AmbiguousReturn {
                 span,
                 source_kind,
                 source_name,
@@ -395,8 +392,7 @@ impl<'tcx> InferCtxt<'tcx> {
                 infer_subdiags,
                 multi_suggestions,
                 bad_label,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
+            }),
         }
     }
 }
@@ -574,7 +570,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
             }
         }
         match error_code {
-            TypeAnnotationNeeded::E0282 => AnnotationRequired {
+            TypeAnnotationNeeded::E0282 => self.tcx.sess.dcx().create_err(AnnotationRequired {
                 span,
                 source_kind,
                 source_name: &name,
@@ -582,9 +578,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 infer_subdiags,
                 multi_suggestions,
                 bad_label: None,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0283 => AmbiguousImpl {
+            }),
+            TypeAnnotationNeeded::E0283 => self.tcx.sess.dcx().create_err(AmbiguousImpl {
                 span,
                 source_kind,
                 source_name: &name,
@@ -592,9 +587,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 infer_subdiags,
                 multi_suggestions,
                 bad_label: None,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0284 => AmbiguousReturn {
+            }),
+            TypeAnnotationNeeded::E0284 => self.tcx.sess.dcx().create_err(AmbiguousReturn {
                 span,
                 source_kind,
                 source_name: &name,
@@ -602,8 +596,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 infer_subdiags,
                 multi_suggestions,
                 bad_label: None,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
+            }),
         }
     }
 }

@@ -5,9 +5,7 @@ use crate::errors::{
 use crate::fluent_generated as fluent;
 use crate::infer::error_reporting::{note_and_explain_region, TypeErrCtxt};
 use crate::infer::{self, SubregionOrigin};
-use rustc_errors::{
-    AddToDiagnostic, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic,
-};
+use rustc_errors::{AddToDiagnostic, Diagnostic, DiagnosticBuilder, ErrorGuaranteed};
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_middle::traits::ObligationCauseCode;
 use rustc_middle::ty::error::TypeError;
@@ -136,11 +134,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                     note_and_explain::PrefixKind::ContentValidFor,
                     note_and_explain::SuffixKind::Empty,
                 );
-                OutlivesContent {
+                self.tcx.sess.dcx().create_err(OutlivesContent {
                     span,
                     notes: reference_valid.into_iter().chain(content_valid).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::RelateObjectBound(span) => {
                 let object_valid = note_and_explain::RegionExplanation::new(
@@ -157,11 +154,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                     note_and_explain::PrefixKind::SourcePointerValidFor,
                     note_and_explain::SuffixKind::Empty,
                 );
-                OutlivesBound {
+                self.tcx.sess.dcx().create_err(OutlivesBound {
                     span,
                     notes: object_valid.into_iter().chain(pointer_valid).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::RelateParamBound(span, ty, opt_span) => {
                 let prefix = match *sub {
@@ -176,8 +172,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 let note = note_and_explain::RegionExplanation::new(
                     self.tcx, sub, opt_span, prefix, suffix,
                 );
-                FulfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
-                    .into_diagnostic(self.tcx.sess.dcx())
+                self.tcx.sess.dcx().create_err(FulfillReqLifetime {
+                    span,
+                    ty: self.resolve_vars_if_possible(ty),
+                    note,
+                })
             }
             infer::RelateRegionParamBound(span) => {
                 let param_instantiated = note_and_explain::RegionExplanation::new(
@@ -194,11 +193,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                     note_and_explain::PrefixKind::LfParamMustOutlive,
                     note_and_explain::SuffixKind::Empty,
                 );
-                LfBoundNotSatisfied {
+                self.tcx.sess.dcx().create_err(LfBoundNotSatisfied {
                     span,
                     notes: param_instantiated.into_iter().chain(param_must_outlive).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::ReferenceOutlivesReferent(ty, span) => {
                 let pointer_valid = note_and_explain::RegionExplanation::new(
@@ -215,12 +213,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                     note_and_explain::PrefixKind::DataValidFor,
                     note_and_explain::SuffixKind::Empty,
                 );
-                RefLongerThanData {
+                self.tcx.sess.dcx().create_err(RefLongerThanData {
                     span,
                     ty: self.resolve_vars_if_possible(ty),
                     notes: pointer_valid.into_iter().chain(data_valid).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::CompareImplItemObligation { span, impl_item_def_id, trait_item_def_id } => {
                 let mut err = self.report_extra_impl_obligation(
@@ -277,11 +274,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                     note_and_explain::PrefixKind::LfMustOutlive,
                     note_and_explain::SuffixKind::Empty,
                 );
-                LfBoundNotSatisfied {
+                self.tcx.sess.dcx().create_err(LfBoundNotSatisfied {
                     span,
                     notes: instantiated.into_iter().chain(must_outlive).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
         };
         if sub.is_error() || sup.is_error() {

@@ -5,7 +5,7 @@ use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle
 use rustc_ast as ast;
 use rustc_ast::attr;
 use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_errors::{error_code, Diagnostic, IntoDiagnostic, PResult};
+use rustc_errors::{error_code, Diagnostic, PResult};
 use rustc_span::{sym, BytePos, Span};
 use thin_vec::ThinVec;
 use tracing::debug;
@@ -416,8 +416,9 @@ impl<'a> Parser<'a> {
                 Err(err) => err.cancel(),
             }

-            Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
-                .into_diagnostic(self.dcx()))
+            Err(self
+                .dcx()
+                .create_err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }))
         }
     }

@@ -35,7 +35,7 @@ use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{
     pluralize, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
-    DiagnosticMessage, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan, PResult,
+    DiagnosticMessage, ErrorGuaranteed, FatalError, MultiSpan, PResult,
 };
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::Spanned;
@@ -280,11 +280,10 @@ impl<'a> Parser<'a> {
         recover: bool,
     ) -> PResult<'a, (Ident, /* is_raw */ bool)> {
         if let TokenKind::DocComment(..) = self.prev_token.kind {
-            return Err(DocCommentDoesNotDocumentAnything {
+            return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
                 span: self.prev_token.span,
                 missing_comma: None,
-            }
-            .into_diagnostic(self.dcx()));
+            }));
         }

         let valid_follow = &[
@@ -347,7 +346,7 @@ impl<'a> Parser<'a> {
             suggest_remove_comma,
             help_cannot_start_number,
         };
-        let mut err = err.into_diagnostic(self.dcx());
+        let mut err = self.dcx().create_err(err);

         // if the token we have is a `<`
         // it *might* be a misplaced generic
@@ -1410,7 +1409,7 @@ impl<'a> Parser<'a> {
                         // Not entirely sure now, but we bubble the error up with the
                         // suggestion.
                         self.restore_snapshot(snapshot);
-                        Err(err.into_diagnostic(self.dcx()))
+                        Err(self.dcx().create_err(err))
                     }
                 }
             } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
@@ -1425,7 +1424,7 @@ impl<'a> Parser<'a> {
                 }
                 // Consume the fn call arguments.
                 match self.consume_fn_args() {
-                    Err(()) => Err(err.into_diagnostic(self.dcx())),
+                    Err(()) => Err(self.dcx().create_err(err)),
                     Ok(()) => {
                         self.sess.emit_err(err);
                         // FIXME: actually check that the two expressions in the binop are
@@ -1451,7 +1450,7 @@ impl<'a> Parser<'a> {
                 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
             } else {
                 // These cases cause too many knock-down errors, bail out (#61329).
-                Err(err.into_diagnostic(self.dcx()))
+                Err(self.dcx().create_err(err))
             }
         };
     }
@@ -2539,7 +2538,7 @@ impl<'a> Parser<'a> {
             Ok(Some(GenericArg::Const(self.parse_const_arg()?)))
         } else {
             let after_kw_const = self.token.span;
-            self.recover_const_arg(after_kw_const, err.into_diagnostic(self.dcx())).map(Some)
+            self.recover_const_arg(after_kw_const, self.dcx().create_err(err)).map(Some)
         }
     }
@@ -2893,11 +2892,10 @@ impl<'a> Parser<'a> {
                 let (a_span, b_span) = (a.span(), b.span());
                 let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
                 if self.span_to_snippet(between_span).as_deref() == Ok(":: ") {
-                    return Err(DoubleColonInBound {
+                    return Err(self.dcx().create_err(DoubleColonInBound {
                         span: path.span.shrink_to_hi(),
                         between: between_span,
-                    }
-                    .into_diagnostic(self.dcx()));
+                    }));
                 }
             }
         }

@@ -26,8 +26,8 @@ use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_errors::{
-    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic,
-    PResult, StashKey,
+    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, PResult,
+    StashKey,
 };
 use rustc_macros::Subdiagnostic;
 use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
@@ -1255,21 +1255,21 @@ impl<'a> Parser<'a> {
                 // that of the open delim in `TokenTreesReader::parse_token_tree`, even if they are different.
                 self.span_to_snippet(close_paren).is_ok_and(|snippet| snippet == ")")
             {
-                let mut replacement_err = errors::ParenthesesWithStructFields {
-                    span,
-                    r#type: path,
-                    braces_for_struct: errors::BracesForStructLiteral {
-                        first: open_paren,
-                        second: close_paren,
-                    },
-                    no_fields_for_fn: errors::NoFieldsForFnCall {
-                        fields: fields
-                            .into_iter()
-                            .map(|field| field.span.until(field.expr.span))
-                            .collect(),
-                    },
-                }
-                .into_diagnostic(self.dcx());
+                let mut replacement_err =
+                    self.dcx().create_err(errors::ParenthesesWithStructFields {
+                        span,
+                        r#type: path,
+                        braces_for_struct: errors::BracesForStructLiteral {
+                            first: open_paren,
+                            second: close_paren,
+                        },
+                        no_fields_for_fn: errors::NoFieldsForFnCall {
+                            fields: fields
+                                .into_iter()
+                                .map(|field| field.span.until(field.expr.span))
+                                .collect(),
+                        },
+                    });
                 replacement_err.emit();

                 let old_err = mem::replace(err, replacement_err);
@@ -1883,8 +1883,7 @@ impl<'a> Parser<'a> {
         self.bump(); // `#`
         let Some((ident, false)) = self.token.ident() else {
-            let err =
-                errors::ExpectedBuiltinIdent { span: self.token.span }.into_diagnostic(self.dcx());
+            let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
             return Err(err);
         };
         self.sess.gated_spans.gate(sym::builtin_syntax, ident.span);
@@ -1894,8 +1893,10 @@ impl<'a> Parser<'a> {
         let ret = if let Some(res) = parse(self, lo, ident)? {
             Ok(res)
         } else {
-            let err = errors::UnknownBuiltinConstruct { span: lo.to(ident.span), name: ident.name }
-                .into_diagnostic(self.dcx());
+            let err = self.dcx().create_err(errors::UnknownBuiltinConstruct {
+                span: lo.to(ident.span),
+                name: ident.name,
+            });
             return Err(err);
         };
         self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;
@@ -1958,8 +1959,9 @@ impl<'a> Parser<'a> {
             && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
             && matches!(e.kind, ExprKind::Err)
         {
-            let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
-                .into_diagnostic(self.dcx());
+            let mut err = self
+                .dcx()
+                .create_err(errors::InvalidInterpolatedExpression { span: self.token.span });
             err.downgrade_to_delayed_bug();
             return Err(err);
         }
@@ -2168,10 +2170,10 @@ impl<'a> Parser<'a> {
                         .span_to_snippet(snapshot.token.span)
                         .is_ok_and(|snippet| snippet == "]") =>
                 {
-                    return Err(errors::MissingSemicolonBeforeArray {
+                    return Err(self.dcx().create_err(errors::MissingSemicolonBeforeArray {
                         open_delim: open_delim_span,
                         semicolon: prev_span.shrink_to_hi(),
-                    }.into_diagnostic(self.dcx()));
+                    }));
                 }
                 Ok(_) => (),
                 Err(err) => err.cancel(),
@@ -2318,8 +2320,9 @@ impl<'a> Parser<'a> {
             // Check for `move async` and recover
             if self.check_keyword(kw::Async) {
                 let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
-                Err(errors::AsyncMoveOrderIncorrect { span: move_async_span }
-                    .into_diagnostic(self.dcx()))
+                Err(self
+                    .dcx()
+                    .create_err(errors::AsyncMoveOrderIncorrect { span: move_async_span }))
             } else {
                 Ok(CaptureBy::Value { move_kw: move_kw_span })
             }
@@ -2509,7 +2512,7 @@ impl<'a> Parser<'a> {
             };
             if self.prev_token.kind == token::BinOp(token::Or) {
                 // This was part of a closure, the that part of the parser recover.
-                return Err(err.into_diagnostic(self.dcx()));
+                return Err(self.dcx().create_err(err));
             } else {
                 Some(self.sess.emit_err(err))
             }
@@ -3193,7 +3196,7 @@ impl<'a> Parser<'a> {
     fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
         let (attrs, body) = self.parse_inner_attrs_and_block()?;
         if self.eat_keyword(kw::Catch) {
-            Err(errors::CatchAfterTry { span: self.prev_token.span }.into_diagnostic(self.dcx()))
+            Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
         } else {
             let span = span_lo.to(body.span);
             self.sess.gated_spans.gate(sym::try_blocks, span);
@@ -3530,12 +3533,11 @@ impl<'a> Parser<'a> {
                 || t == &token::CloseDelim(Delimiter::Parenthesis)
             });
             if is_wrong {
-                return Err(errors::ExpectedStructField {
+                return Err(this.dcx().create_err(errors::ExpectedStructField {
                     span: this.look_ahead(1, |t| t.span),
                     ident_span: this.token.span,
                     token: this.look_ahead(1, |t| t.clone()),
-                }
-                .into_diagnostic(&self.sess.dcx));
+                }));
             }
             let (ident, expr) = if is_shorthand {
                 // Mimic `x: x` for the `x` field shorthand.

@@ -10,10 +10,7 @@ use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
 use rustc_ast::{self as ast};
 use rustc_ast_pretty::pprust;
-use rustc_errors::{
-    struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
-    StashKey,
-};
+use rustc_errors::{struct_span_err, Applicability, PResult, StashKey};
 use rustc_span::edit_distance::edit_distance;
 use rustc_span::edition::Edition;
 use rustc_span::source_map;
@@ -438,7 +435,7 @@ impl<'a> Parser<'a> {
             None
         };
-        if let Some(err) = err { Err(err.into_diagnostic(self.dcx())) } else { Ok(()) }
+        if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) }
     }

     fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> {
@@ -1373,8 +1370,7 @@ impl<'a> Parser<'a> {
         };
         let span = self.prev_token.span.shrink_to_hi();
-        let err: DiagnosticBuilder<'_, ErrorGuaranteed> =
-            errors::MissingConstType { span, colon, kind }.into_diagnostic(self.dcx());
+        let err = self.dcx().create_err(errors::MissingConstType { span, colon, kind });
         err.stash(span, StashKey::ItemNoType);

         // The user intended that the type be inferred,
@@ -1391,7 +1387,7 @@ impl<'a> Parser<'a> {
                 self.bump();
                 self.sess.emit_err(err);
             } else {
-                return Err(err.into_diagnostic(self.dcx()));
+                return Err(self.dcx().create_err(err));
             }
         }
@@ -1591,7 +1587,7 @@ impl<'a> Parser<'a> {
         } else {
             let err =
                 errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
-            return Err(err.into_diagnostic(self.dcx()));
+            return Err(self.dcx().create_err(err));
         };

         Ok((class_name, ItemKind::Struct(vdata, generics)))
@@ -1787,7 +1783,7 @@ impl<'a> Parser<'a> {
                         let sp = previous_span.shrink_to_hi();
                         err.missing_comma = Some(sp);
                     }
-                    return Err(err.into_diagnostic(self.dcx()));
+                    return Err(self.dcx().create_err(err));
                 }
             }
             _ => {

@@ -32,9 +32,7 @@ use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::PResult;
-use rustc_errors::{
-    Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
-};
+use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, MultiSpan};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{Span, DUMMY_SP};
@@ -1500,14 +1498,13 @@ pub(crate) fn make_unclosed_delims_error(
     if let Some(sp) = unmatched.unclosed_span {
         spans.push(sp);
     };
-    let err = MismatchedClosingDelimiter {
+    let err = sess.dcx.create_err(MismatchedClosingDelimiter {
         spans,
         delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
         unmatched: unmatched.found_span,
         opening_candidate: unmatched.candidate_span,
         unclosed: unmatched.unclosed_span,
-    }
-    .into_diagnostic(&sess.dcx);
+    });
     Some(err)
 }

@@ -2,7 +2,6 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Nonterminal::*, NonterminalKind, Token};
 use rustc_ast::HasTokens;
 use rustc_ast_pretty::pprust;
-use rustc_errors::IntoDiagnostic;
 use rustc_errors::PResult;
 use rustc_span::symbol::{kw, Ident};
@@ -114,9 +113,9 @@ impl<'a> Parser<'a> {
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
                 Some(item) => NtItem(item),
                 None => {
-                    return Err(
-                        UnexpectedNonterminal::Item(self.token.span).into_diagnostic(self.dcx())
-                    );
+                    return Err(self
+                        .dcx()
+                        .create_err(UnexpectedNonterminal::Item(self.token.span)));
                 }
             },
             NonterminalKind::Block => {
@@ -127,8 +126,9 @@ impl<'a> Parser<'a> {
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
                 Some(s) => NtStmt(P(s)),
                 None => {
-                    return Err(UnexpectedNonterminal::Statement(self.token.span)
-                        .into_diagnostic(self.dcx()));
+                    return Err(self
+                        .dcx()
+                        .create_err(UnexpectedNonterminal::Statement(self.token.span)));
                 }
             },
             NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
@@ -160,11 +160,10 @@ impl<'a> Parser<'a> {
                 NtIdent(ident, is_raw)
             }
             NonterminalKind::Ident => {
-                return Err(UnexpectedNonterminal::Ident {
+                return Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
                     span: self.token.span,
                     token: self.token.clone(),
-                }
-                .into_diagnostic(self.dcx()));
+                }));
             }
             NonterminalKind::Path => {
                 NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?))
@@ -178,11 +177,10 @@ impl<'a> Parser<'a> {
                 if self.check_lifetime() {
                     NtLifetime(self.expect_lifetime().ident)
                 } else {
-                    return Err(UnexpectedNonterminal::Lifetime {
+                    return Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
                         span: self.token.span,
                         token: self.token.clone(),
-                    }
-                    .into_diagnostic(self.dcx()));
+                    }));
                 }
             }
         };

@@ -18,7 +18,7 @@ use rustc_ast::{
     PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
 };
 use rustc_ast_pretty::pprust;
-use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult};
+use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::{respan, Spanned};
 use rustc_span::symbol::{kw, sym, Ident};
@@ -872,8 +872,9 @@ impl<'a> Parser<'a> {
         // binding mode then we do not end up here, because the lookahead
         // will direct us over to `parse_enum_variant()`.
        if self.token == token::OpenDelim(Delimiter::Parenthesis) {
-            return Err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }
-                .into_diagnostic(self.dcx()));
+            return Err(self
+                .dcx()
+                .create_err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }));
        }

        Ok(PatKind::Ident(binding_annotation, ident, sub))
@@ -986,8 +987,8 @@ impl<'a> Parser<'a> {
             // check that a comma comes after every field
             if !ate_comma {
-                let mut err = ExpectedCommaAfterPatternField { span: self.token.span }
-                    .into_diagnostic(self.dcx());
+                let mut err =
+                    self.dcx().create_err(ExpectedCommaAfterPatternField { span: self.token.span });
                 if let Some(mut delayed) = delayed_err {
                     delayed.emit();
                 }

@@ -9,7 +9,7 @@ use rustc_ast::{
     AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
     Path, PathSegment, QSelf,
 };
-use rustc_errors::{Applicability, IntoDiagnostic, PResult};
+use rustc_errors::{Applicability, PResult};
 use rustc_span::symbol::{kw, sym, Ident};
 use rustc_span::{BytePos, Span};
 use std::mem;
@@ -318,15 +318,14 @@ impl<'a> Parser<'a> {
                         })
                     {
                         err.cancel();
-                        err = PathSingleColon {
+                        err = self.dcx().create_err(PathSingleColon {
                             span: self.token.span,
                             type_ascription: self
                                 .sess
                                 .unstable_features
                                 .is_nightly_build()
                                 .then_some(()),
-                        }
-                        .into_diagnostic(self.dcx());
+                        });
                     }
                     // Attempt to find places where a missing `>` might belong.
                     else if let Some(arg) = args

@@ -4,9 +4,7 @@ use crate::query::plumbing::CycleError;
 use crate::query::DepKind;
 use crate::query::{QueryContext, QueryStackFrame};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::{
-    DiagCtxt, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, Level,
-};
+use rustc_errors::{DiagCtxt, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, Level};
 use rustc_hir::def::DefKind;
 use rustc_session::Session;
 use rustc_span::Span;
@@ -604,7 +602,7 @@ pub(crate) fn report_cycle<'a>(
         note_span: (),
     };

-    cycle_diag.into_diagnostic(sess.dcx())
+    sess.dcx().create_err(cycle_diag)
 }

 pub fn print_query_stack<Qcx: QueryContext>(