Use .into_diagnostic() less.
This commit replaces this pattern:
```
err.into_diagnostic(dcx)
```
with this pattern:
```
dcx.create_err(err)
```
in a lot of places. It's a little shorter, makes the error level explicit, avoids some `IntoDiagnostic` imports, and is a necessary prerequisite for the next commit, which will add a `level` arg to `into_diagnostic`.

This requires adding `track_caller` on `create_err` to avoid mucking up the output of `tests/ui/track-diagnostics/track4.rs`. It probably should have been there already.
parent cda4736f1e
commit cea683c08f
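To make the two call shapes concrete, here is a minimal, self-contained sketch. The `DiagCtxt`, `IntoDiagnostic`, `DiagnosticBuilder`, and `CatchAfterTry` types below are toy stand-ins, not the real `rustc_errors` API; the point is only to contrast `err.into_diagnostic(dcx)` with `dcx.create_err(err)` and to show why `#[track_caller]` on `create_err` keeps the reported location at the caller (which is what `tests/ui/track-diagnostics/track4.rs` observes).

```rust
use std::panic::Location;

struct DiagCtxt;

struct DiagnosticBuilder {
    msg: String,
    emitted_at: &'static Location<'static>,
}

trait IntoDiagnostic {
    fn into_diagnostic(self, dcx: &DiagCtxt) -> DiagnosticBuilder;
}

impl DiagCtxt {
    // `#[track_caller]` makes `Location::caller()` report the `create_err`
    // call site rather than a location inside this wrapper.
    #[track_caller]
    fn create_err(&self, err: impl IntoDiagnostic) -> DiagnosticBuilder {
        let mut diag = err.into_diagnostic(self);
        diag.emitted_at = Location::caller();
        diag
    }
}

// Toy diagnostic struct; in rustc this would be a #[derive(Diagnostic)] type.
struct CatchAfterTry {
    span: u32,
}

impl IntoDiagnostic for CatchAfterTry {
    fn into_diagnostic(self, _dcx: &DiagCtxt) -> DiagnosticBuilder {
        DiagnosticBuilder {
            msg: format!("keyword `catch` cannot follow a `try` block (span {})", self.span),
            // Without #[track_caller] this records a location inside this impl,
            // not the place that actually produced the error.
            emitted_at: Location::caller(),
        }
    }
}

fn main() {
    let dcx = DiagCtxt;

    // Old pattern: the error value drives the conversion.
    let old = CatchAfterTry { span: 1 }.into_diagnostic(&dcx);

    // New pattern: the diagnostic context creates the error.
    let new = dcx.create_err(CatchAfterTry { span: 2 });

    println!("{} at {}", old.msg, old.emitted_at);
    println!("{} at {}", new.msg, new.emitted_at);
}
```

In the real compiler both forms still go through `IntoDiagnostic`; `create_err` is simply the call shape this commit standardises on, so the later `level` argument only has to be threaded through one place.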
@@ -1274,6 +1274,7 @@ pub fn emit_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> ErrorGuaranteed {
         self.create_err(err).emit()
     }
 
+    #[track_caller]
     pub fn create_err<'a>(
         &'a self,
         err: impl IntoDiagnostic<'a>,
@@ -1204,11 +1204,10 @@ pub fn resolve_path(
                 .expect("attempting to resolve a file path in an external file"),
             FileName::DocTest(path, _) => path,
             other => {
-                return Err(errors::ResolveRelativePath {
+                return Err(parse_sess.dcx.create_err(errors::ResolveRelativePath {
                     span,
                     path: parse_sess.source_map().filename_for_diagnostics(&other).to_string(),
-                }
-                .into_diagnostic(&parse_sess.dcx));
+                }));
             }
         };
         result.pop();
@@ -5,7 +5,6 @@
 use crate::infer::error_reporting::TypeErrCtxt;
 use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
 use crate::infer::InferCtxt;
-use rustc_errors::IntoDiagnostic;
 use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed, IntoDiagnosticArg};
 use rustc_hir as hir;
 use rustc_hir::def::Res;
@@ -367,7 +366,7 @@ fn bad_inference_failure_err(
         let multi_suggestions = Vec::new();
         let bad_label = Some(arg_data.make_bad_error(span));
         match error_code {
-            TypeAnnotationNeeded::E0282 => AnnotationRequired {
+            TypeAnnotationNeeded::E0282 => self.tcx.sess.dcx().create_err(AnnotationRequired {
                 span,
                 source_kind,
                 source_name,
@@ -375,9 +374,8 @@ fn bad_inference_failure_err(
                 infer_subdiags,
                 multi_suggestions,
                 bad_label,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0283 => AmbiguousImpl {
+            }),
+            TypeAnnotationNeeded::E0283 => self.tcx.sess.dcx().create_err(AmbiguousImpl {
                 span,
                 source_kind,
                 source_name,
@@ -385,9 +383,8 @@ fn bad_inference_failure_err(
                 infer_subdiags,
                 multi_suggestions,
                 bad_label,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0284 => AmbiguousReturn {
+            }),
+            TypeAnnotationNeeded::E0284 => self.tcx.sess.dcx().create_err(AmbiguousReturn {
                 span,
                 source_kind,
                 source_name,
@@ -395,8 +392,7 @@ fn bad_inference_failure_err(
                 infer_subdiags,
                 multi_suggestions,
                 bad_label,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
+            }),
         }
     }
 }
@@ -574,7 +570,7 @@ pub fn emit_inference_failure_err(
             }
         }
         match error_code {
-            TypeAnnotationNeeded::E0282 => AnnotationRequired {
+            TypeAnnotationNeeded::E0282 => self.tcx.sess.dcx().create_err(AnnotationRequired {
                 span,
                 source_kind,
                 source_name: &name,
@@ -582,9 +578,8 @@ pub fn emit_inference_failure_err(
                 infer_subdiags,
                 multi_suggestions,
                 bad_label: None,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0283 => AmbiguousImpl {
+            }),
+            TypeAnnotationNeeded::E0283 => self.tcx.sess.dcx().create_err(AmbiguousImpl {
                 span,
                 source_kind,
                 source_name: &name,
@@ -592,9 +587,8 @@ pub fn emit_inference_failure_err(
                 infer_subdiags,
                 multi_suggestions,
                 bad_label: None,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
-            TypeAnnotationNeeded::E0284 => AmbiguousReturn {
+            }),
+            TypeAnnotationNeeded::E0284 => self.tcx.sess.dcx().create_err(AmbiguousReturn {
                 span,
                 source_kind,
                 source_name: &name,
@@ -602,8 +596,7 @@ pub fn emit_inference_failure_err(
                 infer_subdiags,
                 multi_suggestions,
                 bad_label: None,
-            }
-            .into_diagnostic(self.tcx.sess.dcx()),
+            }),
         }
     }
 }
@@ -5,9 +5,7 @@
 use crate::fluent_generated as fluent;
 use crate::infer::error_reporting::{note_and_explain_region, TypeErrCtxt};
 use crate::infer::{self, SubregionOrigin};
-use rustc_errors::{
-    AddToDiagnostic, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic,
-};
+use rustc_errors::{AddToDiagnostic, Diagnostic, DiagnosticBuilder, ErrorGuaranteed};
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_middle::traits::ObligationCauseCode;
 use rustc_middle::ty::error::TypeError;
@@ -136,11 +134,10 @@ pub(super) fn report_concrete_failure(
                     note_and_explain::PrefixKind::ContentValidFor,
                     note_and_explain::SuffixKind::Empty,
                 );
-                OutlivesContent {
+                self.tcx.sess.dcx().create_err(OutlivesContent {
                     span,
                     notes: reference_valid.into_iter().chain(content_valid).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::RelateObjectBound(span) => {
                 let object_valid = note_and_explain::RegionExplanation::new(
@@ -157,11 +154,10 @@ pub(super) fn report_concrete_failure(
                     note_and_explain::PrefixKind::SourcePointerValidFor,
                     note_and_explain::SuffixKind::Empty,
                 );
-                OutlivesBound {
+                self.tcx.sess.dcx().create_err(OutlivesBound {
                     span,
                     notes: object_valid.into_iter().chain(pointer_valid).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::RelateParamBound(span, ty, opt_span) => {
                 let prefix = match *sub {
@@ -176,8 +172,11 @@ pub(super) fn report_concrete_failure(
                 let note = note_and_explain::RegionExplanation::new(
                     self.tcx, sub, opt_span, prefix, suffix,
                 );
-                FulfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
-                    .into_diagnostic(self.tcx.sess.dcx())
+                self.tcx.sess.dcx().create_err(FulfillReqLifetime {
+                    span,
+                    ty: self.resolve_vars_if_possible(ty),
+                    note,
+                })
             }
             infer::RelateRegionParamBound(span) => {
                 let param_instantiated = note_and_explain::RegionExplanation::new(
@@ -194,11 +193,10 @@ pub(super) fn report_concrete_failure(
                     note_and_explain::PrefixKind::LfParamMustOutlive,
                     note_and_explain::SuffixKind::Empty,
                 );
-                LfBoundNotSatisfied {
+                self.tcx.sess.dcx().create_err(LfBoundNotSatisfied {
                     span,
                     notes: param_instantiated.into_iter().chain(param_must_outlive).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::ReferenceOutlivesReferent(ty, span) => {
                 let pointer_valid = note_and_explain::RegionExplanation::new(
@@ -215,12 +213,11 @@ pub(super) fn report_concrete_failure(
                     note_and_explain::PrefixKind::DataValidFor,
                     note_and_explain::SuffixKind::Empty,
                 );
-                RefLongerThanData {
+                self.tcx.sess.dcx().create_err(RefLongerThanData {
                     span,
                     ty: self.resolve_vars_if_possible(ty),
                     notes: pointer_valid.into_iter().chain(data_valid).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
             infer::CompareImplItemObligation { span, impl_item_def_id, trait_item_def_id } => {
                 let mut err = self.report_extra_impl_obligation(
@@ -277,11 +274,10 @@ pub(super) fn report_concrete_failure(
                     note_and_explain::PrefixKind::LfMustOutlive,
                     note_and_explain::SuffixKind::Empty,
                 );
-                LfBoundNotSatisfied {
+                self.tcx.sess.dcx().create_err(LfBoundNotSatisfied {
                     span,
                     notes: instantiated.into_iter().chain(must_outlive).collect(),
-                }
-                .into_diagnostic(self.tcx.sess.dcx())
+                })
             }
         };
         if sub.is_error() || sup.is_error() {
@@ -5,7 +5,7 @@
 use rustc_ast as ast;
 use rustc_ast::attr;
 use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_errors::{error_code, Diagnostic, IntoDiagnostic, PResult};
+use rustc_errors::{error_code, Diagnostic, PResult};
 use rustc_span::{sym, BytePos, Span};
 use thin_vec::ThinVec;
 use tracing::debug;
@@ -416,8 +416,9 @@ fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
             Err(err) => err.cancel(),
         }
 
-        Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
-            .into_diagnostic(self.dcx()))
+        Err(self
+            .dcx()
+            .create_err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }))
     }
 }
 
@@ -35,7 +35,7 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{
     pluralize, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
-    DiagnosticMessage, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan, PResult,
+    DiagnosticMessage, ErrorGuaranteed, FatalError, MultiSpan, PResult,
 };
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::Spanned;
@@ -280,11 +280,10 @@ pub(super) fn expected_ident_found(
         recover: bool,
     ) -> PResult<'a, (Ident, /* is_raw */ bool)> {
         if let TokenKind::DocComment(..) = self.prev_token.kind {
-            return Err(DocCommentDoesNotDocumentAnything {
+            return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
                 span: self.prev_token.span,
                 missing_comma: None,
-            }
-            .into_diagnostic(self.dcx()));
+            }));
        }
 
         let valid_follow = &[
@@ -347,7 +346,7 @@ pub(super) fn expected_ident_found(
             suggest_remove_comma,
             help_cannot_start_number,
         };
-        let mut err = err.into_diagnostic(self.dcx());
+        let mut err = self.dcx().create_err(err);
 
         // if the token we have is a `<`
         // it *might* be a misplaced generic
@@ -1410,7 +1409,7 @@ pub(super) fn check_no_chained_comparison(
                         // Not entirely sure now, but we bubble the error up with the
                         // suggestion.
                         self.restore_snapshot(snapshot);
-                        Err(err.into_diagnostic(self.dcx()))
+                        Err(self.dcx().create_err(err))
                     }
                 }
             } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
@@ -1425,7 +1424,7 @@ pub(super) fn check_no_chained_comparison(
                     }
                     // Consume the fn call arguments.
                     match self.consume_fn_args() {
-                        Err(()) => Err(err.into_diagnostic(self.dcx())),
+                        Err(()) => Err(self.dcx().create_err(err)),
                         Ok(()) => {
                             self.sess.emit_err(err);
                             // FIXME: actually check that the two expressions in the binop are
@@ -1451,7 +1450,7 @@ pub(super) fn check_no_chained_comparison(
                         mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                     } else {
                         // These cases cause too many knock-down errors, bail out (#61329).
-                        Err(err.into_diagnostic(self.dcx()))
+                        Err(self.dcx().create_err(err))
                     }
                 };
             }
@@ -2539,7 +2538,7 @@ pub fn recover_const_param_declaration(
             Ok(Some(GenericArg::Const(self.parse_const_arg()?)))
         } else {
             let after_kw_const = self.token.span;
-            self.recover_const_arg(after_kw_const, err.into_diagnostic(self.dcx())).map(Some)
+            self.recover_const_arg(after_kw_const, self.dcx().create_err(err)).map(Some)
         }
     }
 
@@ -2893,11 +2892,10 @@ pub(crate) fn maybe_recover_bounds_doubled_colon(&mut self, ty: &Ty) -> PResult<
             let (a_span, b_span) = (a.span(), b.span());
             let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
             if self.span_to_snippet(between_span).as_deref() == Ok(":: ") {
-                return Err(DoubleColonInBound {
+                return Err(self.dcx().create_err(DoubleColonInBound {
                     span: path.span.shrink_to_hi(),
                     between: between_span,
-                }
-                .into_diagnostic(self.dcx()));
+                }));
             }
         }
     }
@@ -26,8 +26,8 @@
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_errors::{
-    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic,
-    PResult, StashKey,
+    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, PResult,
+    StashKey,
 };
 use rustc_macros::Subdiagnostic;
 use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
@@ -1255,21 +1255,21 @@ fn maybe_recover_struct_lit_bad_delims(
             // that of the open delim in `TokenTreesReader::parse_token_tree`, even if they are different.
             self.span_to_snippet(close_paren).is_ok_and(|snippet| snippet == ")")
         {
-            let mut replacement_err = errors::ParenthesesWithStructFields {
-                span,
-                r#type: path,
-                braces_for_struct: errors::BracesForStructLiteral {
-                    first: open_paren,
-                    second: close_paren,
-                },
-                no_fields_for_fn: errors::NoFieldsForFnCall {
-                    fields: fields
-                        .into_iter()
-                        .map(|field| field.span.until(field.expr.span))
-                        .collect(),
-                },
-            }
-            .into_diagnostic(self.dcx());
+            let mut replacement_err =
+                self.dcx().create_err(errors::ParenthesesWithStructFields {
+                    span,
+                    r#type: path,
+                    braces_for_struct: errors::BracesForStructLiteral {
+                        first: open_paren,
+                        second: close_paren,
+                    },
+                    no_fields_for_fn: errors::NoFieldsForFnCall {
+                        fields: fields
+                            .into_iter()
+                            .map(|field| field.span.until(field.expr.span))
+                            .collect(),
+                    },
+                });
             replacement_err.emit();
 
             let old_err = mem::replace(err, replacement_err);
@@ -1883,8 +1883,7 @@ pub(crate) fn parse_builtin<T>(
         self.bump(); // `#`
 
         let Some((ident, false)) = self.token.ident() else {
-            let err =
-                errors::ExpectedBuiltinIdent { span: self.token.span }.into_diagnostic(self.dcx());
+            let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
             return Err(err);
         };
         self.sess.gated_spans.gate(sym::builtin_syntax, ident.span);
@@ -1894,8 +1893,10 @@ pub(crate) fn parse_builtin<T>(
         let ret = if let Some(res) = parse(self, lo, ident)? {
             Ok(res)
         } else {
-            let err = errors::UnknownBuiltinConstruct { span: lo.to(ident.span), name: ident.name }
-                .into_diagnostic(self.dcx());
+            let err = self.dcx().create_err(errors::UnknownBuiltinConstruct {
+                span: lo.to(ident.span),
+                name: ident.name,
+            });
             return Err(err);
         };
         self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;
@@ -1958,8 +1959,9 @@ fn handle_missing_lit<L>(
             && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
             && matches!(e.kind, ExprKind::Err)
         {
-            let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
-                .into_diagnostic(self.dcx());
+            let mut err = self
+                .dcx()
+                .create_err(errors::InvalidInterpolatedExpression { span: self.token.span });
             err.downgrade_to_delayed_bug();
             return Err(err);
         }
@@ -2168,10 +2170,10 @@ fn suggest_missing_semicolon_before_array(
                     .span_to_snippet(snapshot.token.span)
                     .is_ok_and(|snippet| snippet == "]") =>
                 {
-                    return Err(errors::MissingSemicolonBeforeArray {
+                    return Err(self.dcx().create_err(errors::MissingSemicolonBeforeArray {
                         open_delim: open_delim_span,
                         semicolon: prev_span.shrink_to_hi(),
-                    }.into_diagnostic(self.dcx()));
+                    }));
                 }
                 Ok(_) => (),
                 Err(err) => err.cancel(),
@@ -2318,8 +2320,9 @@ fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
             // Check for `move async` and recover
             if self.check_keyword(kw::Async) {
                 let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
-                Err(errors::AsyncMoveOrderIncorrect { span: move_async_span }
-                    .into_diagnostic(self.dcx()))
+                Err(self
+                    .dcx()
+                    .create_err(errors::AsyncMoveOrderIncorrect { span: move_async_span }))
             } else {
                 Ok(CaptureBy::Value { move_kw: move_kw_span })
             }
@@ -2509,7 +2512,7 @@ fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>>
         };
         if self.prev_token.kind == token::BinOp(token::Or) {
             // This was part of a closure, the that part of the parser recover.
-            return Err(err.into_diagnostic(self.dcx()));
+            return Err(self.dcx().create_err(err));
         } else {
             Some(self.sess.emit_err(err))
         }
@@ -3193,7 +3196,7 @@ pub(crate) fn is_builtin(&self) -> bool {
     fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
         let (attrs, body) = self.parse_inner_attrs_and_block()?;
         if self.eat_keyword(kw::Catch) {
-            Err(errors::CatchAfterTry { span: self.prev_token.span }.into_diagnostic(self.dcx()))
+            Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
         } else {
             let span = span_lo.to(body.span);
             self.sess.gated_spans.gate(sym::try_blocks, span);
@@ -3530,12 +3533,11 @@ fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
                     || t == &token::CloseDelim(Delimiter::Parenthesis)
             });
             if is_wrong {
-                return Err(errors::ExpectedStructField {
+                return Err(this.dcx().create_err(errors::ExpectedStructField {
                     span: this.look_ahead(1, |t| t.span),
                     ident_span: this.token.span,
                     token: this.look_ahead(1, |t| t.clone()),
-                }
-                .into_diagnostic(&self.sess.dcx));
+                }));
             }
             let (ident, expr) = if is_shorthand {
                 // Mimic `x: x` for the `x` field shorthand.
|
@ -10,10 +10,7 @@
|
||||
use rustc_ast::util::case::Case;
|
||||
use rustc_ast::{self as ast};
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_errors::{
|
||||
struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
|
||||
StashKey,
|
||||
};
|
||||
use rustc_errors::{struct_span_err, Applicability, PResult, StashKey};
|
||||
use rustc_span::edit_distance::edit_distance;
|
||||
use rustc_span::edition::Edition;
|
||||
use rustc_span::source_map;
|
||||
@@ -438,7 +435,7 @@ fn recover_missing_kw_before_item(&mut self) -> PResult<'a, ()> {
             None
         };
 
-        if let Some(err) = err { Err(err.into_diagnostic(self.dcx())) } else { Ok(()) }
+        if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) }
     }
 
     fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> {
@@ -1373,8 +1370,7 @@ fn recover_missing_global_item_type(
         };
 
         let span = self.prev_token.span.shrink_to_hi();
-        let err: DiagnosticBuilder<'_, ErrorGuaranteed> =
-            errors::MissingConstType { span, colon, kind }.into_diagnostic(self.dcx());
+        let err = self.dcx().create_err(errors::MissingConstType { span, colon, kind });
         err.stash(span, StashKey::ItemNoType);
 
         // The user intended that the type be inferred,
@@ -1391,7 +1387,7 @@ fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
                 self.bump();
                 self.sess.emit_err(err);
             } else {
-                return Err(err.into_diagnostic(self.dcx()));
+                return Err(self.dcx().create_err(err));
             }
         }
 
@@ -1591,7 +1587,7 @@ fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
         } else {
             let err =
                 errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
-            return Err(err.into_diagnostic(self.dcx()));
+            return Err(self.dcx().create_err(err));
         };
 
         Ok((class_name, ItemKind::Struct(vdata, generics)))
@@ -1787,7 +1783,7 @@ fn parse_single_struct_field(
                         let sp = previous_span.shrink_to_hi();
                         err.missing_comma = Some(sp);
                     }
-                    return Err(err.into_diagnostic(self.dcx()));
+                    return Err(self.dcx().create_err(err));
                 }
             }
             _ => {
|
@ -32,9 +32,7 @@
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_errors::PResult;
|
||||
use rustc_errors::{
|
||||
Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
|
||||
};
|
||||
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, MultiSpan};
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
@@ -1500,14 +1498,13 @@ pub(crate) fn make_unclosed_delims_error(
     if let Some(sp) = unmatched.unclosed_span {
         spans.push(sp);
     };
-    let err = MismatchedClosingDelimiter {
+    let err = sess.dcx.create_err(MismatchedClosingDelimiter {
         spans,
         delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
         unmatched: unmatched.found_span,
         opening_candidate: unmatched.candidate_span,
         unclosed: unmatched.unclosed_span,
-    }
-    .into_diagnostic(&sess.dcx);
+    });
     Some(err)
 }
 
|
@ -2,7 +2,6 @@
|
||||
use rustc_ast::token::{self, Delimiter, Nonterminal::*, NonterminalKind, Token};
|
||||
use rustc_ast::HasTokens;
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_errors::IntoDiagnostic;
|
||||
use rustc_errors::PResult;
|
||||
use rustc_span::symbol::{kw, Ident};
|
||||
|
||||
@@ -114,9 +113,9 @@ pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseN
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
                 Some(item) => NtItem(item),
                 None => {
-                    return Err(
-                        UnexpectedNonterminal::Item(self.token.span).into_diagnostic(self.dcx())
-                    );
+                    return Err(self
+                        .dcx()
+                        .create_err(UnexpectedNonterminal::Item(self.token.span)));
                 }
             },
             NonterminalKind::Block => {
@@ -127,8 +126,9 @@ pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseN
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
                 Some(s) => NtStmt(P(s)),
                 None => {
-                    return Err(UnexpectedNonterminal::Statement(self.token.span)
-                        .into_diagnostic(self.dcx()));
+                    return Err(self
+                        .dcx()
+                        .create_err(UnexpectedNonterminal::Statement(self.token.span)));
                 }
             },
             NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
@@ -160,11 +160,10 @@ pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseN
                 NtIdent(ident, is_raw)
             }
             NonterminalKind::Ident => {
-                return Err(UnexpectedNonterminal::Ident {
+                return Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
                     span: self.token.span,
                     token: self.token.clone(),
-                }
-                .into_diagnostic(self.dcx()));
+                }));
             }
             NonterminalKind::Path => {
                 NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?))
@@ -178,11 +177,10 @@ pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseN
                 if self.check_lifetime() {
                     NtLifetime(self.expect_lifetime().ident)
                 } else {
-                    return Err(UnexpectedNonterminal::Lifetime {
+                    return Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
                         span: self.token.span,
                         token: self.token.clone(),
-                    }
-                    .into_diagnostic(self.dcx()));
+                    }));
                 }
             }
         };
|
@ -18,7 +18,7 @@
|
||||
PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
|
||||
};
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult};
|
||||
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
|
||||
use rustc_session::errors::ExprParenthesesNeeded;
|
||||
use rustc_span::source_map::{respan, Spanned};
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
@@ -872,8 +872,9 @@ fn parse_pat_ident(
         // binding mode then we do not end up here, because the lookahead
         // will direct us over to `parse_enum_variant()`.
         if self.token == token::OpenDelim(Delimiter::Parenthesis) {
-            return Err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }
-                .into_diagnostic(self.dcx()));
+            return Err(self
+                .dcx()
+                .create_err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }));
         }
 
         Ok(PatKind::Ident(binding_annotation, ident, sub))
@@ -986,8 +987,8 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, bool)> {
 
             // check that a comma comes after every field
             if !ate_comma {
-                let mut err = ExpectedCommaAfterPatternField { span: self.token.span }
-                    .into_diagnostic(self.dcx());
+                let mut err =
+                    self.dcx().create_err(ExpectedCommaAfterPatternField { span: self.token.span });
                 if let Some(mut delayed) = delayed_err {
                     delayed.emit();
                 }
|
@ -9,7 +9,7 @@
|
||||
AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
|
||||
Path, PathSegment, QSelf,
|
||||
};
|
||||
use rustc_errors::{Applicability, IntoDiagnostic, PResult};
|
||||
use rustc_errors::{Applicability, PResult};
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::{BytePos, Span};
|
||||
use std::mem;
|
||||
@@ -318,15 +318,14 @@ pub(super) fn parse_path_segment(
                         })
                     {
                         err.cancel();
-                        err = PathSingleColon {
+                        err = self.dcx().create_err(PathSingleColon {
                             span: self.token.span,
                             type_ascription: self
                                 .sess
                                 .unstable_features
                                 .is_nightly_build()
                                 .then_some(()),
-                        }
-                        .into_diagnostic(self.dcx());
+                        });
                     }
                     // Attempt to find places where a missing `>` might belong.
                     else if let Some(arg) = args
|
@ -4,9 +4,7 @@
|
||||
use crate::query::DepKind;
|
||||
use crate::query::{QueryContext, QueryStackFrame};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_errors::{
|
||||
DiagCtxt, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, Level,
|
||||
};
|
||||
use rustc_errors::{DiagCtxt, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, Level};
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_session::Session;
|
||||
use rustc_span::Span;
|
||||
@@ -604,7 +602,7 @@ pub(crate) fn report_cycle<'a>(
         note_span: (),
     };
 
-    cycle_diag.into_diagnostic(sess.dcx())
+    sess.dcx().create_err(cycle_diag)
 }
 
 pub fn print_query_stack<Qcx: QueryContext>(