diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 9950db4784b..0b1cb124080 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -125,12 +125,8 @@ pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> { let lit_kind = match LitKind::from_token_lit(*token_lit) { Ok(lit_kind) => lit_kind, Err(err) => { - let guar = report_lit_error( - &self.tcx.sess.parse_sess, - err, - *token_lit, - e.span, - ); + let guar = + report_lit_error(&self.tcx.sess.psess, err, *token_lit, e.span); LitKind::Err(guar) } }; @@ -721,7 +717,7 @@ pub(super) fn maybe_forward_track_caller( sym::track_caller, span, )))), - id: self.tcx.sess.parse_sess.attr_id_generator.mk_attr_id(), + id: self.tcx.sess.psess.attr_id_generator.mk_attr_id(), style: AttrStyle::Outer, span: unstable_span, }], @@ -1756,7 +1752,7 @@ fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> // `#[allow(unreachable_code)]` let attr = attr::mk_attr_nested_word( - &self.tcx.sess.parse_sess.attr_id_generator, + &self.tcx.sess.psess.attr_id_generator, AttrStyle::Outer, sym::allow, sym::unreachable_code, diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index b56d695c671..f5ce9dedac9 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -835,7 +835,7 @@ fn validate_generic_param_order( impl<'a> Visitor<'a> for AstValidator<'a> { fn visit_attribute(&mut self, attr: &Attribute) { - validate_attr::check_attr(&self.session.parse_sess, attr); + validate_attr::check_attr(&self.session.psess, attr); } fn visit_ty(&mut self, ty: &'a Ty) { diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index 1b0dd9acc37..a28fcb00779 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -507,7 +507,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) { check_incompatible_features(sess, features); let mut visitor = PostExpansionVisitor { sess, features }; - let spans = sess.parse_sess.gated_spans.spans.borrow(); + let spans = sess.psess.gated_spans.spans.borrow(); macro_rules! 
gate_all { ($gate:ident, $msg:literal) => { if let Some(spans) = spans.get(&sym::$gate) { diff --git a/compiler/rustc_attr/src/builtin.rs b/compiler/rustc_attr/src/builtin.rs index 3d2cf25e98b..dd64ae6614f 100644 --- a/compiler/rustc_attr/src/builtin.rs +++ b/compiler/rustc_attr/src/builtin.rs @@ -524,9 +524,9 @@ pub fn cfg_matches( ) -> bool { eval_condition(cfg, sess, features, &mut |cfg| { try_gate_cfg(cfg.name, cfg.span, sess, features); - match sess.parse_sess.check_config.expecteds.get(&cfg.name) { + match sess.psess.check_config.expecteds.get(&cfg.name) { Some(ExpectedValues::Some(values)) if !values.contains(&cfg.value) => { - sess.parse_sess.buffer_lint_with_diagnostic( + sess.psess.buffer_lint_with_diagnostic( UNEXPECTED_CFGS, cfg.span, lint_node_id, @@ -541,8 +541,8 @@ pub fn cfg_matches( ), ); } - None if sess.parse_sess.check_config.exhaustive_names => { - sess.parse_sess.buffer_lint_with_diagnostic( + None if sess.psess.check_config.exhaustive_names => { + sess.psess.buffer_lint_with_diagnostic( UNEXPECTED_CFGS, cfg.span, lint_node_id, @@ -555,7 +555,7 @@ pub fn cfg_matches( } _ => { /* not unexpected */ } } - sess.parse_sess.config.contains(&(cfg.name, cfg.value)) + sess.psess.config.contains(&(cfg.name, cfg.value)) }) } @@ -598,7 +598,7 @@ pub fn eval_condition( features: Option<&Features>, eval: &mut impl FnMut(Condition) -> bool, ) -> bool { - let dcx = &sess.parse_sess.dcx; + let dcx = &sess.psess.dcx; match &cfg.kind { ast::MetaItemKind::List(mis) if cfg.name_or_empty() == sym::version => { try_gate_cfg(sym::version, cfg.span, sess, features); @@ -626,7 +626,7 @@ pub fn eval_condition( }; // See https://github.com/rust-lang/rust/issues/64796#issuecomment-640851454 for details - if sess.parse_sess.assume_incomplete_release { + if sess.psess.assume_incomplete_release { RustcVersion::CURRENT > min_version } else { RustcVersion::CURRENT >= min_version diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs index 29bf5e9f304..38fa1ac5935 100644 --- a/compiler/rustc_builtin_macros/src/asm.rs +++ b/compiler/rustc_builtin_macros/src/asm.rs @@ -10,7 +10,6 @@ use rustc_parse::parser::Parser; use rustc_parse_format as parse; use rustc_session::lint; -use rustc_session::parse::ParseSess; use rustc_span::symbol::Ident; use rustc_span::symbol::{kw, sym, Symbol}; use rustc_span::{ErrorGuaranteed, InnerSpan, Span}; @@ -36,19 +35,17 @@ fn parse_args<'a>( is_global_asm: bool, ) -> PResult<'a, AsmArgs> { let mut p = ecx.new_parser_from_tts(tts); - let sess = &ecx.sess.parse_sess; - parse_asm_args(&mut p, sess, sp, is_global_asm) + parse_asm_args(&mut p, sp, is_global_asm) } // Primarily public for rustfmt consumption. // Internal consumers should continue to leverage `expand_asm`/`expand__global_asm` pub fn parse_asm_args<'a>( p: &mut Parser<'a>, - sess: &'a ParseSess, sp: Span, is_global_asm: bool, ) -> PResult<'a, AsmArgs> { - let dcx = &sess.dcx; + let dcx = &p.psess.dcx; if p.token == token::Eof { return Err(dcx.create_err(errors::AsmRequiresTemplate { span: sp })); @@ -299,7 +296,7 @@ pub fn parse_asm_args<'a>( fn err_duplicate_option(p: &mut Parser<'_>, symbol: Symbol, span: Span) { // Tool-only output let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span }; - p.sess.dcx.emit_err(errors::AsmOptAlreadyprovided { span, symbol, full_span }); + p.psess.dcx.emit_err(errors::AsmOptAlreadyprovided { span, symbol, full_span }); } /// Try to set the provided option in the provided `AsmArgs`. 
@@ -371,7 +368,7 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, p.expect(&token::OpenDelim(Delimiter::Parenthesis))?; if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) { - return Err(p.sess.dcx.create_err(errors::NonABI { span: p.token.span })); + return Err(p.psess.dcx.create_err(errors::NonABI { span: p.token.span })); } let mut new_abis = Vec::new(); @@ -382,7 +379,7 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, } Err(opt_lit) => { let span = opt_lit.map_or(p.token.span, |lit| lit.span); - let mut err = p.sess.dcx.struct_span_err(span, "expected string literal"); + let mut err = p.psess.dcx.struct_span_err(span, "expected string literal"); err.span_label(span, "not a string literal"); return Err(err); } @@ -498,7 +495,7 @@ fn expand_preparsed_asm( }; if template_str.contains(".intel_syntax") { - ecx.parse_sess().buffer_lint( + ecx.psess().buffer_lint( lint::builtin::BAD_ASM_STYLE, find_span(".intel_syntax"), ecx.current_expansion.lint_node_id, @@ -506,7 +503,7 @@ fn expand_preparsed_asm( ); } if template_str.contains(".att_syntax") { - ecx.parse_sess().buffer_lint( + ecx.psess().buffer_lint( lint::builtin::BAD_ASM_STYLE, find_span(".att_syntax"), ecx.current_expansion.lint_node_id, diff --git a/compiler/rustc_builtin_macros/src/cfg_accessible.rs b/compiler/rustc_builtin_macros/src/cfg_accessible.rs index ceb5f861078..1933b2e1fb7 100644 --- a/compiler/rustc_builtin_macros/src/cfg_accessible.rs +++ b/compiler/rustc_builtin_macros/src/cfg_accessible.rs @@ -46,7 +46,7 @@ fn expand( ) -> ExpandResult, Annotatable> { let template = AttributeTemplate { list: Some("path"), ..Default::default() }; validate_attr::check_builtin_meta_item( - &ecx.sess.parse_sess, + &ecx.sess.psess, meta_item, ast::AttrStyle::Outer, sym::cfg_accessible, diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs index cfa94b0e780..1de95ca81f7 100644 --- a/compiler/rustc_builtin_macros/src/cfg_eval.rs +++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs @@ -195,8 +195,7 @@ fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Option annotatable = a, diff --git a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs index d956c096d24..ada82e45712 100644 --- a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs +++ b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs @@ -7,10 +7,10 @@ use rustc_session::parse::ParseSess; use rustc_span::FileName; -pub fn inject(krate: &mut ast::Crate, parse_sess: &ParseSess, attrs: &[String]) { +pub fn inject(krate: &mut ast::Crate, psess: &ParseSess, attrs: &[String]) { for raw_attr in attrs { let mut parser = rustc_parse::new_parser_from_source_str( - parse_sess, + psess, FileName::cli_crate_attr_source_code(raw_attr), raw_attr.clone(), ); @@ -25,12 +25,12 @@ pub fn inject(krate: &mut ast::Crate, parse_sess: &ParseSess, attrs: &[String]) }; let end_span = parser.token.span; if parser.token != token::Eof { - parse_sess.dcx.emit_err(errors::InvalidCrateAttr { span: start_span.to(end_span) }); + psess.dcx.emit_err(errors::InvalidCrateAttr { span: start_span.to(end_span) }); continue; } krate.attrs.push(mk_attr( - &parse_sess.attr_id_generator, + &psess.attr_id_generator, AttrStyle::Inner, path, args, diff --git a/compiler/rustc_builtin_macros/src/concat.rs b/compiler/rustc_builtin_macros/src/concat.rs index abfaa9b006e..0bfb848859b 100644 --- a/compiler/rustc_builtin_macros/src/concat.rs +++ 
b/compiler/rustc_builtin_macros/src/concat.rs @@ -43,7 +43,7 @@ pub fn expand_concat( guar = Some(guarantee); } Err(err) => { - guar = Some(report_lit_error(&cx.sess.parse_sess, err, token_lit, e.span)); + guar = Some(report_lit_error(&cx.sess.psess, err, token_lit, e.span)); } }, // We also want to allow negative numeric literals. @@ -52,7 +52,7 @@ pub fn expand_concat( Ok(LitKind::Int(i, _)) => accumulator.push_str(&format!("-{i}")), Ok(LitKind::Float(f, _)) => accumulator.push_str(&format!("-{f}")), Err(err) => { - guar = Some(report_lit_error(&cx.sess.parse_sess, err, token_lit, e.span)); + guar = Some(report_lit_error(&cx.sess.psess, err, token_lit, e.span)); } _ => missing_literal.push(e.span), } diff --git a/compiler/rustc_builtin_macros/src/concat_bytes.rs b/compiler/rustc_builtin_macros/src/concat_bytes.rs index 3fb0b50f417..502bfb4467e 100644 --- a/compiler/rustc_builtin_macros/src/concat_bytes.rs +++ b/compiler/rustc_builtin_macros/src/concat_bytes.rs @@ -55,7 +55,7 @@ fn invalid_type_err( Ok(LitKind::Int(_, _)) => dcx.emit_err(ConcatBytesNonU8 { span }), Ok(LitKind::ByteStr(..) | LitKind::Byte(_)) => unreachable!(), Ok(LitKind::Err(guar)) => guar, - Err(err) => report_lit_error(&cx.sess.parse_sess, err, token_lit, span), + Err(err) => report_lit_error(&cx.sess.psess, err, token_lit, span), } } diff --git a/compiler/rustc_builtin_macros/src/derive.rs b/compiler/rustc_builtin_macros/src/derive.rs index 7388e133c53..4f412cf79d9 100644 --- a/compiler/rustc_builtin_macros/src/derive.rs +++ b/compiler/rustc_builtin_macros/src/derive.rs @@ -34,7 +34,7 @@ fn expand( let template = AttributeTemplate { list: Some("Trait1, Trait2, ..."), ..Default::default() }; validate_attr::check_builtin_meta_item( - &sess.parse_sess, + &sess.psess, meta_item, ast::AttrStyle::Outer, sym::derive, diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index eb664b571ba..0adab6c4322 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -1624,7 +1624,7 @@ fn create_struct_field_access_fields( }; if let Some(ty) = exception { - cx.sess.parse_sess.buffer_lint_with_diagnostic( + cx.sess.psess.buffer_lint_with_diagnostic( BYTE_SLICE_IN_PACKED_STRUCT_WITH_DERIVE, sp, ast::CRATE_NODE_ID, diff --git a/compiler/rustc_builtin_macros/src/env.rs b/compiler/rustc_builtin_macros/src/env.rs index f057d44bf71..193b38a8323 100644 --- a/compiler/rustc_builtin_macros/src/env.rs +++ b/compiler/rustc_builtin_macros/src/env.rs @@ -39,7 +39,7 @@ pub fn expand_option_env<'cx>( let sp = cx.with_def_site_ctxt(sp); let value = lookup_env(cx, var); - cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value)); + cx.sess.psess.env_depinfo.borrow_mut().insert((var, value)); let e = match value { None => { let lt = cx.lifetime(sp, Ident::new(kw::StaticLifetime, sp)); @@ -94,7 +94,7 @@ pub fn expand_env<'cx>( let span = cx.with_def_site_ctxt(sp); let value = lookup_env(cx, var); - cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value)); + cx.sess.psess.env_depinfo.borrow_mut().insert((var, value)); let e = match value { None => { let ExprKind::Lit(token::Lit { diff --git a/compiler/rustc_builtin_macros/src/source_util.rs b/compiler/rustc_builtin_macros/src/source_util.rs index 8e0978de237..2da9bda19e0 100644 --- a/compiler/rustc_builtin_macros/src/source_util.rs +++ b/compiler/rustc_builtin_macros/src/source_util.rs @@ -118,7 +118,7 @@ pub fn expand_include<'cx>( 
return DummyResult::any(sp, guar); } }; - let p = new_parser_from_file(cx.parse_sess(), &file, Some(sp)); + let p = new_parser_from_file(cx.psess(), &file, Some(sp)); // If in the included file we have e.g., `mod bar;`, // then the path of `bar.rs` should be relative to the directory of `file`. @@ -136,7 +136,7 @@ impl<'a> MacResult for ExpandResult<'a> { fn make_expr(mut self: Box>) -> Option> { let expr = parse_expr(&mut self.p).ok()?; if self.p.token != token::Eof { - self.p.sess.buffer_lint( + self.p.psess.buffer_lint( INCOMPLETE_INCLUDE, self.p.token.span, self.node_id, diff --git a/compiler/rustc_builtin_macros/src/standard_library_imports.rs b/compiler/rustc_builtin_macros/src/standard_library_imports.rs index 3ee3112f021..9bcd793c450 100644 --- a/compiler/rustc_builtin_macros/src/standard_library_imports.rs +++ b/compiler/rustc_builtin_macros/src/standard_library_imports.rs @@ -17,7 +17,7 @@ pub fn inject( features: &Features, ) -> usize { let orig_num_items = krate.items.len(); - let edition = sess.parse_sess.edition; + let edition = sess.psess.edition; // the first name in this list is the crate name of the crate with the prelude let names: &[Symbol] = if attr::contains_name(pre_configured_attrs, sym::no_core) { diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs index dc28cd2ea31..a2015445b42 100644 --- a/compiler/rustc_builtin_macros/src/test_harness.rs +++ b/compiler/rustc_builtin_macros/src/test_harness.rs @@ -159,7 +159,7 @@ struct InnerItemLinter<'a> { impl<'a> Visitor<'a> for InnerItemLinter<'_> { fn visit_item(&mut self, i: &'a ast::Item) { if let Some(attr) = attr::find_by_name(&i.attrs, sym::rustc_test_marker) { - self.sess.parse_sess.buffer_lint( + self.sess.psess.buffer_lint( UNNAMEABLE_TEST_ITEMS, attr.span, i.id, @@ -200,7 +200,7 @@ impl<'a> MutVisitor for EntryPointCleaner<'a> { EntryPointType::MainNamed | EntryPointType::RustcMainAttr | EntryPointType::Start => { item.map(|ast::Item { id, ident, attrs, kind, vis, span, tokens }| { let allow_dead_code = attr::mk_attr_nested_word( - &self.sess.parse_sess.attr_id_generator, + &self.sess.psess.attr_id_generator, ast::AttrStyle::Outer, sym::allow, sym::dead_code, diff --git a/compiler/rustc_builtin_macros/src/util.rs b/compiler/rustc_builtin_macros/src/util.rs index eeaf00004e6..ad6b09ba574 100644 --- a/compiler/rustc_builtin_macros/src/util.rs +++ b/compiler/rustc_builtin_macros/src/util.rs @@ -9,7 +9,7 @@ pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, na // All the built-in macro attributes are "words" at the moment. 
let template = AttributeTemplate { word: true, ..Default::default() }; validate_attr::check_builtin_meta_item( - &ecx.sess.parse_sess, + &ecx.sess.psess, meta_item, AttrStyle::Outer, name, @@ -37,7 +37,7 @@ pub fn warn_on_duplicate_attribute(ecx: &ExtCtxt<'_>, item: &Annotatable, name: }; if let Some(attrs) = attrs { if let Some(attr) = attr::find_by_name(attrs, name) { - ecx.parse_sess().buffer_lint( + ecx.psess().buffer_lint( DUPLICATE_MACRO_ATTRIBUTES, attr.span, ecx.current_expansion.lint_node_id, diff --git a/compiler/rustc_codegen_ssa/src/assert_module_sources.rs b/compiler/rustc_codegen_ssa/src/assert_module_sources.rs index cbee4877122..7ecc3864347 100644 --- a/compiler/rustc_codegen_ssa/src/assert_module_sources.rs +++ b/compiler/rustc_codegen_ssa/src/assert_module_sources.rs @@ -176,7 +176,7 @@ fn field(&self, attr: &ast::Attribute, name: Symbol) -> Symbol { /// Scan for a `cfg="foo"` attribute and check whether we have a /// cfg flag called `foo`. fn check_config(&self, attr: &ast::Attribute) -> bool { - let config = &self.tcx.sess.parse_sess.config; + let config = &self.tcx.sess.psess.config; let value = self.field(attr, sym::cfg); debug!("check_config(config={:?}, value={:?})", config, value); if config.iter().any(|&(name, _)| name == value) { diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index 410e7eba30a..ab1e37e4ac2 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -314,7 +314,7 @@ fn run_compiler( file_loader, locale_resources: DEFAULT_LOCALE_RESOURCES, lint_caps: Default::default(), - parse_sess_created: None, + psess_created: None, hash_untracked_state: None, register_lints: None, override_queries: None, @@ -768,7 +768,7 @@ fn print_crate_info( } Cfg => { let mut cfgs = sess - .parse_sess + .psess .config .iter() .filter_map(|&(name, value)| { @@ -1215,12 +1215,10 @@ pub fn handle_options(early_dcx: &EarlyDiagCtxt, args: &[String]) -> Option(sess: &'a Session) -> PResult<'a, ast::AttrVec> { match &sess.io.input { - Input::File(ifile) => rustc_parse::parse_crate_attrs_from_file(ifile, &sess.parse_sess), - Input::Str { name, input } => rustc_parse::parse_crate_attrs_from_source_str( - name.clone(), - input.clone(), - &sess.parse_sess, - ), + Input::File(ifile) => rustc_parse::parse_crate_attrs_from_file(ifile, &sess.psess), + Input::Str { name, input } => { + rustc_parse::parse_crate_attrs_from_source_str(name.clone(), input.clone(), &sess.psess) + } } } diff --git a/compiler/rustc_driver_impl/src/pretty.rs b/compiler/rustc_driver_impl/src/pretty.rs index ff5ffd2454a..768d98ce01e 100644 --- a/compiler/rustc_driver_impl/src/pretty.rs +++ b/compiler/rustc_driver_impl/src/pretty.rs @@ -260,7 +260,7 @@ pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) { ExpandedIdentified => Box::new(AstIdentifiedAnn), ExpandedHygiene => Box::new(AstHygieneAnn { sess }), }; - let parse = &sess.parse_sess; + let psess = &sess.psess; let is_expanded = ppm.needs_ast_map(); ex.with_krate(|krate| { pprust_ast::print_crate( @@ -270,8 +270,8 @@ pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) { src, &*annotation, is_expanded, - parse.edition, - &sess.parse_sess.attr_id_generator, + psess.edition, + &sess.psess.attr_id_generator, ) }) } diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 485f0e7e46d..6d205a82675 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -1135,13 +1135,13 @@ pub fn 
monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> { expand::MacroExpander::new(self, true) } pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> { - rustc_parse::stream_to_parser(&self.sess.parse_sess, stream, MACRO_ARGUMENTS) + rustc_parse::stream_to_parser(&self.sess.psess, stream, MACRO_ARGUMENTS) } pub fn source_map(&self) -> &'a SourceMap { - self.sess.parse_sess.source_map() + self.sess.psess.source_map() } - pub fn parse_sess(&self) -> &'a ParseSess { - &self.sess.parse_sess + pub fn psess(&self) -> &'a ParseSess { + &self.sess.psess } pub fn call_site(&self) -> Span { self.current_expansion.id.expn_data().call_site @@ -1216,26 +1216,22 @@ pub fn check_unused_macros(&mut self) { /// Resolves a `path` mentioned inside Rust code, returning an absolute path. /// /// This unifies the logic used for resolving `include_X!`. -pub fn resolve_path( - parse_sess: &Session, - path: impl Into, - span: Span, -) -> PResult<'_, PathBuf> { +pub fn resolve_path(sess: &Session, path: impl Into, span: Span) -> PResult<'_, PathBuf> { let path = path.into(); // Relative paths are resolved relative to the file in which they are found // after macro expansion (that is, they are unhygienic). if !path.is_absolute() { let callsite = span.source_callsite(); - let mut result = match parse_sess.source_map().span_to_filename(callsite) { + let mut result = match sess.source_map().span_to_filename(callsite) { FileName::Real(name) => name .into_local_path() .expect("attempting to resolve a file path in an external file"), FileName::DocTest(path, _) => path, other => { - return Err(parse_sess.dcx().create_err(errors::ResolveRelativePath { + return Err(sess.dcx().create_err(errors::ResolveRelativePath { span, - path: parse_sess.source_map().filename_for_diagnostics(&other).to_string(), + path: sess.source_map().filename_for_diagnostics(&other).to_string(), })); } }; @@ -1281,7 +1277,7 @@ pub fn expr_to_spanned_string<'a>( Ok((err, true)) } Ok(ast::LitKind::Err(guar)) => Err(guar), - Err(err) => Err(report_lit_error(&cx.sess.parse_sess, err, token_lit, expr.span)), + Err(err) => Err(report_lit_error(&cx.sess.psess, err, token_lit, expr.span)), _ => Ok((cx.dcx().struct_span_err(expr.span, err_msg), false)), }, ast::ExprKind::Err(guar) => Err(guar), @@ -1487,7 +1483,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) -> bool { }; if crate_matches { - sess.parse_sess.buffer_lint_with_diagnostic( + sess.psess.buffer_lint_with_diagnostic( PROC_MACRO_BACK_COMPAT, item.ident.span, ast::CRATE_NODE_ID, diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs index 9ce2084c847..989b7b485c9 100644 --- a/compiler/rustc_expand/src/build.rs +++ b/compiler/rustc_expand/src/build.rs @@ -665,7 +665,7 @@ pub fn item_const( // Builds `#[name]`. pub fn attr_word(&self, name: Symbol, span: Span) -> ast::Attribute { - let g = &self.sess.parse_sess.attr_id_generator; + let g = &self.sess.psess.attr_id_generator; attr::mk_attr_word(g, ast::AttrStyle::Outer, name, span) } @@ -673,13 +673,13 @@ pub fn attr_word(&self, name: Symbol, span: Span) -> ast::Attribute { // // Note: `span` is used for both the identifier and the value. pub fn attr_name_value_str(&self, name: Symbol, val: Symbol, span: Span) -> ast::Attribute { - let g = &self.sess.parse_sess.attr_id_generator; + let g = &self.sess.psess.attr_id_generator; attr::mk_attr_name_value_str(g, ast::AttrStyle::Outer, name, val, span) } // Builds `#[outer(inner)]`. 
pub fn attr_nested_word(&self, outer: Symbol, inner: Symbol, span: Span) -> ast::Attribute { - let g = &self.sess.parse_sess.attr_id_generator; + let g = &self.sess.psess.attr_id_generator; attr::mk_attr_nested_word(g, ast::AttrStyle::Outer, outer, inner, span) } } diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 435135d1959..921fea14312 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -241,14 +241,14 @@ fn process_cfg_attr(&self, attr: &Attribute) -> Vec { /// the attribute is incorrect. pub(crate) fn expand_cfg_attr(&self, attr: &Attribute, recursive: bool) -> Vec { let Some((cfg_predicate, expanded_attrs)) = - rustc_parse::parse_cfg_attr(attr, &self.sess.parse_sess) + rustc_parse::parse_cfg_attr(attr, &self.sess.psess) else { return vec![]; }; // Lint on zero attributes in source. if expanded_attrs.is_empty() { - self.sess.parse_sess.buffer_lint( + self.sess.psess.buffer_lint( rustc_lint_defs::builtin::UNUSED_ATTRIBUTES, attr.span, ast::CRATE_NODE_ID, @@ -324,14 +324,14 @@ fn expand_cfg_attr_item( }; let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees))); let attr = attr::mk_attr_from_item( - &self.sess.parse_sess.attr_id_generator, + &self.sess.psess.attr_id_generator, item, tokens, attr.style, item_span, ); if attr.has_name(sym::crate_type) { - self.sess.parse_sess.buffer_lint( + self.sess.psess.buffer_lint( rustc_lint_defs::builtin::DEPRECATED_CFG_ATTR_CRATE_TYPE_NAME, attr.span, ast::CRATE_NODE_ID, @@ -339,7 +339,7 @@ fn expand_cfg_attr_item( ); } if attr.has_name(sym::crate_name) { - self.sess.parse_sess.buffer_lint( + self.sess.psess.buffer_lint( rustc_lint_defs::builtin::DEPRECATED_CFG_ATTR_CRATE_TYPE_NAME, attr.span, ast::CRATE_NODE_ID, @@ -355,7 +355,7 @@ fn in_cfg(&self, attrs: &[Attribute]) -> bool { } pub(crate) fn cfg_true(&self, attr: &Attribute) -> (bool, Option) { - let meta_item = match validate_attr::parse_meta(&self.sess.parse_sess, attr) { + let meta_item = match validate_attr::parse_meta(&self.sess.psess, attr) { Ok(meta_item) => meta_item, Err(err) => { err.emit(); diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 9bc7b4bdd1e..8a01704b766 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -691,10 +691,9 @@ fn expand_invoc( // fixed prior to stabilization // Fake tokens when we are invoking an inner attribute, and // we are invoking it on an out-of-line module or crate. - Annotatable::Crate(krate) => rustc_parse::fake_token_stream_for_crate( - &self.cx.sess.parse_sess, - krate, - ), + Annotatable::Crate(krate) => { + rustc_parse::fake_token_stream_for_crate(&self.cx.sess.psess, krate) + } Annotatable::Item(item_inner) if matches!(attr.style, AttrStyle::Inner) && matches!( @@ -705,10 +704,7 @@ fn expand_invoc( ) ) => { - rustc_parse::fake_token_stream_for_item( - &self.cx.sess.parse_sess, - item_inner, - ) + rustc_parse::fake_token_stream_for_item(&self.cx.sess.psess, item_inner) } _ => item.to_tokens(), }; @@ -728,7 +724,7 @@ fn expand_invoc( } } SyntaxExtensionKind::LegacyAttr(expander) => { - match validate_attr::parse_meta(&self.cx.sess.parse_sess, &attr) { + match validate_attr::parse_meta(&self.cx.sess.psess, &attr) { Ok(meta) => { let items = match expander.expand(self.cx, span, &meta, item, false) { ExpandResult::Ready(items) => items, @@ -962,8 +958,8 @@ pub fn ensure_complete_parse<'a>( // Avoid emitting backtrace info twice. 
let def_site_span = parser.token.span.with_ctxt(SyntaxContext::root()); - let semi_span = parser.sess.source_map().next_point(span); - let add_semicolon = match &parser.sess.source_map().span_to_snippet(semi_span) { + let semi_span = parser.psess.source_map().next_point(span); + let add_semicolon = match &parser.psess.source_map().span_to_snippet(semi_span) { Ok(snippet) if &snippet[..] != ";" && kind_name == "expression" => { Some(span.shrink_to_hi()) } @@ -1700,7 +1696,7 @@ fn check_attributes(&self, attrs: &[ast::Attribute], call: &ast::MacCall) { let mut span: Option<Span> = None; while let Some(attr) = attrs.next() { rustc_ast_passes::feature_gate::check_attribute(attr, self.cx.sess, features); - validate_attr::check_attr(&self.cx.sess.parse_sess, attr); + validate_attr::check_attr(&self.cx.sess.psess, attr); let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span }; span = Some(current_span); @@ -1710,7 +1706,7 @@ fn check_attributes(&self, attrs: &[ast::Attribute], call: &ast::MacCall) { } if attr.is_doc_comment() { - self.cx.sess.parse_sess.buffer_lint_with_diagnostic( + self.cx.sess.psess.buffer_lint_with_diagnostic( UNUSED_DOC_COMMENTS, current_span, self.cx.current_expansion.lint_node_id, @@ -1722,7 +1718,7 @@ fn check_attributes(&self, attrs: &[ast::Attribute], call: &ast::MacCall) { // `#[cfg]` and `#[cfg_attr]` are special - they are // eagerly evaluated. if attr_name != sym::cfg && attr_name != sym::cfg_attr { - self.cx.sess.parse_sess.buffer_lint_with_diagnostic( + self.cx.sess.psess.buffer_lint_with_diagnostic( UNUSED_ATTRIBUTES, attr.span, self.cx.current_expansion.lint_node_id, diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs index 053063b690e..d8b8a0fa96a 100644 --- a/compiler/rustc_expand/src/mbe/diagnostics.rs +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -24,12 +24,12 @@ pub(super) fn failed_to_match_macro<'cx>( arg: TokenStream, lhses: &[Vec<MatcherLoc>], ) -> Box<dyn MacResult + 'cx> { - let sess = &cx.sess.parse_sess; + let psess = &cx.sess.psess; // An error occurred, try the expansion again, tracking the expansion closely for better diagnostics. 
let mut tracker = CollectTrackerAndEmitter::new(cx, sp); - let try_success_result = try_match_macro(sess, name, &arg, lhses, &mut tracker); + let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker); if try_success_result.is_ok() { // Nonterminal parser recovery might turn failed matches into successful ones, @@ -58,7 +58,7 @@ pub(super) fn failed_to_match_macro<'cx>( err.span_label(cx.source_map().guess_head_span(def_span), "when calling this macro"); } - annotate_doc_comment(cx.sess.dcx(), &mut err, sess.source_map(), span); + annotate_doc_comment(cx.sess.dcx(), &mut err, psess.source_map(), span); if let Some(span) = remaining_matcher.span() { err.span_note(span, format!("while trying to match {remaining_matcher}")); @@ -87,7 +87,7 @@ pub(super) fn failed_to_match_macro<'cx>( // Check whether there's a missing comma in this macro call, like `println!("{}" a);` if let Some((arg, comma_span)) = arg.add_comma() { for lhs in lhses { - let parser = parser_from_cx(sess, arg.clone(), Recovery::Allowed); + let parser = parser_from_cx(psess, arg.clone(), Recovery::Allowed); let mut tt_parser = TtParser::new(name); if let Success(_) = @@ -246,10 +246,10 @@ pub(super) fn emit_frag_parse_err( if e.span.is_dummy() { // Get around lack of span in error (#30128) e.replace_span_with(site_span, true); - if !parser.sess.source_map().is_imported(arm_span) { + if !parser.psess.source_map().is_imported(arm_span) { e.span_label(arm_span, "in this macro arm"); } - } else if parser.sess.source_map().is_imported(parser.token.span) { + } else if parser.psess.source_map().is_imported(parser.token.span) { e.span_label(site_span, "in this macro invocation"); } match kind { @@ -262,7 +262,7 @@ pub(super) fn emit_frag_parse_err( ); if parser.token == token::Semi { - if let Ok(snippet) = parser.sess.source_map().span_to_snippet(site_span) { + if let Ok(snippet) = parser.psess.source_map().span_to_snippet(site_span) { e.span_suggestion_verbose( site_span, "surround the macro invocation with `{}` to interpret the expansion as a statement", diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs index 8174cb03d33..19405dcfd6c 100644 --- a/compiler/rustc_expand/src/mbe/macro_check.rs +++ b/compiler/rustc_expand/src/mbe/macro_check.rs @@ -193,25 +193,25 @@ struct MacroState<'a> { /// Checks that meta-variables are used correctly in a macro definition. 
/// /// Arguments: -/// - `sess` is used to emit diagnostics and lints +/// - `psess` is used to emit diagnostics and lints /// - `node_id` is used to emit lints /// - `span` is used when no spans are available /// - `lhses` and `rhses` should have the same length and represent the macro definition pub(super) fn check_meta_variables( - sess: &ParseSess, + psess: &ParseSess, node_id: NodeId, span: Span, lhses: &[TokenTree], rhses: &[TokenTree], ) -> Result<(), ErrorGuaranteed> { if lhses.len() != rhses.len() { - sess.dcx.span_bug(span, "length mismatch between LHSes and RHSes") + psess.dcx.span_bug(span, "length mismatch between LHSes and RHSes") } let mut guar = None; for (lhs, rhs) in iter::zip(lhses, rhses) { let mut binders = Binders::default(); - check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut guar); - check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut guar); + check_binders(psess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut guar); + check_occurrences(psess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut guar); } guar.map_or(Ok(()), Err) } @@ -220,7 +220,7 @@ pub(super) fn check_meta_variables( /// sets `valid` to false in case of errors. /// /// Arguments: -/// - `sess` is used to emit diagnostics and lints +/// - `psess` is used to emit diagnostics and lints /// - `node_id` is used to emit lints /// - `lhs` is checked as part of a LHS /// - `macros` is the stack of possible outer macros @@ -228,7 +228,7 @@ pub(super) fn check_meta_variables( /// - `ops` is the stack of Kleene operators from the LHS /// - `guar` is set in case of errors fn check_binders( - sess: &ParseSess, + psess: &ParseSess, node_id: NodeId, lhs: &TokenTree, macros: &Stack<'_, MacroState<'_>>, @@ -244,7 +244,7 @@ fn check_binders( // MetaVar(fragment) and not as MetaVarDecl(y, fragment). TokenTree::MetaVar(span, name) => { if macros.is_empty() { - sess.dcx.span_bug(span, "unexpected MetaVar in lhs"); + psess.dcx.span_bug(span, "unexpected MetaVar in lhs"); } let name = MacroRulesNormalizedIdent::new(name); // There are 3 possibilities: @@ -252,13 +252,13 @@ fn check_binders( // 1. The meta-variable is already bound in the current LHS: This is an error. let mut span = MultiSpan::from_span(span); span.push_span_label(prev_info.span, "previous declaration"); - buffer_lint(sess, span, node_id, "duplicate matcher binding"); + buffer_lint(psess, span, node_id, "duplicate matcher binding"); } else if get_binder_info(macros, binders, name).is_none() { // 2. The meta-variable is free: This is a binder. binders.insert(name, BinderInfo { span, ops: ops.into() }); } else { // 3. The meta-variable is bound: This is an occurrence. - check_occurrences(sess, node_id, lhs, macros, binders, ops, guar); + check_occurrences(psess, node_id, lhs, macros, binders, ops, guar); } } // Similarly, this can only happen when checking a toplevel macro. @@ -267,7 +267,7 @@ fn check_binders( // FIXME: Report this as a hard error eventually and remove equivalent errors from // `parse_tt_inner` and `nameize`. Until then the error may be reported twice, once // as a hard error and then once as a buffered lint. 
- sess.buffer_lint( + psess.buffer_lint( MISSING_FRAGMENT_SPECIFIER, span, node_id, @@ -275,14 +275,15 @@ fn check_binders( ); } if !macros.is_empty() { - sess.dcx.span_bug(span, "unexpected MetaVarDecl in nested lhs"); + psess.dcx.span_bug(span, "unexpected MetaVarDecl in nested lhs"); } let name = MacroRulesNormalizedIdent::new(name); if let Some(prev_info) = get_binder_info(macros, binders, name) { // Duplicate binders at the top-level macro definition are errors. The lint is only // for nested macro definitions. *guar = Some( - sess.dcx + psess + .dcx .emit_err(errors::DuplicateMatcherBinding { span, prev: prev_info.span }), ); } else { @@ -293,13 +294,13 @@ fn check_binders( TokenTree::MetaVarExpr(..) => {} TokenTree::Delimited(.., ref del) => { for tt in &del.tts { - check_binders(sess, node_id, tt, macros, binders, ops, guar); + check_binders(psess, node_id, tt, macros, binders, ops, guar); } } TokenTree::Sequence(_, ref seq) => { let ops = ops.push(seq.kleene); for tt in &seq.tts { - check_binders(sess, node_id, tt, macros, binders, &ops, guar); + check_binders(psess, node_id, tt, macros, binders, &ops, guar); } } } @@ -323,7 +324,7 @@ fn get_binder_info<'a>( /// errors. /// /// Arguments: -/// - `sess` is used to emit diagnostics and lints +/// - `psess` is used to emit diagnostics and lints /// - `node_id` is used to emit lints /// - `rhs` is checked as part of a RHS /// - `macros` is the stack of possible outer macros @@ -331,7 +332,7 @@ fn get_binder_info<'a>( /// - `ops` is the stack of Kleene operators from the RHS /// - `guar` is set in case of errors fn check_occurrences( - sess: &ParseSess, + psess: &ParseSess, node_id: NodeId, rhs: &TokenTree, macros: &Stack<'_, MacroState<'_>>, @@ -342,24 +343,24 @@ fn check_occurrences( match *rhs { TokenTree::Token(..) => {} TokenTree::MetaVarDecl(span, _name, _kind) => { - sess.dcx.span_bug(span, "unexpected MetaVarDecl in rhs") + psess.dcx.span_bug(span, "unexpected MetaVarDecl in rhs") } TokenTree::MetaVar(span, name) => { let name = MacroRulesNormalizedIdent::new(name); - check_ops_is_prefix(sess, node_id, macros, binders, ops, span, name); + check_ops_is_prefix(psess, node_id, macros, binders, ops, span, name); } TokenTree::MetaVarExpr(dl, ref mve) => { let Some(name) = mve.ident().map(MacroRulesNormalizedIdent::new) else { return; }; - check_ops_is_prefix(sess, node_id, macros, binders, ops, dl.entire(), name); + check_ops_is_prefix(psess, node_id, macros, binders, ops, dl.entire(), name); } TokenTree::Delimited(.., ref del) => { - check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, guar); + check_nested_occurrences(psess, node_id, &del.tts, macros, binders, ops, guar); } TokenTree::Sequence(_, ref seq) => { let ops = ops.push(seq.kleene); - check_nested_occurrences(sess, node_id, &seq.tts, macros, binders, &ops, guar); + check_nested_occurrences(psess, node_id, &seq.tts, macros, binders, &ops, guar); } } } @@ -388,7 +389,7 @@ enum NestedMacroState { /// definitions, and sets `valid` to false in case of errors. 
/// /// Arguments: -/// - `sess` is used to emit diagnostics and lints +/// - `psess` is used to emit diagnostics and lints /// - `node_id` is used to emit lints /// - `tts` is checked as part of a RHS and may contain macro definitions /// - `macros` is the stack of possible outer macros @@ -396,7 +397,7 @@ enum NestedMacroState { /// - `ops` is the stack of Kleene operators from the RHS /// - `guar` is set in case of errors fn check_nested_occurrences( - sess: &ParseSess, + psess: &ParseSess, node_id: NodeId, tts: &[TokenTree], macros: &Stack<'_, MacroState<'_>>, @@ -434,7 +435,7 @@ fn check_nested_occurrences( (NestedMacroState::MacroRulesNot, &TokenTree::MetaVar(..)) => { state = NestedMacroState::MacroRulesNotName; // We check that the meta-variable is correctly used. - check_occurrences(sess, node_id, tt, macros, binders, ops, guar); + check_occurrences(psess, node_id, tt, macros, binders, ops, guar); } (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(.., del)) | (NestedMacroState::MacroName, TokenTree::Delimited(.., del)) @@ -443,11 +444,11 @@ fn check_nested_occurrences( let macro_rules = state == NestedMacroState::MacroRulesNotName; state = NestedMacroState::Empty; let rest = - check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, guar); + check_nested_macro(psess, node_id, macro_rules, &del.tts, &nested_macros, guar); // If we did not check the whole macro definition, then check the rest as if outside // the macro definition. check_nested_occurrences( - sess, + psess, node_id, &del.tts[rest..], macros, @@ -465,7 +466,7 @@ fn check_nested_occurrences( (NestedMacroState::Macro, &TokenTree::MetaVar(..)) => { state = NestedMacroState::MacroName; // We check that the meta-variable is correctly used. - check_occurrences(sess, node_id, tt, macros, binders, ops, guar); + check_occurrences(psess, node_id, tt, macros, binders, ops, guar); } (NestedMacroState::MacroName, TokenTree::Delimited(.., del)) if del.delim == Delimiter::Parenthesis => @@ -473,7 +474,7 @@ fn check_nested_occurrences( state = NestedMacroState::MacroNameParen; nested_binders = Binders::default(); check_binders( - sess, + psess, node_id, tt, &nested_macros, @@ -487,7 +488,7 @@ fn check_nested_occurrences( { state = NestedMacroState::Empty; check_occurrences( - sess, + psess, node_id, tt, &nested_macros, @@ -498,7 +499,7 @@ fn check_nested_occurrences( } (_, tt) => { state = NestedMacroState::Empty; - check_occurrences(sess, node_id, tt, macros, binders, ops, guar); + check_occurrences(psess, node_id, tt, macros, binders, ops, guar); } } } @@ -512,14 +513,14 @@ fn check_nested_occurrences( /// stopped checking because we detected we were not in a macro definition anymore. 
/// /// Arguments: -/// - `sess` is used to emit diagnostics and lints +/// - `psess` is used to emit diagnostics and lints /// - `node_id` is used to emit lints /// - `macro_rules` specifies whether the macro is `macro_rules` /// - `tts` is checked as a list of (LHS) => {RHS} /// - `macros` is the stack of outer macros /// - `guar` is set in case of errors fn check_nested_macro( - sess: &ParseSess, + psess: &ParseSess, node_id: NodeId, macro_rules: bool, tts: &[TokenTree], @@ -541,8 +542,8 @@ fn check_nested_macro( let lhs = &tts[i]; let rhs = &tts[i + 2]; let mut binders = Binders::default(); - check_binders(sess, node_id, lhs, macros, &mut binders, &Stack::Empty, guar); - check_occurrences(sess, node_id, rhs, macros, &binders, &Stack::Empty, guar); + check_binders(psess, node_id, lhs, macros, &mut binders, &Stack::Empty, guar); + check_occurrences(psess, node_id, rhs, macros, &binders, &Stack::Empty, guar); // Since the last semicolon is optional for `macro_rules` macros and decl_macro are not terminated, // we increment our checked position by how many token trees we already checked (the 3 // above) before checking for the separator. @@ -559,7 +560,7 @@ fn check_nested_macro( /// Checks that a meta-variable occurrence is valid. /// /// Arguments: -/// - `sess` is used to emit diagnostics and lints +/// - `psess` is used to emit diagnostics and lints /// - `node_id` is used to emit lints /// - `macros` is the stack of possible outer macros /// - `binders` contains the binders of the associated LHS @@ -567,7 +568,7 @@ fn check_nested_macro( /// - `span` is the span of the meta-variable to check /// - `name` is the name of the meta-variable to check fn check_ops_is_prefix( - sess: &ParseSess, + psess: &ParseSess, node_id: NodeId, macros: &Stack<'_, MacroState<'_>>, binders: &Binders, @@ -590,11 +591,11 @@ fn check_ops_is_prefix( for ops in acc.iter().rev() { occurrence_ops.extend_from_slice(ops); } - ops_is_prefix(sess, node_id, span, name, &binder.ops, &occurrence_ops); + ops_is_prefix(psess, node_id, span, name, &binder.ops, &occurrence_ops); return; } } - buffer_lint(sess, span.into(), node_id, format!("unknown macro variable `{name}`")); + buffer_lint(psess, span.into(), node_id, format!("unknown macro variable `{name}`")); } /// Returns whether `binder_ops` is a prefix of `occurrence_ops`. @@ -609,14 +610,14 @@ fn check_ops_is_prefix( /// It occurs under the Kleene stack ["*", "+"] and is bound under ["*"] only. 
/// /// Arguments: -/// - `sess` is used to emit diagnostics and lints +/// - `psess` is used to emit diagnostics and lints /// - `node_id` is used to emit lints /// - `span` is the span of the meta-variable being check /// - `name` is the name of the meta-variable being check /// - `binder_ops` is the stack of Kleene operators for the binder /// - `occurrence_ops` is the stack of Kleene operators for the occurrence fn ops_is_prefix( - sess: &ParseSess, + psess: &ParseSess, node_id: NodeId, span: Span, name: MacroRulesNormalizedIdent, @@ -628,7 +629,7 @@ fn ops_is_prefix( let mut span = MultiSpan::from_span(span); span.push_span_label(binder.span, "expected repetition"); let message = format!("variable '{name}' is still repeating at this depth"); - buffer_lint(sess, span, node_id, message); + buffer_lint(psess, span, node_id, message); return; } let occurrence = &occurrence_ops[i]; @@ -637,20 +638,20 @@ fn ops_is_prefix( span.push_span_label(binder.span, "expected repetition"); span.push_span_label(occurrence.span, "conflicting repetition"); let message = "meta-variable repeats with different Kleene operator"; - buffer_lint(sess, span, node_id, message); + buffer_lint(psess, span, node_id, message); return; } } } fn buffer_lint( - sess: &ParseSess, + psess: &ParseSess, span: MultiSpan, node_id: NodeId, message: impl Into<DiagnosticMessage>, ) { // Macros loaded from other crates have dummy node ids. if node_id != DUMMY_NODE_ID { - sess.buffer_lint(META_VARIABLE_MISUSE, span, node_id, message); + psess.buffer_lint(META_VARIABLE_MISUSE, span, node_id, message); } } diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index c11d538048a..c8631d96810 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -78,7 +78,7 @@ pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> // but `m!()` is allowed in expression positions (cf. issue #34706). if kind == AstFragmentKind::Expr && parser.token == token::Semi { if is_local { - parser.sess.buffer_lint_with_diagnostic( + parser.psess.buffer_lint_with_diagnostic( SEMICOLON_IN_EXPRESSIONS_FROM_MACROS, parser.token.span, lint_node_id, @@ -195,7 +195,7 @@ fn expand_macro<'cx>( lhses: &[Vec<MatcherLoc>], rhses: &[mbe::TokenTree], ) -> Box<dyn MacResult + 'cx> { - let sess = &cx.sess.parse_sess; + let psess = &cx.sess.psess; // Macros defined in the current crate have a real node id, // whereas macros from an external crate have a dummy id. let is_local = node_id != DUMMY_NODE_ID; @@ -206,7 +206,7 @@ fn expand_macro<'cx>( } // Track nothing for the best performance. - let try_success_result = try_match_macro(sess, name, &arg, lhses, &mut NoopTracker); + let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut NoopTracker); match try_success_result { Ok((i, named_matches)) => { @@ -230,7 +230,7 @@ fn expand_macro<'cx>( trace_macros_note(&mut cx.expansions, sp, msg); } - let p = Parser::new(sess, tts, None); + let p = Parser::new(psess, tts, None); if is_local { cx.resolver.record_macro_rule_usage(node_id, i); @@ -272,9 +272,9 @@ pub(super) enum CanRetry { /// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful, /// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors /// correctly. 
-#[instrument(level = "debug", skip(sess, arg, lhses, track), fields(tracking = %T::description()))] +#[instrument(level = "debug", skip(psess, arg, lhses, track), fields(tracking = %T::description()))] pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>( - sess: &ParseSess, + psess: &ParseSess, name: Ident, arg: &TokenStream, lhses: &'matcher [Vec], @@ -299,7 +299,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>( // hacky, but speeds up the `html5ever` benchmark significantly. (Issue // 68836 suggests a more comprehensive but more complex change to deal with // this situation.) - let parser = parser_from_cx(sess, arg.clone(), T::recovery()); + let parser = parser_from_cx(psess, arg.clone(), T::recovery()); // Try each arm's matchers. let mut tt_parser = TtParser::new(name); for (i, lhs) in lhses.iter().enumerate() { @@ -309,7 +309,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>( // This is used so that if a matcher is not `Success(..)`ful, // then the spans which became gated when parsing the unsuccessful matcher // are not recorded. On the first `Success(..)`ful matcher, the spans are merged. - let mut gated_spans_snapshot = mem::take(&mut *sess.gated_spans.spans.borrow_mut()); + let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut()); let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track); @@ -320,7 +320,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>( debug!("Parsed arm successfully"); // The matcher was `Success(..)`ful. // Merge the gated spans from parsing the matcher with the preexisting ones. - sess.gated_spans.merge(gated_spans_snapshot); + psess.gated_spans.merge(gated_spans_snapshot); return Ok((i, named_matches)); } @@ -342,7 +342,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>( // The matcher was not `Success(..)`ful. // Restore to the state before snapshotting and maybe try again. 
- mem::swap(&mut gated_spans_snapshot, &mut sess.gated_spans.spans.borrow_mut()); + mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut()); } Err(CanRetry::Yes) @@ -376,7 +376,7 @@ pub fn compile_declarative_macro( }; let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new()); - let dcx = &sess.parse_sess.dcx; + let dcx = &sess.psess.dcx; let lhs_nm = Ident::new(sym::lhs, def.span); let rhs_nm = Ident::new(sym::rhs, def.span); let tt_spec = Some(NonterminalKind::TT); @@ -430,7 +430,7 @@ pub fn compile_declarative_macro( let create_parser = || { let body = macro_def.body.tokens.clone(); - Parser::new(&sess.parse_sess, body, rustc_parse::MACRO_ARGUMENTS) + Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS) }; let parser = create_parser(); @@ -533,7 +533,7 @@ pub fn compile_declarative_macro( } check_emission(macro_check::check_meta_variables( - &sess.parse_sess, + &sess.psess, def.id, def.span, &lhses, @@ -1149,7 +1149,7 @@ fn check_matcher_core<'tt>( name, Some(NonterminalKind::PatParam { inferred: false }), )); - sess.parse_sess.buffer_lint_with_diagnostic( + sess.psess.buffer_lint_with_diagnostic( RUST_2021_INCOMPATIBLE_OR_PATTERNS, span, ast::CRATE_NODE_ID, @@ -1182,7 +1182,7 @@ fn check_matcher_core<'tt>( err.span_label(sp, format!("not allowed after `{kind}` fragments")); if kind == NonterminalKind::PatWithOr - && sess.parse_sess.edition.at_least_rust_2021() + && sess.psess.edition.at_least_rust_2021() && next_token.is_token(&BinOp(token::BinOpToken::Or)) { let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl( @@ -1406,10 +1406,10 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String { } pub(super) fn parser_from_cx( - sess: &ParseSess, + psess: &ParseSess, mut tts: TokenStream, recovery: Recovery, ) -> Parser<'_> { tts.desugar_doc_comments(); - Parser::new(sess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery) + Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery) } diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs index 84f7dc4771a..81e1de5b095 100644 --- a/compiler/rustc_expand/src/mbe/metavar_expr.rs +++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs @@ -27,30 +27,30 @@ pub(crate) enum MetaVarExpr { impl MetaVarExpr { /// Attempt to parse a meta-variable expression from a token stream. - pub(crate) fn parse<'sess>( + pub(crate) fn parse<'psess>( input: &TokenStream, outer_span: Span, - sess: &'sess ParseSess, - ) -> PResult<'sess, MetaVarExpr> { + psess: &'psess ParseSess, + ) -> PResult<'psess, MetaVarExpr> { let mut tts = input.trees(); - let ident = parse_ident(&mut tts, sess, outer_span)?; + let ident = parse_ident(&mut tts, psess, outer_span)?; let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = tts.next() else { let msg = "meta-variable expression parameter must be wrapped in parentheses"; - return Err(sess.dcx.struct_span_err(ident.span, msg)); + return Err(psess.dcx.struct_span_err(ident.span, msg)); }; - check_trailing_token(&mut tts, sess)?; + check_trailing_token(&mut tts, psess)?; let mut iter = args.trees(); let rslt = match ident.as_str() { - "count" => parse_count(&mut iter, sess, ident.span)?, + "count" => parse_count(&mut iter, psess, ident.span)?, "ignore" => { - eat_dollar(&mut iter, sess, ident.span)?; - MetaVarExpr::Ignore(parse_ident(&mut iter, sess, ident.span)?) + eat_dollar(&mut iter, psess, ident.span)?; + MetaVarExpr::Ignore(parse_ident(&mut iter, psess, ident.span)?) 
} - "index" => MetaVarExpr::Index(parse_depth(&mut iter, sess, ident.span)?), - "length" => MetaVarExpr::Length(parse_depth(&mut iter, sess, ident.span)?), + "index" => MetaVarExpr::Index(parse_depth(&mut iter, psess, ident.span)?), + "length" => MetaVarExpr::Length(parse_depth(&mut iter, psess, ident.span)?), _ => { let err_msg = "unrecognized meta-variable expression"; - let mut err = sess.dcx.struct_span_err(ident.span, err_msg); + let mut err = psess.dcx.struct_span_err(ident.span, err_msg); err.span_suggestion( ident.span, "supported expressions are count, ignore, index and length", @@ -60,7 +60,7 @@ pub(crate) fn parse<'sess>( return Err(err); } }; - check_trailing_token(&mut iter, sess)?; + check_trailing_token(&mut iter, psess)?; Ok(rslt) } @@ -73,12 +73,12 @@ pub(crate) fn ident(&self) -> Option { } // Checks if there are any remaining tokens. For example, `${ignore(ident ... a b c ...)}` -fn check_trailing_token<'sess>( +fn check_trailing_token<'psess>( iter: &mut RefTokenTreeCursor<'_>, - sess: &'sess ParseSess, -) -> PResult<'sess, ()> { + psess: &'psess ParseSess, +) -> PResult<'psess, ()> { if let Some(tt) = iter.next() { - let mut diag = sess + let mut diag = psess .dcx .struct_span_err(tt.span(), format!("unexpected token: {}", pprust::tt_to_string(tt))); diag.span_note(tt.span(), "meta-variable expression must not have trailing tokens"); @@ -89,21 +89,21 @@ fn check_trailing_token<'sess>( } /// Parse a meta-variable `count` expression: `count(ident[, depth])` -fn parse_count<'sess>( +fn parse_count<'psess>( iter: &mut RefTokenTreeCursor<'_>, - sess: &'sess ParseSess, + psess: &'psess ParseSess, span: Span, -) -> PResult<'sess, MetaVarExpr> { - eat_dollar(iter, sess, span)?; - let ident = parse_ident(iter, sess, span)?; +) -> PResult<'psess, MetaVarExpr> { + eat_dollar(iter, psess, span)?; + let ident = parse_ident(iter, psess, span)?; let depth = if try_eat_comma(iter) { if iter.look_ahead(0).is_none() { - return Err(sess.dcx.struct_span_err( + return Err(psess.dcx.struct_span_err( span, "`count` followed by a comma must have an associated index indicating its depth", )); } - parse_depth(iter, sess, span)? + parse_depth(iter, psess, span)? } else { 0 }; @@ -111,14 +111,14 @@ fn parse_count<'sess>( } /// Parses the depth used by index(depth) and length(depth). -fn parse_depth<'sess>( +fn parse_depth<'psess>( iter: &mut RefTokenTreeCursor<'_>, - sess: &'sess ParseSess, + psess: &'psess ParseSess, span: Span, -) -> PResult<'sess, usize> { +) -> PResult<'psess, usize> { let Some(tt) = iter.next() else { return Ok(0) }; let TokenTree::Token(token::Token { kind: token::TokenKind::Literal(lit), .. 
}, _) = tt else { - return Err(sess + return Err(psess .dcx .struct_span_err(span, "meta-variable expression depth must be a literal")); }; @@ -129,16 +129,16 @@ fn parse_depth<'sess>( Ok(n_usize) } else { let msg = "only unsuffixes integer literals are supported in meta-variable expressions"; - Err(sess.dcx.struct_span_err(span, msg)) + Err(psess.dcx.struct_span_err(span, msg)) } } /// Parses an generic ident -fn parse_ident<'sess>( +fn parse_ident<'psess>( iter: &mut RefTokenTreeCursor<'_>, - sess: &'sess ParseSess, + psess: &'psess ParseSess, span: Span, -) -> PResult<'sess, Ident> { +) -> PResult<'psess, Ident> { if let Some(tt) = iter.next() && let TokenTree::Token(token, _) = tt { @@ -147,7 +147,7 @@ fn parse_ident<'sess>( } let token_str = pprust::token_to_string(token); let mut err = - sess.dcx.struct_span_err(span, format!("expected identifier, found `{}`", &token_str)); + psess.dcx.struct_span_err(span, format!("expected identifier, found `{}`", &token_str)); err.span_suggestion( token.span, format!("try removing `{}`", &token_str), @@ -156,7 +156,7 @@ fn parse_ident<'sess>( ); return Err(err); } - Err(sess.dcx.struct_span_err(span, "expected identifier")) + Err(psess.dcx.struct_span_err(span, "expected identifier")) } /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the @@ -170,17 +170,17 @@ fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool { } /// Expects that the next item is a dollar sign. -fn eat_dollar<'sess>( +fn eat_dollar<'psess>( iter: &mut RefTokenTreeCursor<'_>, - sess: &'sess ParseSess, + psess: &'psess ParseSess, span: Span, -) -> PResult<'sess, ()> { +) -> PResult<'psess, ()> { if let Some(TokenTree::Token(token::Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) { let _ = iter.next(); return Ok(()); } - Err(sess.dcx.struct_span_err( + Err(psess.dcx.struct_span_err( span, "meta-variables within meta-variable expressions must be referenced using a dollar sign", )) diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index ec1dd807d1a..5fd3716743b 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -175,8 +175,7 @@ fn parse_tree<'a>( // The delimiter is `{`. This indicates the beginning // of a meta-variable expression (e.g. `${count(ident)}`). // Try to parse the meta-variable expression. - match MetaVarExpr::parse(tts, delim_span.entire(), &sess.parse_sess) - { + match MetaVarExpr::parse(tts, delim_span.entire(), &sess.psess) { Err(err) => { err.emit(); // Returns early the same read `$` to avoid spanning diff --git a/compiler/rustc_expand/src/module.rs b/compiler/rustc_expand/src/module.rs index 1282cf2c03a..8a68b39e496 100644 --- a/compiler/rustc_expand/src/module.rs +++ b/compiler/rustc_expand/src/module.rs @@ -66,7 +66,7 @@ pub(crate) fn parse_external_mod( } // Actually parse the external file as a module. 
- let mut parser = new_parser_from_file(&sess.parse_sess, &mp.file_path, Some(span)); + let mut parser = new_parser_from_file(&sess.psess, &mp.file_path, Some(span)); let (inner_attrs, items, inner_span) = parser.parse_mod(&token::Eof).map_err(|err| ModError::ParserError(err))?; attrs.extend(inner_attrs); @@ -157,7 +157,7 @@ fn mod_file_path<'a>( DirOwnership::Owned { relative } => relative, DirOwnership::UnownedViaBlock => None, }; - let result = default_submod_path(&sess.parse_sess, ident, relative, dir_path); + let result = default_submod_path(&sess.psess, ident, relative, dir_path); match dir_ownership { DirOwnership::Owned { .. } => result, DirOwnership::UnownedViaBlock => Err(ModError::ModInBlock(match result { @@ -185,11 +185,7 @@ fn mod_file_path_from_attr( // complexity). Usually bad forms are checked in AstValidator (via // `check_builtin_attribute`), but by the time that runs the macro // is expanded, and it doesn't give an error. - validate_attr::emit_fatal_malformed_builtin_attribute( - &sess.parse_sess, - first_path, - sym::path, - ); + validate_attr::emit_fatal_malformed_builtin_attribute(&sess.psess, first_path, sym::path); }; let path_str = path_sym.as_str(); @@ -207,7 +203,7 @@ fn mod_file_path_from_attr( /// Returns a path to a module. // Public for rustfmt usage. pub fn default_submod_path<'a>( - sess: &'a ParseSess, + psess: &'a ParseSess, ident: Ident, relative: Option, dir_path: &Path, @@ -229,8 +225,8 @@ pub fn default_submod_path<'a>( format!("{}{}{}mod.rs", relative_prefix, ident.name, path::MAIN_SEPARATOR); let default_path = dir_path.join(&default_path_str); let secondary_path = dir_path.join(&secondary_path_str); - let default_exists = sess.source_map().file_exists(&default_path); - let secondary_exists = sess.source_map().file_exists(&secondary_path); + let default_exists = psess.source_map().file_exists(&default_path); + let secondary_exists = psess.source_map().file_exists(&secondary_path); match (default_exists, secondary_exists) { (true, false) => Ok(ModulePathSuccess { diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs index 0e07b41b43c..066afd7a41d 100644 --- a/compiler/rustc_expand/src/parse/tests.rs +++ b/compiler/rustc_expand/src/parse/tests.rs @@ -1,5 +1,6 @@ use crate::tests::{ - matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error, + matches_codepattern, psess, string_to_stream, with_error_checking_parse, + with_expected_parse_error, }; use ast::token::IdentIsRaw; @@ -14,19 +15,10 @@ use rustc_parse::parser::ForceCollect; use rustc_session::parse::ParseSess; use rustc_span::create_default_session_globals_then; -use rustc_span::source_map::FilePathMapping; use rustc_span::symbol::{kw, sym, Symbol}; use rustc_span::{BytePos, FileName, Pos, Span}; - use std::path::PathBuf; -fn sess() -> ParseSess { - ParseSess::new( - vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE], - FilePathMapping::empty(), - ) -} - /// Parses an item. /// /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err` @@ -34,9 +26,9 @@ fn sess() -> ParseSess { fn parse_item_from_source_str( name: FileName, source: String, - sess: &ParseSess, + psess: &ParseSess, ) -> PResult<'_, Option>> { - new_parser_from_source_str(sess, name, source).parse_item(ForceCollect::No) + new_parser_from_source_str(psess, name, source).parse_item(ForceCollect::No) } // Produces a `rustc_span::span`. 
@@ -46,12 +38,12 @@ fn sp(a: u32, b: u32) -> Span { /// Parses a string, return an expression. fn string_to_expr(source_str: String) -> P { - with_error_checking_parse(source_str, &sess(), |p| p.parse_expr()) + with_error_checking_parse(source_str, &psess(), |p| p.parse_expr()) } /// Parses a string, returns an item. fn string_to_item(source_str: String) -> Option> { - with_error_checking_parse(source_str, &sess(), |p| p.parse_item(ForceCollect::No)) + with_error_checking_parse(source_str, &psess(), |p| p.parse_item(ForceCollect::No)) } #[test] @@ -287,24 +279,24 @@ fn wb() -> c_int { O_WRONLY as c_int } #[test] fn crlf_doc_comments() { create_default_session_globals_then(|| { - let sess = sess(); + let psess = psess(); let name_1 = FileName::Custom("crlf_source_1".to_string()); let source = "/// doc comment\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name_1, source, &sess).unwrap().unwrap(); + let item = parse_item_from_source_str(name_1, source, &psess).unwrap().unwrap(); let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap(); assert_eq!(doc.as_str(), " doc comment"); let name_2 = FileName::Custom("crlf_source_2".to_string()); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name_2, source, &sess).unwrap().unwrap(); + let item = parse_item_from_source_str(name_2, source, &psess).unwrap().unwrap(); let docs = item.attrs.iter().filter_map(|at| at.doc_str()).collect::>(); let b: &[_] = &[Symbol::intern(" doc comment"), Symbol::intern(" line 2")]; assert_eq!(&docs[..], b); let name_3 = FileName::Custom("clrf_source_3".to_string()); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap(); + let item = parse_item_from_source_str(name_3, source, &psess).unwrap().unwrap(); let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap(); assert_eq!(doc.as_str(), " doc comment\n * with CRLF "); }); @@ -315,24 +307,24 @@ fn ttdelim_span() { fn parse_expr_from_source_str( name: FileName, source: String, - sess: &ParseSess, + psess: &ParseSess, ) -> PResult<'_, P> { - new_parser_from_source_str(sess, name, source).parse_expr() + new_parser_from_source_str(psess, name, source).parse_expr() } create_default_session_globals_then(|| { - let sess = sess(); + let psess = psess(); let expr = parse_expr_from_source_str( PathBuf::from("foo").into(), "foo!( fn main() { body } )".to_string(), - &sess, + &psess, ) .unwrap(); let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") }; let span = mac.args.tokens.trees().last().unwrap().span(); - match sess.source_map().span_to_snippet(span) { + match psess.source_map().span_to_snippet(span) { Ok(s) => assert_eq!(&s[..], "{ body }"), Err(_) => panic!("could not get snippet"), } @@ -348,7 +340,7 @@ fn out_of_line_mod() { let item = parse_item_from_source_str( PathBuf::from("foo").into(), "mod foo { struct S; mod this_does_not_exist; }".to_owned(), - &sess(), + &psess(), ) .unwrap() .unwrap(); diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs index 23caf2f193a..4b5c148cb55 100644 --- a/compiler/rustc_expand/src/proc_macro.rs +++ b/compiler/rustc_expand/src/proc_macro.rs @@ -162,7 +162,7 @@ fn expand( let error_count_before = ecx.dcx().err_count(); let mut parser = - rustc_parse::stream_to_parser(&ecx.sess.parse_sess, stream, Some("proc-macro derive")); + 
rustc_parse::stream_to_parser(&ecx.sess.psess, stream, Some("proc-macro derive")); let mut items = vec![]; loop { diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 6fe0d611363..efe35c252d8 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -354,7 +354,7 @@ fn from_internal((stream, rustc): (TokenStream, &mut Rustc<'_, '_>)) -> Self { )] } TokenTree::Ident(self::Ident { sym, is_raw, span }) => { - rustc.sess().symbol_gallery.insert(sym, span); + rustc.psess().symbol_gallery.insert(sym, span); smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)] } TokenTree::Literal(self::Literal { @@ -429,8 +429,8 @@ pub fn new(ecx: &'a mut ExtCtxt<'b>) -> Self { } } - fn sess(&self) -> &ParseSess { - self.ecx.parse_sess() + fn psess(&self) -> &ParseSess { + self.ecx.psess() } } @@ -448,19 +448,19 @@ fn injected_env_var(&mut self, var: &str) -> Option { } fn track_env_var(&mut self, var: &str, value: Option<&str>) { - self.sess() + self.psess() .env_depinfo .borrow_mut() .insert((Symbol::intern(var), value.map(Symbol::intern))); } fn track_path(&mut self, path: &str) { - self.sess().file_depinfo.borrow_mut().insert(Symbol::intern(path)); + self.psess().file_depinfo.borrow_mut().insert(Symbol::intern(path)); } fn literal_from_str(&mut self, s: &str) -> Result, ()> { let name = FileName::proc_macro_source_code(s); - let mut parser = rustc_parse::new_parser_from_source_str(self.sess(), name, s.to_owned()); + let mut parser = rustc_parse::new_parser_from_source_str(self.psess(), name, s.to_owned()); let first_span = parser.token.span.data(); let minus_present = parser.eat(&token::BinOp(token::Minus)); @@ -514,7 +514,7 @@ fn literal_from_str(&mut self, s: &str) -> Result) { let message = rustc_errors::DiagnosticMessage::from(diagnostic.message); let mut diag: Diag<'_, ()> = - Diag::new(&self.sess().dcx, diagnostic.level.to_internal(), message); + Diag::new(&self.psess().dcx, diagnostic.level.to_internal(), message); diag.span(MultiSpan::from_spans(diagnostic.spans)); for child in diagnostic.children { diag.sub(child.level.to_internal(), child.message, MultiSpan::from_spans(child.spans)); @@ -532,7 +532,7 @@ fn from_str(&mut self, src: &str) -> Self::TokenStream { parse_stream_from_source_str( FileName::proc_macro_source_code(src), src.to_string(), - self.sess(), + self.psess(), Some(self.call_site), ) } @@ -545,7 +545,7 @@ fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result = try { let mut p = rustc_parse::stream_to_parser( - self.sess(), + self.psess(), stream.clone(), Some("proc_macro expand expr"), ); @@ -680,7 +680,7 @@ fn debug(&mut self, span: Self::Span) -> String { } fn source_file(&mut self, span: Self::Span) -> Self::SourceFile { - self.sess().source_map().lookup_char_pos(span.lo()).file + self.psess().source_map().lookup_char_pos(span.lo()).file } fn parent(&mut self, span: Self::Span) -> Option { @@ -692,7 +692,7 @@ fn source(&mut self, span: Self::Span) -> Self::Span { } fn byte_range(&mut self, span: Self::Span) -> Range { - let source_map = self.sess().source_map(); + let source_map = self.psess().source_map(); let relative_start_pos = source_map.lookup_byte_offset(span.lo()).pos; let relative_end_pos = source_map.lookup_byte_offset(span.hi()).pos; @@ -708,18 +708,18 @@ fn end(&mut self, span: Self::Span) -> Self::Span { } fn line(&mut self, span: Self::Span) -> usize { - let loc = 
self.sess().source_map().lookup_char_pos(span.lo()); + let loc = self.psess().source_map().lookup_char_pos(span.lo()); loc.line } fn column(&mut self, span: Self::Span) -> usize { - let loc = self.sess().source_map().lookup_char_pos(span.lo()); + let loc = self.psess().source_map().lookup_char_pos(span.lo()); loc.col.to_usize() + 1 } fn join(&mut self, first: Self::Span, second: Self::Span) -> Option { - let self_loc = self.sess().source_map().lookup_char_pos(first.lo()); - let other_loc = self.sess().source_map().lookup_char_pos(second.lo()); + let self_loc = self.psess().source_map().lookup_char_pos(first.lo()); + let other_loc = self.psess().source_map().lookup_char_pos(second.lo()); if self_loc.file.name != other_loc.file.name { return None; @@ -769,7 +769,7 @@ fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span { } fn source_text(&mut self, span: Self::Span) -> Option { - self.sess().source_map().span_to_snippet(span).ok() + self.psess().source_map().span_to_snippet(span).ok() } /// Saves the provided span into the metadata of @@ -797,7 +797,7 @@ fn source_text(&mut self, span: Self::Span) -> Option { /// since we've loaded `my_proc_macro` from disk in order to execute it). /// In this way, we have obtained a span pointing into `my_proc_macro` fn save_span(&mut self, span: Self::Span) -> usize { - self.sess().save_proc_macro_span(span) + self.psess().save_proc_macro_span(span) } fn recover_proc_macro_span(&mut self, id: usize) -> Self::Span { diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs index b242ce795fd..8c47b759453 100644 --- a/compiler/rustc_expand/src/tests.rs +++ b/compiler/rustc_expand/src/tests.rs @@ -18,9 +18,13 @@ use std::str; use std::sync::{Arc, Mutex}; +pub(crate) fn psess() -> ParseSess { + ParseSess::new(vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE]) +} + /// Map string to parser (via tts). -fn string_to_parser(ps: &ParseSess, source_str: String) -> Parser<'_> { - new_parser_from_source_str(ps, PathBuf::from("bogofile").into(), source_str) +fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> { + new_parser_from_source_str(psess, PathBuf::from("bogofile").into(), source_str) } fn create_test_handler() -> (DiagCtxt, Lrc, Arc>>) { @@ -40,13 +44,13 @@ fn create_test_handler() -> (DiagCtxt, Lrc, Arc>>) { /// Returns the result of parsing the given string via the given callback. /// /// If there are any errors, this will panic. -pub(crate) fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T +pub(crate) fn with_error_checking_parse<'a, T, F>(s: String, psess: &'a ParseSess, f: F) -> T where F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>, { - let mut p = string_to_parser(&ps, s); + let mut p = string_to_parser(&psess, s); let x = f(&mut p).unwrap(); - p.sess.dcx.abort_if_errors(); + p.psess.dcx.abort_if_errors(); x } @@ -57,8 +61,8 @@ pub(crate) fn with_expected_parse_error(source_str: &str, expected_output: F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, T>, { let (handler, source_map, output) = create_test_handler(); - let ps = ParseSess::with_dcx(handler, source_map); - let mut p = string_to_parser(&ps, source_str.to_string()); + let psess = ParseSess::with_dcx(handler, source_map); + let mut p = string_to_parser(&psess, source_str.to_string()); let result = f(&mut p); assert!(result.is_ok()); @@ -72,24 +76,18 @@ pub(crate) fn with_expected_parse_error(source_str: &str, expected_output: /// Maps a string to tts, using a made-up filename. 
pub(crate) fn string_to_stream(source_str: String) -> TokenStream { - let ps = ParseSess::new( - vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE], - FilePathMapping::empty(), - ); + let psess = psess(); source_file_to_stream( - &ps, - ps.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str), + &psess, + psess.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str), None, ) } /// Parses a string, returns a crate. pub(crate) fn string_to_crate(source_str: String) -> ast::Crate { - let ps = ParseSess::new( - vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE], - FilePathMapping::empty(), - ); - with_error_checking_parse(source_str, &ps, |p| p.parse_crate_mod()) + let psess = psess(); + with_error_checking_parse(source_str, &psess, |p| p.parse_crate_mod()) } /// Does the given string match the pattern? whitespace in the first string diff --git a/compiler/rustc_hir_analysis/src/check/errs.rs b/compiler/rustc_hir_analysis/src/check/errs.rs index c92320bc0fe..3d32fdd89c8 100644 --- a/compiler/rustc_hir_analysis/src/check/errs.rs +++ b/compiler/rustc_hir_analysis/src/check/errs.rs @@ -70,7 +70,7 @@ fn handle_static_mut_ref( } else { (errors::StaticMutRefSugg::Shared { span, var }, "shared") }; - tcx.sess.parse_sess.dcx.emit_err(errors::StaticMutRef { span, sugg, shared }); + tcx.sess.psess.dcx.emit_err(errors::StaticMutRef { span, sugg, shared }); return; } diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index b8cceebe103..fcb490bcfec 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -390,7 +390,7 @@ fn check_expr_unary( ); let sp = tcx.sess.source_map().start_point(expr.span).with_parent(None); if let Some(sp) = - tcx.sess.parse_sess.ambiguous_block_expr_parse.borrow().get(&sp) + tcx.sess.psess.ambiguous_block_expr_parse.borrow().get(&sp) { err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp)); } diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 809102557ac..5a1c7b05611 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -1117,7 +1117,7 @@ pub(in super::super) fn suggest_missing_parentheses( expr: &hir::Expr<'_>, ) -> bool { let sp = self.tcx.sess.source_map().start_point(expr.span).with_parent(None); - if let Some(sp) = self.tcx.sess.parse_sess.ambiguous_block_expr_parse.borrow().get(&sp) { + if let Some(sp) = self.tcx.sess.psess.ambiguous_block_expr_parse.borrow().get(&sp) { // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }` err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp)); true diff --git a/compiler/rustc_hir_typeck/src/op.rs b/compiler/rustc_hir_typeck/src/op.rs index 9c9507d5852..79f574aa7fd 100644 --- a/compiler/rustc_hir_typeck/src/op.rs +++ b/compiler/rustc_hir_typeck/src/op.rs @@ -818,7 +818,7 @@ pub fn check_user_unop( let sp = self.tcx.sess.source_map().start_point(ex.span).with_parent(None); if let Some(sp) = - self.tcx.sess.parse_sess.ambiguous_block_expr_parse.borrow().get(&sp) + self.tcx.sess.psess.ambiguous_block_expr_parse.borrow().get(&sp) { // If the previous expression was a block expression, suggest parentheses // (turning this into a binary subtraction operation instead.) 
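// Illustrative aside (not part of the patch): every hunk above is the same mechanical
// substitution -- code holding a `Session` now reaches the parse session through the shorter
// `psess` field, while the `ParseSess` type and its contents are untouched. A minimal sketch
// of the pattern, assuming a `&rustc_session::Session` in hand; `lookup_ambiguous_block` is a
// hypothetical helper, not something added by this PR.
fn lookup_ambiguous_block(
    sess: &rustc_session::Session,
    sp: rustc_span::Span,
) -> Option<rustc_span::Span> {
    // Before this patch: sess.parse_sess.ambiguous_block_expr_parse.borrow().get(&sp).copied()
    sess.psess.ambiguous_block_expr_parse.borrow().get(&sp).copied()
}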
diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs index 8311a735133..24512dea939 100644 --- a/compiler/rustc_incremental/src/persist/dirty_clean.rs +++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs @@ -395,7 +395,7 @@ fn check_item(&mut self, item_id: LocalDefId) { /// a cfg flag called `foo`. fn check_config(tcx: TyCtxt<'_>, attr: &Attribute) -> bool { debug!("check_config(attr={:?})", attr); - let config = &tcx.sess.parse_sess.config; + let config = &tcx.sess.psess.config; debug!("check_config: config={:?}", config); let mut cfg = None; for item in attr.meta_item_list().unwrap_or_else(ThinVec::new) { diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index cd7957c3bce..3a2bbed19be 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -45,7 +45,7 @@ pub struct Compiler { pub(crate) fn parse_cfg(dcx: &DiagCtxt, cfgs: Vec) -> Cfg { cfgs.into_iter() .map(|s| { - let sess = ParseSess::with_silent_emitter(format!( + let psess = ParseSess::with_silent_emitter(format!( "this error occurred on the command line: `--cfg={s}`" )); let filename = FileName::cfg_spec_source_code(&s); @@ -61,7 +61,7 @@ macro_rules! error { }; } - match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) { + match maybe_new_parser_from_source_str(&psess, filename, s.to_string()) { Ok(mut parser) => match parser.parse_meta_item() { Ok(meta_item) if parser.token == token::Eof => { if meta_item.path.segments.len() != 1 { @@ -107,7 +107,7 @@ pub(crate) fn parse_check_cfg(dcx: &DiagCtxt, specs: Vec) -> CheckCfg { let mut check_cfg = CheckCfg { exhaustive_names, exhaustive_values, ..CheckCfg::default() }; for s in specs { - let sess = ParseSess::with_silent_emitter(format!( + let psess = ParseSess::with_silent_emitter(format!( "this error occurred on the command line: `--check-cfg={s}`" )); let filename = FileName::cfg_spec_source_code(&s); @@ -127,7 +127,7 @@ macro_rules! error { error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`") }; - let mut parser = match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) { + let mut parser = match maybe_new_parser_from_source_str(&psess, filename, s.to_string()) { Ok(parser) => parser, Err(errs) => { errs.into_iter().for_each(|err| err.cancel()); @@ -277,7 +277,7 @@ pub struct Config { pub lint_caps: FxHashMap, /// This is a callback from the driver that is called when [`ParseSess`] is created. - pub parse_sess_created: Option>, + pub psess_created: Option>, /// This is a callback to hash otherwise untracked state used by the caller, if the /// hash changes between runs the incremental cache will be cleared. 
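// Illustrative aside (not part of the patch): drivers embedding rustc_interface set the renamed
// callback field when building their `Config`. A minimal sketch, assuming the callback keeps its
// pre-rename boxed type `Box<dyn FnOnce(&mut ParseSess) + Send>` (the generics are elided in the
// hunk above); the env-var name is made up for illustration.
fn configure(config: &mut rustc_interface::interface::Config) {
    config.psess_created = Some(Box::new(|psess: &mut rustc_session::parse::ParseSess| {
        // Record an extra env-var dependency so it shows up in the emitted dep-info.
        psess
            .env_depinfo
            .get_mut()
            .insert((rustc_span::Symbol::intern("MY_TOOL_CONFIG"), None));
    }));
}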
@@ -393,14 +393,14 @@ pub fn run_compiler(config: Config, f: impl FnOnce(&Compiler) -> R + Se let cfg = parse_cfg(&sess.dcx(), config.crate_cfg); let mut cfg = config::build_configuration(&sess, cfg); util::add_configuration(&mut cfg, &mut sess, &*codegen_backend); - sess.parse_sess.config = cfg; + sess.psess.config = cfg; let mut check_cfg = parse_check_cfg(&sess.dcx(), config.crate_check_cfg); check_cfg.fill_well_known(&sess.target); - sess.parse_sess.check_config = check_cfg; + sess.psess.check_config = check_cfg; - if let Some(parse_sess_created) = config.parse_sess_created { - parse_sess_created(&mut sess.parse_sess); + if let Some(psess_created) = config.psess_created { + psess_created(&mut sess.psess); } if let Some(hash_untracked_state) = config.hash_untracked_state { @@ -422,7 +422,7 @@ pub fn run_compiler(config: Config, f: impl FnOnce(&Compiler) -> R + Se let compiler = Compiler { sess, codegen_backend, override_queries: config.override_queries }; - rustc_span::set_source_map(compiler.sess.parse_sess.clone_source_map(), move || { + rustc_span::set_source_map(compiler.sess.psess.clone_source_map(), move || { // There are two paths out of `f`. // - Normal exit. // - Panic, e.g. triggered by `abort_if_errors`. diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 66140168759..085e9026051 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -44,9 +44,9 @@ pub fn parse<'a>(sess: &'a Session) -> PResult<'a, ast::Crate> { let krate = sess.time("parse_crate", || match &sess.io.input { - Input::File(file) => parse_crate_from_file(file, &sess.parse_sess), + Input::File(file) => parse_crate_from_file(file, &sess.psess), Input::Str { input, name } => { - parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess) + parse_crate_from_source_str(name.clone(), input.clone(), &sess.psess) } })?; @@ -205,7 +205,7 @@ fn configure_and_expand( // The rest is error reporting - sess.parse_sess.buffered_lints.with_lock(|buffered_lints: &mut Vec| { + sess.psess.buffered_lints.with_lock(|buffered_lints: &mut Vec| { buffered_lints.append(&mut ecx.buffered_early_lint); }); @@ -297,7 +297,7 @@ fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) { }); // Add all buffered lints from the `ParseSess` to the `Session`. - sess.parse_sess.buffered_lints.with_lock(|buffered_lints| { + sess.psess.buffered_lints.with_lock(|buffered_lints| { info!("{} parse sess buffered_lints", buffered_lints.len()); for early_lint in buffered_lints.drain(..) { lint_buffer.add_early_lint(early_lint); @@ -305,7 +305,7 @@ fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) { }); // Gate identifiers containing invalid Unicode codepoints that were recovered during lexing. - sess.parse_sess.bad_unicode_identifiers.with_lock(|identifiers| { + sess.psess.bad_unicode_identifiers.with_lock(|identifiers| { for (ident, mut spans) in identifiers.drain(..) { spans.sort(); if ident == sym::ferris { @@ -422,7 +422,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P // Account for explicitly marked-to-track files // (e.g. accessed in proc macros). - let file_depinfo = sess.parse_sess.file_depinfo.borrow(); + let file_depinfo = sess.psess.file_depinfo.borrow(); let normalize_path = |path: PathBuf| { let file = FileName::from(path); @@ -485,7 +485,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P } // Emit special comments with information about accessed environment variables. 
- let env_depinfo = sess.parse_sess.env_depinfo.borrow(); + let env_depinfo = sess.psess.env_depinfo.borrow(); if !env_depinfo.is_empty() { // We will soon sort, so the initial order does not matter. #[allow(rustc::potential_query_instability)] @@ -956,7 +956,7 @@ fn get_recursion_limit(krate_attrs: &[ast::Attribute], sess: &Session) -> Limit // `check_builtin_attribute`), but by the time that runs the macro // is expanded, and it doesn't give an error. validate_attr::emit_fatal_malformed_builtin_attribute( - &sess.parse_sess, + &sess.psess, attr, sym::recursion_limit, ); diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs index 86858bfe41d..7cdf7cd25b1 100644 --- a/compiler/rustc_interface/src/queries.rs +++ b/compiler/rustc_interface/src/queries.rs @@ -120,7 +120,7 @@ pub fn global_ctxt(&'tcx self) -> Result> rustc_builtin_macros::cmdline_attrs::inject( &mut krate, - &sess.parse_sess, + &sess.psess, &sess.opts.unstable_opts.crate_attr, ); diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 823614e1f06..7572805f661 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -369,7 +369,7 @@ pub(crate) fn check_attr_crate_type( // by the time that runs the macro is expanded, and it doesn't // give an error. validate_attr::emit_fatal_malformed_builtin_attribute( - &sess.parse_sess, + &sess.psess, a, sym::crate_type, ); diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 9f73d2e6812..8d61d297839 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -1868,7 +1868,7 @@ fn check_ident_token( }; // Don't lint `r#foo`. - if cx.sess().parse_sess.raw_identifier_spans.contains(ident.span) { + if cx.sess().psess.raw_identifier_spans.contains(ident.span) { return; } diff --git a/compiler/rustc_lint/src/context/diagnostics.rs b/compiler/rustc_lint/src/context/diagnostics.rs index 728996f0741..ad1808b5f80 100644 --- a/compiler/rustc_lint/src/context/diagnostics.rs +++ b/compiler/rustc_lint/src/context/diagnostics.rs @@ -186,12 +186,12 @@ pub(super) fn builtin(sess: &Session, diagnostic: BuiltinLintDiagnostics, diag: BuiltinLintDiagnostics::UnexpectedCfgName((name, name_span), value) => { #[allow(rustc::potential_query_instability)] let possibilities: Vec = - sess.parse_sess.check_config.expecteds.keys().copied().collect(); + sess.psess.check_config.expecteds.keys().copied().collect(); let mut names_possibilities: Vec<_> = if value.is_none() { // We later sort and display all the possibilities, so the order here does not matter. #[allow(rustc::potential_query_instability)] - sess.parse_sess + sess.psess .check_config .expecteds .iter() @@ -212,7 +212,7 @@ pub(super) fn builtin(sess: &Session, diagnostic: BuiltinLintDiagnostics, diag: // Suggest the most probable if we found one } else if let Some(best_match) = find_best_match_for_name(&possibilities, name, None) { if let Some(ExpectedValues::Some(best_match_values)) = - sess.parse_sess.check_config.expecteds.get(&best_match) + sess.psess.check_config.expecteds.get(&best_match) { // We will soon sort, so the initial order does not matter. 
#[allow(rustc::potential_query_instability)] @@ -322,8 +322,7 @@ pub(super) fn builtin(sess: &Session, diagnostic: BuiltinLintDiagnostics, diag: } } BuiltinLintDiagnostics::UnexpectedCfgValue((name, name_span), value) => { - let Some(ExpectedValues::Some(values)) = - &sess.parse_sess.check_config.expecteds.get(&name) + let Some(ExpectedValues::Some(values)) = &sess.psess.check_config.expecteds.get(&name) else { bug!( "it shouldn't be possible to have a diagnostic on a value whose name is not in values" @@ -398,8 +397,7 @@ pub(super) fn builtin(sess: &Session, diagnostic: BuiltinLintDiagnostics, diag: // We don't want to suggest adding values to well known names // since those are defined by rustc it-self. Users can still // do it if they want, but should not encourage them. - let is_cfg_a_well_know_name = - sess.parse_sess.check_config.well_known_names.contains(&name); + let is_cfg_a_well_know_name = sess.psess.check_config.well_known_names.contains(&name); let inst = if let Some((value, _value_span)) = value { let pre = if is_from_cargo { "\\" } else { "" }; diff --git a/compiler/rustc_lint/src/non_ascii_idents.rs b/compiler/rustc_lint/src/non_ascii_idents.rs index 5e66ade0357..79bc78ae55a 100644 --- a/compiler/rustc_lint/src/non_ascii_idents.rs +++ b/compiler/rustc_lint/src/non_ascii_idents.rs @@ -172,7 +172,7 @@ fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) { } let mut has_non_ascii_idents = false; - let symbols = cx.sess().parse_sess.symbol_gallery.symbols.lock(); + let symbols = cx.sess().psess.symbol_gallery.symbols.lock(); // Sort by `Span` so that error messages make sense with respect to the // order of identifier locations in the code. diff --git a/compiler/rustc_lint/src/non_fmt_panic.rs b/compiler/rustc_lint/src/non_fmt_panic.rs index ebef77f6879..a2d07fff506 100644 --- a/compiler/rustc_lint/src/non_fmt_panic.rs +++ b/compiler/rustc_lint/src/non_fmt_panic.rs @@ -231,7 +231,7 @@ fn check_panic_str<'tcx>( let fmt_span = arg.span.source_callsite(); - let (snippet, style) = match cx.sess().parse_sess.source_map().span_to_snippet(fmt_span) { + let (snippet, style) = match cx.sess().psess.source_map().span_to_snippet(fmt_span) { Ok(snippet) => { // Count the number of `#`s between the `r` and `"`. let style = snippet.strip_prefix('r').and_then(|s| s.find('"')); @@ -282,7 +282,7 @@ fn check_panic_str<'tcx>( /// Given the span of `some_macro!(args);`, gives the span of `(` and `)`, /// and the type of (opening) delimiter used. 
fn find_delimiters(cx: &LateContext<'_>, span: Span) -> Option<(Span, Span, char)> { - let snippet = cx.sess().parse_sess.source_map().span_to_snippet(span).ok()?; + let snippet = cx.sess().psess.source_map().span_to_snippet(span).ok()?; let (open, open_ch) = snippet.char_indices().find(|&(_, c)| "([{".contains(c))?; let close = snippet.rfind(|c| ")]}".contains(c))?; Some(( diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs index f65fe1a29c7..8b48570fbba 100644 --- a/compiler/rustc_metadata/src/creader.rs +++ b/compiler/rustc_metadata/src/creader.rs @@ -971,7 +971,7 @@ fn report_unused_deps(&mut self, krate: &ast::Crate) { continue; } - self.sess.parse_sess.buffer_lint( + self.sess.psess.buffer_lint( lint::builtin::UNUSED_CRATE_DEPENDENCIES, span, ast::CRATE_NODE_ID, diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index e7c80639a0d..09cb6b9fa0a 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -420,7 +420,7 @@ fn decode(d: &mut DecodeContext<'a, 'tcx>) -> ExpnIndex { impl<'a, 'tcx> SpanDecoder for DecodeContext<'a, 'tcx> { fn decode_attr_id(&mut self) -> rustc_span::AttrId { let sess = self.sess.expect("can't decode AttrId without Session"); - sess.parse_sess.attr_id_generator.mk_attr_id() + sess.psess.attr_id_generator.mk_attr_id() } fn decode_crate_num(&mut self) -> CrateNum { diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index fdb2b4f2024..7d3f84060f6 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1789,7 +1789,7 @@ fn encode_proc_macros(&mut self) -> Option { let stability = tcx.lookup_stability(CRATE_DEF_ID); let macros = self.lazy_array(tcx.resolutions(()).proc_macros.iter().map(|p| p.local_def_index)); - for (i, span) in self.tcx.sess.parse_sess.proc_macro_quoted_spans() { + for (i, span) in self.tcx.sess.psess.proc_macro_quoted_spans() { let span = self.lazy(span); self.tables.proc_macro_quoted_spans.set_some(i, span); } diff --git a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index 31f986403ab..1b09f9fdc49 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -269,7 +269,7 @@ fn suggestion_for_allocator_api( if feature == sym::allocator_api { if let Some(trait_) = tcx.opt_parent(def_id) { if tcx.is_diagnostic_item(sym::Vec, trait_) { - let sm = tcx.sess.parse_sess.source_map(); + let sm = tcx.sess.psess.source_map(); let inner_types = sm.span_extend_to_prev_char(span, '<', true); if let Ok(snippet) = sm.span_to_snippet(inner_types) { return Some(( diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 4cfd061ebd9..ca998ccc356 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -42,12 +42,12 @@ pub struct UnmatchedDelim { pub candidate_span: Option, } -pub(crate) fn parse_token_trees<'sess, 'src>( - sess: &'sess ParseSess, +pub(crate) fn parse_token_trees<'psess, 'src>( + psess: &'psess ParseSess, mut src: &'src str, mut start_pos: BytePos, override_span: Option, -) -> Result>> { +) -> Result>> { // Skip `#!`, if present. 
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) { src = &src[shebang_len..]; @@ -56,7 +56,7 @@ pub(crate) fn parse_token_trees<'sess, 'src>( let cursor = Cursor::new(src); let string_reader = StringReader { - sess, + psess, start_pos, pos: start_pos, src, @@ -75,7 +75,7 @@ pub(crate) fn parse_token_trees<'sess, 'src>( let mut buffer = Vec::with_capacity(1); for unmatched in unmatched_delims { - if let Some(err) = make_unclosed_delims_error(unmatched, sess) { + if let Some(err) = make_unclosed_delims_error(unmatched, psess) { buffer.push(err); } } @@ -90,8 +90,8 @@ pub(crate) fn parse_token_trees<'sess, 'src>( } } -struct StringReader<'sess, 'src> { - sess: &'sess ParseSess, +struct StringReader<'psess, 'src> { + psess: &'psess ParseSess, /// Initial position, read-only. start_pos: BytePos, /// The absolute offset within the source_map of the current character. @@ -107,9 +107,9 @@ struct StringReader<'sess, 'src> { nbsp_is_whitespace: bool, } -impl<'sess, 'src> StringReader<'sess, 'src> { - pub fn dcx(&self) -> &'sess DiagCtxt { - &self.sess.dcx +impl<'psess, 'src> StringReader<'psess, 'src> { + pub fn dcx(&self) -> &'psess DiagCtxt { + &self.psess.dcx } fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { @@ -176,11 +176,11 @@ fn next_token(&mut self) -> (Token, bool) { rustc_lexer::TokenKind::RawIdent => { let sym = nfc_normalize(self.str_from(start + BytePos(2))); let span = self.mk_sp(start, self.pos); - self.sess.symbol_gallery.insert(sym, span); + self.psess.symbol_gallery.insert(sym, span); if !sym.can_be_raw() { self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym }); } - self.sess.raw_identifier_spans.push(span); + self.psess.raw_identifier_spans.push(span); token::Ident(sym, IdentIsRaw::Yes) } rustc_lexer::TokenKind::UnknownPrefix => { @@ -199,7 +199,7 @@ fn next_token(&mut self) -> (Token, bool) { { let sym = nfc_normalize(self.str_from(start)); let span = self.mk_sp(start, self.pos); - self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default() + self.psess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default() .push(span); token::Ident(sym, IdentIsRaw::No) } @@ -230,7 +230,7 @@ fn next_token(&mut self) -> (Token, bool) { let suffix = if suffix_start < self.pos { let string = self.str_from(suffix_start); if string == "_" { - self.sess + self.psess .dcx .emit_err(errors::UnderscoreLiteralSuffix { span: self.mk_sp(suffix_start, self.pos) }); None @@ -338,7 +338,7 @@ fn next_token(&mut self) -> (Token, bool) { fn ident(&self, start: BytePos) -> TokenKind { let sym = nfc_normalize(self.str_from(start)); let span = self.mk_sp(start, self.pos); - self.sess.symbol_gallery.insert(sym, span); + self.psess.symbol_gallery.insert(sym, span); token::Ident(sym, IdentIsRaw::No) } @@ -350,7 +350,7 @@ fn lint_unicode_text_flow(&self, start: BytePos) { let content = self.str_from(content_start); if contains_text_flow_control_chars(content) { let span = self.mk_sp(start, self.pos); - self.sess.buffer_lint_with_diagnostic( + self.psess.buffer_lint_with_diagnostic( TEXT_DIRECTION_CODEPOINT_IN_COMMENT, span, ast::CRATE_NODE_ID, @@ -566,7 +566,7 @@ fn report_raw_str_error(&self, start: BytePos, prefix_len: u32) -> ! { } fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! 
{ - self.sess + self.psess .dcx .struct_span_fatal( self.mk_sp(start, self.pos), @@ -680,7 +680,7 @@ fn report_unknown_prefix(&self, start: BytePos) { self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg }); } else { // Before Rust 2021, only emit a lint for migration. - self.sess.buffer_lint_with_diagnostic( + self.psess.buffer_lint_with_diagnostic( RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, prefix_span, ast::CRATE_NODE_ID, diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs index c9ff2d58e2c..a506f98bf3a 100644 --- a/compiler/rustc_parse/src/lexer/tokentrees.rs +++ b/compiler/rustc_parse/src/lexer/tokentrees.rs @@ -8,18 +8,18 @@ use rustc_errors::{Applicability, PErr}; use rustc_span::symbol::kw; -pub(super) struct TokenTreesReader<'sess, 'src> { - string_reader: StringReader<'sess, 'src>, +pub(super) struct TokenTreesReader<'psess, 'src> { + string_reader: StringReader<'psess, 'src>, /// The "next" token, which has been obtained from the `StringReader` but /// not yet handled by the `TokenTreesReader`. token: Token, diag_info: TokenTreeDiagInfo, } -impl<'sess, 'src> TokenTreesReader<'sess, 'src> { +impl<'psess, 'src> TokenTreesReader<'psess, 'src> { pub(super) fn parse_all_token_trees( - string_reader: StringReader<'sess, 'src>, - ) -> (TokenStream, Result<(), Vec>>, Vec) { + string_reader: StringReader<'psess, 'src>, + ) -> (TokenStream, Result<(), Vec>>, Vec) { let mut tt_reader = TokenTreesReader { string_reader, token: Token::dummy(), @@ -35,7 +35,7 @@ pub(super) fn parse_all_token_trees( fn parse_token_trees( &mut self, is_delimited: bool, - ) -> (Spacing, TokenStream, Result<(), Vec>>) { + ) -> (Spacing, TokenStream, Result<(), Vec>>) { // Move past the opening delimiter. let (_, open_spacing) = self.bump(false); @@ -71,9 +71,9 @@ fn parse_token_trees( } } - fn eof_err(&mut self) -> PErr<'sess> { + fn eof_err(&mut self) -> PErr<'psess> { let msg = "this file contains an unclosed delimiter"; - let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg); + let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg); for &(_, sp) in &self.diag_info.open_braces { err.span_label(sp, "unclosed delimiter"); self.diag_info.unmatched_delims.push(UnmatchedDelim { @@ -89,7 +89,7 @@ fn eof_err(&mut self) -> PErr<'sess> { report_suspicious_mismatch_block( &mut err, &self.diag_info, - self.string_reader.sess.source_map(), + self.string_reader.psess.source_map(), *delim, ) } @@ -99,7 +99,7 @@ fn eof_err(&mut self) -> PErr<'sess> { fn parse_token_tree_open_delim( &mut self, open_delim: Delimiter, - ) -> Result>> { + ) -> Result>> { // The span for beginning of the delimited section let pre_span = self.token.span; @@ -115,7 +115,7 @@ fn parse_token_tree_open_delim( // Expand to cover the entire delimited token tree let delim_span = DelimSpan::from_pair(pre_span, self.token.span); - let sm = self.string_reader.sess.source_map(); + let sm = self.string_reader.psess.source_map(); let close_spacing = match self.token.kind { // Correct delimiter. @@ -232,11 +232,11 @@ fn bump(&mut self, glue: bool) -> (Token, Spacing) { fn unclosed_delim_err( &mut self, tts: TokenStream, - mut errs: Vec>, - ) -> Vec> { + mut errs: Vec>, + ) -> Vec> { // If there are unclosed delims, see if there are diff markers and if so, point them // out instead of complaining about the unclosed delims. 
- let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None); + let mut parser = crate::stream_to_parser(self.string_reader.psess, tts, None); let mut diff_errs = vec![]; // Suggest removing a `{` we think appears in an `if`/`while` condition // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but @@ -289,17 +289,17 @@ fn unclosed_delim_err( return errs; } - fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'sess> { + fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'psess> { // An unexpected closing delimiter (i.e., there is no // matching opening delimiter). let token_str = token_to_string(&self.token); let msg = format!("unexpected closing delimiter: `{token_str}`"); - let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg); + let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg); report_suspicious_mismatch_block( &mut err, &self.diag_info, - self.string_reader.sess.source_map(), + self.string_reader.psess.source_map(), delim, ); err.span_label(self.token.span, "unexpected closing delimiter"); diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index 3b4e05332fa..6b055fc844a 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -350,7 +350,7 @@ pub(super) fn check_for_substitution( let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else { let msg = format!("substitution character not found for '{ch}'"); - reader.sess.dcx.span_bug(span, msg); + reader.psess.dcx.span_bug(span, msg); }; // special help suggestion for "directed" double quotes diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 53bcb35101e..a46372d368f 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -57,84 +57,84 @@ macro_rules! 
panictry_buffer { }}; } -pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> { - let mut parser = new_parser_from_file(sess, input, None); +pub fn parse_crate_from_file<'a>(input: &Path, psess: &'a ParseSess) -> PResult<'a, ast::Crate> { + let mut parser = new_parser_from_file(psess, input, None); parser.parse_crate_mod() } pub fn parse_crate_attrs_from_file<'a>( input: &Path, - sess: &'a ParseSess, + psess: &'a ParseSess, ) -> PResult<'a, ast::AttrVec> { - let mut parser = new_parser_from_file(sess, input, None); + let mut parser = new_parser_from_file(psess, input, None); parser.parse_inner_attributes() } pub fn parse_crate_from_source_str( name: FileName, source: String, - sess: &ParseSess, + psess: &ParseSess, ) -> PResult<'_, ast::Crate> { - new_parser_from_source_str(sess, name, source).parse_crate_mod() + new_parser_from_source_str(psess, name, source).parse_crate_mod() } pub fn parse_crate_attrs_from_source_str( name: FileName, source: String, - sess: &ParseSess, + psess: &ParseSess, ) -> PResult<'_, ast::AttrVec> { - new_parser_from_source_str(sess, name, source).parse_inner_attributes() + new_parser_from_source_str(psess, name, source).parse_inner_attributes() } pub fn parse_stream_from_source_str( name: FileName, source: String, - sess: &ParseSess, + psess: &ParseSess, override_span: Option, ) -> TokenStream { - source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span) + source_file_to_stream(psess, psess.source_map().new_source_file(name, source), override_span) } /// Creates a new parser from a source string. -pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> { - panictry_buffer!(maybe_new_parser_from_source_str(sess, name, source)) +pub fn new_parser_from_source_str(psess: &ParseSess, name: FileName, source: String) -> Parser<'_> { + panictry_buffer!(maybe_new_parser_from_source_str(psess, name, source)) } /// Creates a new parser from a source string. Returns any buffered errors from lexing the initial /// token stream; these must be consumed via `emit`, `cancel`, etc., otherwise a panic will occur /// when they are dropped. pub fn maybe_new_parser_from_source_str( - sess: &ParseSess, + psess: &ParseSess, name: FileName, source: String, ) -> Result, Vec>> { - maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source)) + maybe_source_file_to_parser(psess, psess.source_map().new_source_file(name, source)) } /// Creates a new parser, aborting if the file doesn't exist. If a span is given, that is used on /// an error as the source of the problem. -pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option) -> Parser<'a> { - let source_file = sess.source_map().load_file(path).unwrap_or_else(|e| { +pub fn new_parser_from_file<'a>(psess: &'a ParseSess, path: &Path, sp: Option) -> Parser<'a> { + let source_file = psess.source_map().load_file(path).unwrap_or_else(|e| { let msg = format!("couldn't read {}: {}", path.display(), e); - let mut err = sess.dcx.struct_fatal(msg); + let mut err = psess.dcx.struct_fatal(msg); if let Some(sp) = sp { err.span(sp); } err.emit(); }); - panictry_buffer!(maybe_source_file_to_parser(sess, source_file)) + panictry_buffer!(maybe_source_file_to_parser(psess, source_file)) } /// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing /// the initial token stream. 
fn maybe_source_file_to_parser( - sess: &ParseSess, + psess: &ParseSess, source_file: Lrc, ) -> Result, Vec>> { let end_pos = source_file.end_position(); - let stream = maybe_file_to_stream(sess, source_file, None)?; - let mut parser = stream_to_parser(sess, stream, None); + let stream = maybe_file_to_stream(psess, source_file, None)?; + let mut parser = stream_to_parser(psess, stream, None); if parser.token == token::Eof { parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None); } @@ -146,47 +146,47 @@ fn maybe_source_file_to_parser( /// Given a `source_file`, produces a sequence of token trees. pub fn source_file_to_stream( - sess: &ParseSess, + psess: &ParseSess, source_file: Lrc, override_span: Option, ) -> TokenStream { - panictry_buffer!(maybe_file_to_stream(sess, source_file, override_span)) + panictry_buffer!(maybe_file_to_stream(psess, source_file, override_span)) } /// Given a source file, produces a sequence of token trees. Returns any buffered errors from /// parsing the token stream. -fn maybe_file_to_stream<'sess>( - sess: &'sess ParseSess, +fn maybe_file_to_stream<'psess>( + psess: &'psess ParseSess, source_file: Lrc, override_span: Option, -) -> Result>> { +) -> Result>> { let src = source_file.src.as_ref().unwrap_or_else(|| { - sess.dcx.bug(format!( + psess.dcx.bug(format!( "cannot lex `source_file` without source: {}", - sess.source_map().filename_for_diagnostics(&source_file.name) + psess.source_map().filename_for_diagnostics(&source_file.name) )); }); - lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span) + lexer::parse_token_trees(psess, src.as_str(), source_file.start_pos, override_span) } /// Given a stream and the `ParseSess`, produces a parser. pub fn stream_to_parser<'a>( - sess: &'a ParseSess, + psess: &'a ParseSess, stream: TokenStream, subparser_name: Option<&'static str>, ) -> Parser<'a> { - Parser::new(sess, stream, subparser_name) + Parser::new(psess, stream, subparser_name) } /// Runs the given subparser `f` on the tokens of the given `attr`'s item. 
pub fn parse_in<'a, T>( - sess: &'a ParseSess, + psess: &'a ParseSess, tts: TokenStream, name: &'static str, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, T> { - let mut parser = Parser::new(sess, tts, Some(name)); + let mut parser = Parser::new(psess, tts, Some(name)); let result = f(&mut parser)?; if parser.token != token::Eof { parser.unexpected()?; @@ -194,28 +194,28 @@ pub fn parse_in<'a, T>( Ok(result) } -pub fn fake_token_stream_for_item(sess: &ParseSess, item: &ast::Item) -> TokenStream { +pub fn fake_token_stream_for_item(psess: &ParseSess, item: &ast::Item) -> TokenStream { let source = pprust::item_to_string(item); let filename = FileName::macro_expansion_source_code(&source); - parse_stream_from_source_str(filename, source, sess, Some(item.span)) + parse_stream_from_source_str(filename, source, psess, Some(item.span)) } -pub fn fake_token_stream_for_crate(sess: &ParseSess, krate: &ast::Crate) -> TokenStream { +pub fn fake_token_stream_for_crate(psess: &ParseSess, krate: &ast::Crate) -> TokenStream { let source = pprust::crate_to_string_for_macros(krate); let filename = FileName::macro_expansion_source_code(&source); - parse_stream_from_source_str(filename, source, sess, Some(krate.spans.inner_span)) + parse_stream_from_source_str(filename, source, psess, Some(krate.spans.inner_span)) } pub fn parse_cfg_attr( attr: &Attribute, - parse_sess: &ParseSess, + psess: &ParseSess, ) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> { match attr.get_normal_item().args { ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens }) if !tokens.is_empty() => { - crate::validate_attr::check_cfg_attr_bad_delim(parse_sess, dspan, delim); - match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) { + crate::validate_attr::check_cfg_attr_bad_delim(psess, dspan, delim); + match parse_in(psess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) { Ok(r) => return Some(r), Err(e) => { e.with_help(format!("the valid syntax is `{CFG_ATTR_GRAMMAR_HELP}`")) @@ -224,7 +224,7 @@ pub fn parse_cfg_attr( } } } - _ => error_malformed_cfg_attr_missing(attr.span, parse_sess), + _ => error_malformed_cfg_attr_missing(attr.span, psess), } None } @@ -234,6 +234,6 @@ pub fn parse_cfg_attr( "; -fn error_malformed_cfg_attr_missing(span: Span, parse_sess: &ParseSess) { - parse_sess.dcx.emit_err(errors::MalformedCfgAttr { span, sugg: CFG_ATTR_GRAMMAR_HELP }); +fn error_malformed_cfg_attr_missing(span: Span, psess: &ParseSess) { + psess.dcx.emit_err(errors::MalformedCfgAttr { span, sugg: CFG_ATTR_GRAMMAR_HELP }); } diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 685af6546f5..eb9a10f4bda 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -85,7 +85,7 @@ pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> { // Always make an outer attribute - this allows us to recover from a misplaced // inner attribute. 
Some(attr::mk_doc_comment( - &self.sess.attr_id_generator, + &self.psess.attr_id_generator, comment_kind, ast::AttrStyle::Outer, data, @@ -135,7 +135,7 @@ pub fn parse_attribute( this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy); } - Ok(attr::mk_attr_from_item(&self.sess.attr_id_generator, item, None, style, attr_sp)) + Ok(attr::mk_attr_from_item(&self.psess.attr_id_generator, item, None, style, attr_sp)) }) } @@ -288,7 +288,7 @@ pub(crate) fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> { if attr_style == ast::AttrStyle::Inner { self.bump(); Some(attr::mk_doc_comment( - &self.sess.attr_id_generator, + &self.psess.attr_id_generator, comment_kind, attr_style, data, diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index 2307f4cfffa..a1dd7d6f673 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -40,8 +40,8 @@ pub fn empty() -> AttrWrapper { AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX } } - pub(crate) fn take_for_recovery(self, sess: &ParseSess) -> AttrVec { - sess.dcx.span_delayed_bug( + pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec { + psess.dcx.span_delayed_bug( self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP), "AttrVec is taken for recovery but no error is produced", ); diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 2a8cb74337b..2f7ac7d3a12 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -242,7 +242,7 @@ fn deref_mut(&mut self) -> &mut Self::Target { impl<'a> Parser<'a> { pub fn dcx(&self) -> &'a DiagCtxt { - &self.sess.dcx + &self.psess.dcx } /// Replace `self` with `snapshot.parser`. @@ -257,7 +257,7 @@ pub fn create_snapshot_for_diagnostic(&self) -> SnapshotParser<'a> { } pub(super) fn span_to_snippet(&self, span: Span) -> Result { - self.sess.source_map().span_to_snippet(span) + self.psess.source_map().span_to_snippet(span) } /// Emits an error with suggestions if an identifier was expected but not found. @@ -364,7 +364,7 @@ pub(super) fn expected_ident_found( if !self.look_ahead(1, |t| *t == token::Lt) && let Ok(snippet) = - self.sess.source_map().span_to_snippet(generic.span) + self.psess.source_map().span_to_snippet(generic.span) { err.multipart_suggestion_verbose( format!("place the generic parameter name after the {ident_name} name"), @@ -489,7 +489,7 @@ fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool { expected.sort_by_cached_key(|x| x.to_string()); expected.dedup(); - let sm = self.sess.source_map(); + let sm = self.psess.source_map(); // Special-case "expected `;`" errors. if expected.contains(&TokenType::Token(token::Semi)) { @@ -822,7 +822,7 @@ pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) -> ErrorGuaranteed { // #[cfg(..)] // other_expr // So we suggest using `if cfg!(..) { expr } else if cfg!(..) { other_expr }`. 
- let margin = self.sess.source_map().span_to_margin(next_expr.span).unwrap_or(0); + let margin = self.psess.source_map().span_to_margin(next_expr.span).unwrap_or(0); let sugg = vec![ (attr.span.with_hi(segment.span().hi()), "if cfg!".to_string()), (args_span.shrink_to_hi().with_hi(attr.span.hi()), " {".to_string()), @@ -850,7 +850,7 @@ pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) -> ErrorGuaranteed { } fn check_too_many_raw_str_terminators(&mut self, err: &mut Diag<'_>) -> bool { - let sm = self.sess.source_map(); + let sm = self.psess.source_map(); match (&self.prev_token.kind, &self.token.kind) { ( TokenKind::Literal(Lit { @@ -935,7 +935,7 @@ pub fn maybe_suggest_struct_literal( // expand `before` so that we take care of module path such as: // `foo::Bar { ... } ` // we expect to suggest `(foo::Bar { ... })` instead of `foo::(Bar { ... })` - let sm = self.sess.source_map(); + let sm = self.psess.source_map(); let before = maybe_struct_name.span.shrink_to_lo(); if let Ok(extend_before) = sm.span_extend_prev_while(before, |t| { t.is_alphanumeric() || t == ':' || t == '_' @@ -1872,7 +1872,7 @@ pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Re ); let mut err = self.dcx().struct_span_err(sp, msg); let label_exp = format!("expected `{token_str}`"); - let sm = self.sess.source_map(); + let sm = self.psess.source_map(); if !sm.is_multiline(prev_sp.until(sp)) { // When the spans are in the same line, it means that the only content // between them is whitespace, point only at the found token. @@ -1893,7 +1893,7 @@ pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> { pub(super) fn recover_colon_as_semi(&mut self) -> bool { let line_idx = |span: Span| { - self.sess + self.psess .source_map() .span_to_lines(span) .ok() @@ -1906,7 +1906,7 @@ pub(super) fn recover_colon_as_semi(&mut self) -> bool { { self.dcx().emit_err(ColonAsSemi { span: self.token.span, - type_ascription: self.sess.unstable_features.is_nightly_build().then_some(()), + type_ascription: self.psess.unstable_features.is_nightly_build().then_some(()), }); self.bump(); return true; @@ -2357,8 +2357,8 @@ pub(super) fn expected_expression_found(&self) -> Diag<'a> { ), }; let mut err = self.dcx().struct_span_err(span, msg); - let sp = self.sess.source_map().start_point(self.token.span); - if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { + let sp = self.psess.source_map().start_point(self.token.span); + if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) { err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp)); } err.span_label(span, "expected expression"); @@ -2539,7 +2539,7 @@ fn recover_const_param_decl(&mut self, ty_generics: Option<&Generics>) -> Option }; let ident = param.ident.to_string(); - let sugg = match (ty_generics, self.sess.source_map().span_to_snippet(param.span())) { + let sugg = match (ty_generics, self.psess.source_map().span_to_snippet(param.span())) { (Some(Generics { params, span: impl_generics, .. }), Ok(snippet)) => { Some(match &params[..]
{ [] => UnexpectedConstParamDeclarationSugg::AddParam { diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 54771cccd75..eb9a2659618 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -403,8 +403,8 @@ fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool { // suggestions based on the assumption that double-refs are rarely intentional, // and closures are distinct enough that they don't get mixed up with their // return value. - let sp = self.sess.source_map().start_point(self.token.span); - self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span); + let sp = self.psess.source_map().start_point(self.token.span); + self.psess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span); false } (true, Some(op)) if !op.can_continue_expr_unambiguously() => false, @@ -608,7 +608,7 @@ macro_rules! make_it { }; // a block on the LHS might have been intended to be an expression instead - if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) { + if let Some(sp) = this.psess.ambiguous_block_expr_parse.borrow().get(&lo) { err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp)); } else { err.remove_plus = Some(lo); @@ -666,7 +666,7 @@ fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { fn parse_expr_box(&mut self, box_kw: Span) -> PResult<'a, (Span, ExprKind)> { let (span, _) = self.parse_expr_prefix_common(box_kw)?; let inner_span = span.with_lo(box_kw.hi()); - let code = self.sess.source_map().span_to_snippet(inner_span).unwrap(); + let code = self.psess.source_map().span_to_snippet(inner_span).unwrap(); let guar = self.dcx().emit_err(errors::BoxSyntaxRemoved { span: span, code: code.trim() }); Ok((span, ExprKind::Err(guar))) } @@ -700,7 +700,7 @@ fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { // Span the `not` plus trailing whitespace to avoid // trailing whitespace after the `!` in our suggestion sub: sub_diag( - self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)), + self.psess.source_map().span_until_non_whitespace(lo.to(negated_token.span)), ), }); @@ -915,7 +915,7 @@ fn parse_borrow_modifiers(&mut self, lo: Span) -> (ast::BorrowKind, ast::Mutabil let found_raw = self.eat_keyword(kw::Raw); assert!(found_raw); let mutability = self.parse_const_or_mut().unwrap(); - self.sess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span)); + self.psess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span)); (ast::BorrowKind::Raw, mutability) } else { // `mut?` @@ -1013,7 +1013,7 @@ pub fn parse_dot_suffix_expr(&mut self, lo: Span, base: P) -> PResult<'a, fn error_unexpected_after_dot(&self) { let actual = pprust::token_to_string(&self.token); let span = self.token.span; - let sm = self.sess.source_map(); + let sm = self.psess.source_map(); let (span, actual) = match (&self.token.kind, self.subparser_name) { (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => { (span.shrink_to_hi(), actual.into()) @@ -1434,7 +1434,7 @@ fn parse_expr_bottom(&mut self) -> PResult<'a, P> { this.parse_expr_closure().map_err(|mut err| { // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }` // then suggest parens around the lhs. 
- if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) { + if let Some(sp) = this.psess.ambiguous_block_expr_parse.borrow().get(&lo) { err.subdiagnostic(this.dcx(), ExprParenthesesNeeded::surrounding(*sp)); } err @@ -1634,7 +1634,7 @@ fn parse_expr_path_start(&mut self) -> PResult<'a, P> { && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path) { if qself.is_some() { - self.sess.gated_spans.gate(sym::more_qualified_paths, path.span); + self.psess.gated_spans.gate(sym::more_qualified_paths, path.span); } return expr; } else { @@ -1821,7 +1821,7 @@ fn parse_expr_yeet(&mut self) -> PResult<'a, P> { let kind = ExprKind::Yeet(self.parse_expr_opt()?); let span = lo.to(self.prev_token.span); - self.sess.gated_spans.gate(sym::yeet_expr, span); + self.psess.gated_spans.gate(sym::yeet_expr, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } @@ -1831,7 +1831,7 @@ fn parse_expr_become(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let kind = ExprKind::Become(self.parse_expr()?); let span = lo.to(self.prev_token.span); - self.sess.gated_spans.gate(sym::explicit_tail_calls, span); + self.psess.gated_spans.gate(sym::explicit_tail_calls, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } @@ -1875,7 +1875,7 @@ fn parse_expr_break(&mut self) -> PResult<'a, P> { | ExprKind::Block(_, None) ) { - self.sess.buffer_lint_with_diagnostic( + self.psess.buffer_lint_with_diagnostic( BREAK_WITH_LABEL_AND_LOOP, lo.to(expr.span), ast::CRATE_NODE_ID, @@ -1926,7 +1926,7 @@ fn parse_expr_yield(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let kind = ExprKind::Yield(self.parse_expr_opt()?); let span = lo.to(self.prev_token.span); - self.sess.gated_spans.gate(sym::yield_expr, span); + self.psess.gated_spans.gate(sym::yield_expr, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } @@ -1955,7 +1955,7 @@ pub(crate) fn parse_builtin( let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span }); return Err(err); }; - self.sess.gated_spans.gate(sym::builtin_syntax, ident.span); + self.psess.gated_spans.gate(sym::builtin_syntax, ident.span); self.bump(); self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?; @@ -2143,7 +2143,7 @@ pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option { Err(err) => { let span = token.uninterpolated_span(); self.bump(); - let guar = report_lit_error(self.sess, err, lit, span); + let guar = report_lit_error(self.psess, err, lit, span); // Pack possible quotes and prefixes from the original literal into // the error literal's symbol so they can be pretty-printed faithfully. 
let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); @@ -2236,7 +2236,7 @@ fn suggest_missing_semicolon_before_array( } if self.token.kind == token::Comma { - if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) { + if !self.psess.source_map().is_multiline(prev_span.until(self.token.span)) { return Ok(()); } let mut snapshot = self.create_snapshot_for_diagnostic(); @@ -2312,7 +2312,7 @@ fn parse_expr_closure(&mut self) -> PResult<'a, P> { let lifetime_defs = self.parse_late_bound_lifetime_defs()?; let span = lo.to(self.prev_token.span); - self.sess.gated_spans.gate(sym::closure_lifetime_binder, span); + self.psess.gated_spans.gate(sym::closure_lifetime_binder, span); ClosureBinder::For { span, generic_params: lifetime_defs } } else { @@ -2354,12 +2354,12 @@ fn parse_expr_closure(&mut self) -> PResult<'a, P> { match coroutine_kind { Some(CoroutineKind::Async { span, .. }) => { // Feature-gate `async ||` closures. - self.sess.gated_spans.gate(sym::async_closure, span); + self.psess.gated_spans.gate(sym::async_closure, span); } Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => { // Feature-gate `gen ||` and `async gen ||` closures. // FIXME(gen_blocks): This perhaps should be a different gate. - self.sess.gated_spans.gate(sym::gen_blocks, span); + self.psess.gated_spans.gate(sym::gen_blocks, span); } None => {} } @@ -2502,7 +2502,7 @@ fn parse_if_after_cond(&mut self, lo: Span, mut cond: P) -> PResult<'a, P< ExprKind::Block(_, None) => { let guar = this.dcx().emit_err(errors::IfExpressionMissingCondition { if_span: lo.with_neighbor(cond.span).shrink_to_hi(), - block_span: self.sess.source_map().start_point(cond_span), + block_span: self.psess.source_map().start_point(cond_span), }); std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi(), guar)) } @@ -2594,7 +2594,7 @@ fn parse_expr_cond(&mut self) -> PResult<'a, P> { if let ExprKind::Let(_, _, _, None) = cond.kind { // Remove the last feature gating of a `let` expression since it's stable. - self.sess.gated_spans.ungate_last(sym::let_chains, cond.span); + self.psess.gated_spans.ungate_last(sym::let_chains, cond.span); } Ok(cond) @@ -2690,7 +2690,7 @@ fn error_on_if_block_attrs( attrs: AttrWrapper, ) { if !attrs.is_empty() - && let [x0 @ xn] | [x0, .., xn] = &*attrs.take_for_recovery(self.sess) + && let [x0 @ xn] | [x0, .., xn] = &*attrs.take_for_recovery(self.psess) { let attributes = x0.span.to(xn.span); let last = xn.span; @@ -2787,7 +2787,7 @@ fn parse_expr_for(&mut self, opt_label: Option