From a483d3bc37f00c6e39501075fc8d4fb9c3d0084a Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Sun, 14 Apr 2024 16:02:38 +0200
Subject: [PATCH] internal: Thread edition through to parsing/tt-to-syntax-tree
 routines for macros

---
 crates/base-db/src/lib.rs | 3 +-
 crates/cfg/src/tests.rs | 10 +--
 crates/hir-def/src/attr/tests.rs | 2 +-
 crates/hir-def/src/find_path.rs | 3 +-
 crates/hir-expand/src/builtin_fn_macro.rs | 2 +-
 crates/hir-expand/src/cfg_process.rs | 2 +-
 crates/hir-expand/src/db.rs | 73 ++++++++++---------
 crates/hir-expand/src/declarative.rs | 9 ++-
 crates/hir-expand/src/fixup.rs | 2 +-
 crates/ide-completion/src/context.rs | 5 +-
 crates/ide-completion/src/snippet.rs | 2 +-
 crates/ide-db/src/imports/insert_use.rs | 2 +-
 crates/ide-db/src/imports/insert_use/tests.rs | 8 +-
 crates/ide-ssr/src/fragments.rs | 4 +-
 crates/ide/src/file_structure.rs | 2 +-
 crates/ide/src/folding_ranges.rs | 2 +-
 crates/ide/src/join_lines.rs | 4 +-
 crates/ide/src/matching_brace.rs | 2 +-
 crates/ide/src/syntax_tree.rs | 2 +-
 crates/ide/src/typing.rs | 5 +-
 crates/mbe/src/benchmark.rs | 8 +-
 crates/mbe/src/expander.rs | 5 +-
 crates/mbe/src/expander/matcher.rs | 69 +++++++++---------
 crates/mbe/src/lib.rs | 3 +-
 crates/mbe/src/syntax_bridge.rs | 12 ++-
 crates/mbe/src/syntax_bridge/tests.rs | 2 +-
 crates/rust-analyzer/src/cargo_target_spec.rs | 3 +-
 crates/rust-analyzer/src/cli/parse.rs | 3 +-
 crates/syntax/src/algo.rs | 6 +-
 crates/syntax/src/ast.rs | 13 +++-
 crates/syntax/src/ast/edit_in_place.rs | 3 +-
 crates/syntax/src/ast/expr_ext.rs | 5 +-
 crates/syntax/src/ast/make.rs | 13 ++--
 crates/syntax/src/fuzz.rs | 9 ++-
 crates/syntax/src/hacks.rs | 4 +-
 crates/syntax/src/lib.rs | 14 ++--
 crates/syntax/src/parsing/reparsing.rs | 5 +-
 crates/syntax/src/ptr.rs | 2 +-
 crates/syntax/src/tests.rs | 9 ++-
 39 files changed, 187 insertions(+), 145 deletions(-)

diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 785ff9ceffa..a268b6a78fc 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -83,7 +83,8 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option
 Parse {
     let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered();
     let text = db.file_text(file_id);
-    SourceFile::parse(&text)
+    // FIXME: Edition based parsing
+    SourceFile::parse(&text, span::Edition::CURRENT)
 }
 
 /// We don't want to give HIR knowledge of source roots, hence we extract these
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index 62fb429a63f..a1ae15fcdda 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -1,12 +1,12 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
 use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
-use syntax::{ast, AstNode};
+use syntax::{ast, AstNode, Edition};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 
 fn assert_parse_result(input: &str, expected: CfgExpr) {
-    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
@@ -14,7 +14,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
 }
 
 fn check_dnf(input: &str, expect: Expect) {
-    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
@@ -23,7 +23,7 @@ fn check_dnf(input: &str, expect: Expect) {
 }
 
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
-    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
@@ -34,7 +34,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect)
 
 #[track_caller]
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
-    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs
index 1a63e96bfa9..9b68797fbf7 100644
--- a/crates/hir-def/src/attr/tests.rs
+++ b/crates/hir-def/src/attr/tests.rs
@@ -11,7 +11,7 @@
 use crate::attr::{DocAtom, DocExpr};
 
 fn assert_parse_result(input: &str, expected: DocExpr) {
-    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
     let tt = syntax_node_to_token_tree(
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index d06fc4df768..bf728a71079 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -610,7 +610,8 @@ fn check_found_path_(
 ) {
     let (db, pos) = TestDB::with_position(ra_fixture);
     let module = db.module_at_position(pos);
-    let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
+    let parsed_path_file =
+        syntax::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT);
     let ast_path =
         parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
     let mod_path = ModPath::from_src(&db, ast_path, &mut |range| {
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index fd3e4e7a4db..4d6fe6db396 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -219,7 +219,7 @@ fn assert_expand(
     span: Span,
 ) -> ExpandResult {
     let call_site_span = span_with_call_site_ctxt(db, span, id);
-    let args = parse_exprs_with_sep(tt, ',', call_site_span);
+    let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT);
     let dollar_crate = dollar_crate(span);
     let expanded = match &*args {
         [cond, panic_args @ ..] => {
diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs
index f37ce8ba6de..9dd44262ba9 100644
--- a/crates/hir-expand/src/cfg_process.rs
+++ b/crates/hir-expand/src/cfg_process.rs
@@ -327,7 +327,7 @@ mod tests {
     use crate::cfg_process::parse_from_attr_meta;
 
     fn check_dnf_from_syntax(input: &str, expect: Expect) {
-        let parse = SourceFile::parse(input);
+        let parse = SourceFile::parse(input, span::Edition::CURRENT);
         let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
             Some(it) => it,
             None => {
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 0923aeb8597..97fa9cf2cc5 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -225,43 +225,45 @@ pub fn expand_speculative(
 
     // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
-    let mut speculative_expansion = match loc.def.kind {
-        MacroDefKind::ProcMacro(expander, _, ast) => {
-            let span = db.proc_macro_span(ast);
-            tt.delimiter = tt::Delimiter::invisible_spanned(span);
-            expander.expand(
-                db,
-                loc.def.krate,
-                loc.krate,
-                &tt,
-                attr_arg.as_ref(),
-                span_with_def_site_ctxt(db, span, actual_macro_call),
-                span_with_call_site_ctxt(db, span, actual_macro_call),
-                span_with_mixed_site_ctxt(db, span, actual_macro_call),
-            )
-        }
-        MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
-            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
-        }
-        MacroDefKind::Declarative(it) => {
-            db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span)
-        }
-        MacroDefKind::BuiltIn(it, _) => {
-            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
-        }
-        MacroDefKind::BuiltInDerive(it, ..) => {
-            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
-        }
-        MacroDefKind::BuiltInEager(it, _) => {
-            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
-        }
-        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
-    };
+    let mut speculative_expansion =
+        match loc.def.kind {
+            MacroDefKind::ProcMacro(expander, _, ast) => {
+                let span = db.proc_macro_span(ast);
+                tt.delimiter = tt::Delimiter::invisible_spanned(span);
+                expander.expand(
+                    db,
+                    loc.def.krate,
+                    loc.krate,
+                    &tt,
+                    attr_arg.as_ref(),
+                    span_with_def_site_ctxt(db, span, actual_macro_call),
+                    span_with_call_site_ctxt(db, span, actual_macro_call),
+                    span_with_mixed_site_ctxt(db, span, actual_macro_call),
+                )
+            }
+            MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
+                pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
+            }
+            MacroDefKind::Declarative(it) => db
+                .decl_macro_expander(loc.krate, it)
+                .expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
+            MacroDefKind::BuiltIn(it, _) => {
+                it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
+            }
+            MacroDefKind::BuiltInDerive(it, ..) => {
+                it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
+            }
+            MacroDefKind::BuiltInEager(it, _) => {
+                it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
+            }
+            MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
+        };
 
     let expand_to = loc.expand_to();
     fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
-    let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
+    let (node, rev_tmap) =
+        token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
 
     let syntax_node = node.syntax_node();
     let token = rev_tmap
@@ -309,6 +311,7 @@ fn parse_macro_expansion(
 ) -> ExpandResult<(Parse, Arc)> {
     let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
     let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+    let edition = loc.def.edition;
     let expand_to = loc.expand_to();
 
     let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
@@ -318,6 +321,7 @@ fn parse_macro_expansion(
             CowArc::Owned(it) => it,
         },
         expand_to,
+        edition,
     );
 
     ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
@@ -668,6 +672,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult
 (Parse, ExpansionSpanMap) {
     let entry_point = match expand_to {
         ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
@@ -676,7 +681,7 @@ fn token_tree_to_syntax_node(
         ExpandTo::Type => mbe::TopEntryPoint::Type,
         ExpandTo::Expr => mbe::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point, parser::Edition::CURRENT)
+    mbe::token_tree_to_syntax_node(tt, entry_point, edition)
 }
 
 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index 9a0b218e6d1..f9ea8e2ea53 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -2,7 +2,7 @@
 use std::sync::OnceLock;
 
 use base_db::{CrateId, VersionReq};
-use span::{MacroCallId, Span, SyntaxContextId};
+use span::{Edition, MacroCallId, Span, SyntaxContextId};
 use syntax::{ast, AstNode};
 use triomphe::Arc;
 
@@ -56,6 +56,7 @@ pub fn expand(
                 |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
                 new_meta_vars,
                 span,
+                loc.def.edition,
             )
             .map_err(Into::into),
         }
@@ -67,6 +68,7 @@ pub fn expand_unhygienic(
         tt: tt::Subtree,
         krate: CrateId,
         call_site: Span,
+        def_site_edition: Edition,
     ) -> ExpandResult {
         let toolchain = db.toolchain(krate);
         let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@@ -85,7 +87,10 @@ pub fn expand_unhygienic(
                 tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                 ExpandError::MacroDefinition,
             ),
-            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
+            None => self
+                .mac
+                .expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition)
+                .map_err(Into::into),
         }
     }
 
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index b33ae49a944..711acfeb3d8 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -396,7 +396,7 @@ fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
 
     #[track_caller]
     fn check(ra_fixture: &str, mut expect: Expect) {
-        let parsed = syntax::SourceFile::parse(ra_fixture);
+        let parsed = syntax::SourceFile::parse(ra_fixture, span::Edition::CURRENT);
         let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
         let fixups = super::fixup_syntax(
             span_map.as_ref(),
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 995e3f48253..8b435f419c7 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -17,7 +17,7 @@
 };
 use syntax::{
     ast::{self, AttrKind, NameOrNameRef},
-    AstNode, SmolStr,
+    AstNode, Edition, SmolStr,
     SyntaxKind::{self, *},
     SyntaxToken, TextRange, TextSize, T,
 };
@@ -667,7 +667,8 @@ pub(crate) fn new(
         let file_with_fake_ident = {
             let parse = db.parse(file_id);
             let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned());
-            parse.reparse(&edit).tree()
+            // FIXME: Edition
+            parse.reparse(&edit, Edition::CURRENT).tree()
         };
 
         // always pick the token to the immediate left of the cursor, as that is what we are actually
diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs
index e667e2e0168..7d710f1e130 100644
--- a/crates/ide-completion/src/snippet.rs
+++ b/crates/ide-completion/src/snippet.rs
@@ -200,7 +200,7 @@ fn validate_snippet(
 ) -> Option<(Box<[GreenNode]>, String, Option>)> {
     let mut imports = Vec::with_capacity(requires.len());
     for path in requires.iter() {
-        let use_path = ast::SourceFile::parse(&format!("use {path};"))
+        let use_path = ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT)
             .syntax_node()
             .descendants()
             .find_map(ast::Path::cast)?;
diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs
index bd5c464c557..e97f1b86143 100644
--- a/crates/ide-db/src/imports/insert_use.rs
+++ b/crates/ide-db/src/imports/insert_use.rs
@@ -176,7 +176,7 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
 
 pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
     let text: &str = "use foo as _";
-    let parse = syntax::SourceFile::parse(text);
+    let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT);
     let node = parse
         .tree()
         .syntax()
diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs
index 10c285a13fb..9d1f1cc09c6 100644
--- a/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/crates/ide-db/src/imports/insert_use/tests.rs
@@ -1243,7 +1243,7 @@ fn check_with_config(
         .and_then(|it| ImportScope::find_insert_use_container(&it, sema))
         .or_else(|| ImportScope::from(syntax))
         .unwrap();
-    let path = ast::SourceFile::parse(&format!("use {path};"))
+    let path = ast::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT)
         .tree()
         .syntax()
         .descendants()
@@ -1292,14 +1292,14 @@ fn check_one(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
 }
 
 fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior) {
-    let use0 = ast::SourceFile::parse(ra_fixture0)
+    let use0 = ast::SourceFile::parse(ra_fixture0, span::Edition::CURRENT)
         .tree()
         .syntax()
         .descendants()
         .find_map(ast::Use::cast)
        .unwrap();
 
-    let use1 = ast::SourceFile::parse(ra_fixture1)
+    let use1 = ast::SourceFile::parse(ra_fixture1, span::Edition::CURRENT)
         .tree()
         .syntax()
         .descendants()
@@ -1311,7 +1311,7 @@ fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior
 }
 
 fn check_guess(ra_fixture: &str, expected: ImportGranularityGuess) {
-    let syntax = ast::SourceFile::parse(ra_fixture).tree().syntax().clone();
+    let syntax = ast::SourceFile::parse(ra_fixture, span::Edition::CURRENT).tree().syntax().clone();
     let file = ImportScope::from(syntax).unwrap();
     assert_eq!(super::guess_granularity_from_scope(&file), expected);
 }
 }
diff --git a/crates/ide-ssr/src/fragments.rs b/crates/ide-ssr/src/fragments.rs
index 4d6809efbe1..ca937a03f82 100644
--- a/crates/ide-ssr/src/fragments.rs
+++ b/crates/ide-ssr/src/fragments.rs
@@ -27,7 +27,7 @@ pub(crate) fn expr(s: &str) -> Result {
 pub(crate) fn stmt(s: &str) -> Result {
     let template = "const _: () = { {}; };";
     let input = template.replace("{}", s);
-    let parse = syntax::SourceFile::parse(&input);
+    let parse = syntax::SourceFile::parse(&input, syntax::Edition::CURRENT);
     if !parse.errors().is_empty() {
         return Err(());
     }
@@ -48,7 +48,7 @@ pub(crate) fn stmt(s: &str) -> Result {
 fn fragment(template: &str, s: &str) -> Result {
     let s = s.trim();
     let input = template.replace("{}", s);
-    let parse = syntax::SourceFile::parse(&input);
+    let parse = syntax::SourceFile::parse(&input, syntax::Edition::CURRENT);
     if !parse.errors().is_empty() {
         return Err(());
     }
diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs
index 813691540f9..568906a098e 100644
--- a/crates/ide/src/file_structure.rs
+++ b/crates/ide/src/file_structure.rs
@@ -220,7 +220,7 @@ mod tests {
     use super::*;
 
     fn check(ra_fixture: &str, expect: Expect) {
-        let file = SourceFile::parse(ra_fixture).ok().unwrap();
+        let file = SourceFile::parse(ra_fixture, span::Edition::CURRENT).ok().unwrap();
         let structure = file_structure(&file);
         expect.assert_debug_eq(&structure)
     }
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 2bc07211231..c1b7693a650 100755
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -289,7 +289,7 @@ mod tests {
     fn check(ra_fixture: &str) {
         let (ranges, text) = extract_tags(ra_fixture, "fold");
 
-        let parse = SourceFile::parse(&text);
+        let parse = SourceFile::parse(&text, span::Edition::CURRENT);
         let mut folds = folding_ranges(&parse.tree());
         folds.sort_by_key(|fold| (fold.range.start(), fold.range.end()));
 
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 815a4ba7fd7..9d8ba90b2ff 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -316,7 +316,7 @@ fn check_join_lines(ra_fixture_before: &str, ra_fixture_after: &str) {
         };
 
         let (before_cursor_pos, before) = extract_offset(ra_fixture_before);
-        let file = SourceFile::parse(&before).ok().unwrap();
+        let file = SourceFile::parse(&before, span::Edition::CURRENT).ok().unwrap();
 
         let range = TextRange::empty(before_cursor_pos);
         let result = join_lines(&config, &file, range);
@@ -342,7 +342,7 @@ fn check_join_lines_sel(ra_fixture_before: &str, ra_fixture_after: &str) {
         };
 
         let (sel, before) = extract_range(ra_fixture_before);
-        let parse = SourceFile::parse(&before);
+        let parse = SourceFile::parse(&before, span::Edition::CURRENT);
         let result = join_lines(&config, &parse.tree(), sel);
         let actual = {
             let mut actual = before;
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index 6e8a6d020cc..57356152836 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -50,7 +50,7 @@ mod tests {
     fn test_matching_brace() {
         fn do_check(before: &str, after: &str) {
             let (pos, before) = extract_offset(before);
-            let parse = SourceFile::parse(&before);
+            let parse = SourceFile::parse(&before, span::Edition::CURRENT);
             let new_pos = match matching_brace(&parse.tree(), pos) {
                 None => pos,
                 Some(pos) => pos,
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs
index 1065d5899ab..05cdf430efb 100644
--- a/crates/ide/src/syntax_tree.rs
+++ b/crates/ide/src/syntax_tree.rs
@@ -88,7 +88,7 @@ fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option
 Option {
         let (offset, mut before) = extract_offset(before);
         let edit = TextEdit::insert(offset, char_typed.to_string());
         edit.apply(&mut before);
-        let parse = SourceFile::parse(&before);
+        let parse = SourceFile::parse(&before, span::Edition::CURRENT);
         on_char_typed_inner(&parse, offset, char_typed).map(|it| {
             it.apply(&mut before);
             before.to_string()
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index 4d5531ae307..1dca288017f 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -1,7 +1,7 @@
 //! This module add real world mbe example for benchmark tests
 
 use rustc_hash::FxHashMap;
-use span::Span;
+use span::{Edition, Span};
 use syntax::{
     ast::{self, HasName},
     AstNode, SmolStr,
@@ -46,7 +46,7 @@ fn benchmark_expand_macro_rules() {
         invocations
             .into_iter()
             .map(|(id, tt)| {
-                let res = rules[&id].expand(&tt, |_| (), true, DUMMY);
+                let res = rules[&id].expand(&tt, |_| (), true, DUMMY, Edition::CURRENT);
                 assert!(res.err.is_none());
                 res.value.token_trees.len()
             })
@@ -66,7 +66,7 @@ fn macro_rules_fixtures() -> FxHashMap {
 
 fn macro_rules_fixtures_tt() -> FxHashMap> {
     let fixture = bench_fixture::numerous_macro_rules();
-    let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
+    let source_file = ast::SourceFile::parse(&fixture, span::Edition::CURRENT).ok().unwrap();
 
     source_file
         .syntax()
@@ -120,7 +120,7 @@ fn invocation_fixtures(
             },
             token_trees: token_trees.into_boxed_slice(),
         };
-        if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() {
+        if it.expand(&subtree, |_| (), true, DUMMY, Edition::CURRENT).err.is_none() {
             res.push((name.clone(), subtree));
             break;
         }
diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs
index 2f2c0aa6ff5..2d495da0dbc 100644
--- a/crates/mbe/src/expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -6,7 +6,7 @@
 mod transcriber;
 
 use rustc_hash::FxHashMap;
-use span::Span;
+use span::{Edition, Span};
 use syntax::SmolStr;
 
 use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
@@ -17,10 +17,11 @@ pub(crate) fn expand_rules(
     marker: impl Fn(&mut Span) + Copy,
     new_meta_vars: bool,
     call_site: Span,
+    def_site_edition: Edition,
 ) -> ExpandResult> {
     let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
     for rule in rules {
-        let new_match = matcher::match_(&rule.lhs, input);
+        let new_match = matcher::match_(&rule.lhs, input, def_site_edition);
 
         if new_match.err.is_none() {
             // If we find a rule that applies without errors, we're done.
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index cb6bad6e708..78d4bfee2a1 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -62,7 +62,7 @@
 use std::rc::Rc;
 
 use smallvec::{smallvec, SmallVec};
-use span::Span;
+use span::{Edition, Span};
 use syntax::SmolStr;
 use tt::DelimSpan;
 
@@ -108,8 +108,8 @@ fn add_err(&mut self, err: ExpandError) {
 }
 
 /// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
-    let mut res = match_loop(pattern, input);
+pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, edition: Edition) -> Match {
+    let mut res = match_loop(pattern, input, edition);
     res.bound_count = count(res.bindings.bindings());
     return res;
 
@@ -363,6 +363,7 @@ fn match_loop_inner<'t>(
     eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
     error_items: &mut SmallVec<[MatchState<'t>; 1]>,
     delim_span: tt::DelimSpan,
+    edition: Edition,
 ) {
     macro_rules! try_push {
         ($items: expr, $it:expr) => {
@@ -473,7 +474,7 @@ macro_rules! try_push {
             OpDelimited::Op(Op::Var { kind, name, .. }) => {
                 if let &Some(kind) = kind {
                     let mut fork = src.clone();
-                    let match_res = match_meta_var(kind, &mut fork, delim_span);
+                    let match_res = match_meta_var(kind, &mut fork, delim_span, edition);
                     match match_res.err {
                         None => {
                             // Some meta variables are optional (e.g. vis)
@@ -586,7 +587,7 @@ macro_rules! try_push {
     }
 }
 
-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
+fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, edition: Edition) -> Match {
     let span = src.delimiter.delim_span();
     let mut src = TtIter::new(src);
     let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new();
@@ -627,6 +628,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
             &mut eof_items,
             &mut error_items,
             span,
+            edition,
         );
         stdx::always!(cur_items.is_empty());
 
@@ -740,23 +742,14 @@ fn match_meta_var(
     kind: MetaVarKind,
     input: &mut TtIter<'_, Span>,
     delim_span: DelimSpan,
+    edition: Edition,
 ) -> ExpandResult> {
     let fragment = match kind {
         MetaVarKind::Path => {
-            return input
-                .expect_fragment(parser::PrefixEntryPoint::Path, parser::Edition::CURRENT)
-                .map(|it| {
-                    it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
-                });
+            return input.expect_fragment(parser::PrefixEntryPoint::Path, edition).map(|it| {
+                it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
+            });
         }
-        MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
-        MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
-        MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
-        MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
-        MetaVarKind::Block => parser::PrefixEntryPoint::Block,
-        MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
-        MetaVarKind::Item => parser::PrefixEntryPoint::Item,
-        MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
         MetaVarKind::Expr => {
             // `expr` should not match underscores, let expressions, or inline const. The latter
             // two are for [backwards compatibility][0].
@@ -772,23 +765,21 @@ fn match_meta_var(
                 }
                 _ => {}
             };
-            return input
-                .expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::CURRENT)
-                .map(|tt| {
-                    tt.map(|tt| match tt {
-                        tt::TokenTree::Leaf(leaf) => tt::Subtree {
-                            delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
-                            token_trees: Box::new([leaf.into()]),
-                        },
-                        tt::TokenTree::Subtree(mut s) => {
-                            if s.delimiter.kind == tt::DelimiterKind::Invisible {
-                                s.delimiter.kind = tt::DelimiterKind::Parenthesis;
-                            }
-                            s
+            return input.expect_fragment(parser::PrefixEntryPoint::Expr, edition).map(|tt| {
+                tt.map(|tt| match tt {
+                    tt::TokenTree::Leaf(leaf) => tt::Subtree {
+                        delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
+                        token_trees: Box::new([leaf.into()]),
+                    },
+                    tt::TokenTree::Subtree(mut s) => {
+                        if s.delimiter.kind == tt::DelimiterKind::Invisible {
+                            s.delimiter.kind = tt::DelimiterKind::Parenthesis;
                         }
-                    })
-                    .map(Fragment::Expr)
-                });
+                        s
+                    }
+                })
+                .map(Fragment::Expr)
+            });
         }
         MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
             let tt_result = match kind {
@@ -822,8 +813,16 @@ fn match_meta_var(
             };
             return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
         }
+        MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
+        MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
+        MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
+        MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
+        MetaVarKind::Block => parser::PrefixEntryPoint::Block,
+        MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
+        MetaVarKind::Item => parser::PrefixEntryPoint::Item,
+        MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
     };
-    input.expect_fragment(fragment, parser::Edition::CURRENT).map(|it| it.map(Fragment::Tokens))
+    input.expect_fragment(fragment, edition).map(|it| it.map(Fragment::Tokens))
 }
 
 fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 3a853512660..5445f8790fc 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -250,8 +250,9 @@ pub fn expand(
         marker: impl Fn(&mut Span) + Copy,
         new_meta_vars: bool,
         call_site: Span,
+        def_site_edition: Edition,
     ) -> ExpandResult> {
-        expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site)
+        expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site, def_site_edition)
     }
 }
 
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index d2c42dcacc3..3230eeb5bd8 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -3,7 +3,7 @@
 use std::fmt;
 
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::{SpanAnchor, SpanData, SpanMap};
+use span::{Edition, SpanAnchor, SpanData, SpanMap};
 use stdx::{never, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
@@ -183,7 +183,12 @@ pub fn parse_to_token_tree_static_span(span: S, text: &str) -> Option
-pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char, span: S) -> Vec>
+pub fn parse_exprs_with_sep(
+    tt: &tt::Subtree,
+    sep: char,
+    span: S,
+    edition: Edition,
+) -> Vec>
 where
     S: Copy + fmt::Debug,
 {
@@ -195,8 +200,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char, span: S) -> Vec
 break,
diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs
index a261b1d4319..bbfe378200d 100644
--- a/crates/mbe/src/syntax_bridge/tests.rs
+++ b/crates/mbe/src/syntax_bridge/tests.rs
@@ -10,7 +10,7 @@
 use crate::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
 
 fn check_punct_spacing(fixture: &str) {
-    let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
+    let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap();
     let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
     let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
         .into_iter()
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs
index 815a98980b9..83dd99b36b3 100644
--- a/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -208,6 +208,7 @@ fn required_features(cfg_expr: &CfgExpr, features: &mut Vec) {
 mod tests {
     use super::*;
 
+    use ide::Edition;
     use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
     use syntax::{
         ast::{self, AstNode},
@@ -216,7 +217,7 @@ mod tests {
     fn check(cfg: &str, expected_features: &[&str]) {
         let cfg_expr = {
-            let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
+            let source_file = ast::SourceFile::parse(cfg, Edition::CURRENT).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
             let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY);
             CfgExpr::parse(&tt)
diff --git a/crates/rust-analyzer/src/cli/parse.rs b/crates/rust-analyzer/src/cli/parse.rs
index 757f2dd70ca..ead4d706e65 100644
--- a/crates/rust-analyzer/src/cli/parse.rs
+++ b/crates/rust-analyzer/src/cli/parse.rs
@@ -1,4 +1,5 @@
 //! Read Rust code on stdin, print syntax tree on stdout.
+use ide::Edition;
 use syntax::{AstNode, SourceFile};
 
 use crate::cli::{flags, read_stdin};
@@ -7,7 +8,7 @@ impl flags::Parse {
     pub fn run(self) -> anyhow::Result<()> {
         let _p = tracing::span!(tracing::Level::INFO, "parsing").entered();
         let text = read_stdin()?;
-        let file = SourceFile::parse(&text).tree();
+        let file = SourceFile::parse(&text, Edition::CURRENT).tree();
         if !self.no_dump {
             println!("{:#?}", file.syntax());
         }
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 01f2af419ed..0e62de5febb 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -255,7 +255,7 @@ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
 mod tests {
     use expect_test::{expect, Expect};
     use itertools::Itertools;
-    use parser::SyntaxKind;
+    use parser::{Edition, SyntaxKind};
     use text_edit::TextEdit;
 
     use crate::{AstNode, SyntaxElement};
@@ -607,8 +607,8 @@ fn main() {
     }
 
     fn check_diff(from: &str, to: &str, expected_diff: Expect) {
-        let from_node = crate::SourceFile::parse(from).tree().syntax().clone();
-        let to_node = crate::SourceFile::parse(to).tree().syntax().clone();
+        let from_node = crate::SourceFile::parse(from, Edition::CURRENT).tree().syntax().clone();
+        let to_node = crate::SourceFile::parse(to, Edition::CURRENT).tree().syntax().clone();
         let diff = super::diff(&from_node, &to_node);
 
         let line_number =
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index e9ab7a4320b..168ca9f1328 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -174,6 +174,7 @@ fn test_doc_comment_none() {
         // non-doc
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -189,6 +190,7 @@ fn test_outer_doc_comment_of_items() {
         // non-doc
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -204,6 +206,7 @@ fn test_inner_doc_comment_of_items() {
         // non-doc
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -218,6 +221,7 @@ fn test_doc_comment_of_statics() {
         /// Number of levels
         static LEVELS: i32 = 0;
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -237,6 +241,7 @@ fn test_doc_comment_preserves_indents() {
         /// ```
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -257,6 +262,7 @@ fn test_doc_comment_preserves_newlines() {
         /// foo
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -271,6 +277,7 @@ fn test_doc_comment_single_line_block_strips_suffix() {
         /** this is mod foo*/
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -285,6 +292,7 @@ fn test_doc_comment_single_line_block_strips_suffix_whitespace() {
         /** this is mod foo */
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -303,6 +311,7 @@ mod foo
         */
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -316,7 +325,7 @@ mod foo {}
 #[test]
 fn test_comments_preserve_trailing_whitespace() {
     let file = SourceFile::parse(
-        "\n/// Representation of a Realm.   \n/// In the specification these are called Realm Records.\nstruct Realm {}",
+        "\n/// Representation of a Realm.   \n/// In the specification these are called Realm Records.\nstruct Realm {}", parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -335,6 +344,7 @@ fn test_four_slash_line_comment() {
         /// doc comment
         mod foo {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
@@ -360,6 +370,7 @@ fn foo()
     for<'a> F: Fn(&'a str)
 {}
         "#,
+        parser::Edition::CURRENT,
     )
     .ok()
     .unwrap();
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 41d33c457ce..2445e4f1a32 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -1054,6 +1054,7 @@ impl Indent for N {}
 mod tests {
     use std::fmt;
 
+    use parser::Edition;
     use stdx::trim_indent;
     use test_utils::assert_eq_text;
 
@@ -1062,7 +1063,7 @@ mod tests {
     use super::*;
 
     fn ast_mut_from_text(text: &str) -> N {
-        let parse = SourceFile::parse(text);
+        let parse = SourceFile::parse(text, Edition::CURRENT);
         parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update()
     }
 
diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs
index 18a56e2823a..28a9dadacef 100644
--- a/crates/syntax/src/ast/expr_ext.rs
+++ b/crates/syntax/src/ast/expr_ext.rs
@@ -89,6 +89,7 @@ fn test() {
     else { "else" }
 }
         "#,
+        parser::Edition::CURRENT,
     );
     let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
     assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
@@ -123,6 +124,7 @@ fn test() {
     else { "else" }
 }
         "#,
+        parser::Edition::CURRENT,
     );
     let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
     assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
@@ -386,7 +388,8 @@ pub fn is_standalone(&self) -> bool {
 #[test]
 fn test_literal_with_attr() {
-    let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
+    let parse =
+        ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#, parser::Edition::CURRENT);
     let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
     assert_eq!(lit.token().text(), r#""Hello""#);
 }
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index ff18fee9bab..186f1b01da4 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -11,7 +11,7 @@
 //! term, it will be replaced with direct tree manipulation.
 
 use itertools::Itertools;
-use parser::T;
+use parser::{Edition, T};
 use rowan::NodeOrToken;
 use stdx::{format_to, format_to_acc, never};
@@ -1127,7 +1127,7 @@ pub fn token_tree(
 
 #[track_caller]
 fn ast_from_text(text: &str) -> N {
-    let parse = SourceFile::parse(text);
+    let parse = SourceFile::parse(text, Edition::CURRENT);
     let node = match parse.tree().syntax().descendants().find_map(N::cast) {
         Some(it) => it,
         None => {
@@ -1153,12 +1153,13 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken {
 
 pub mod tokens {
     use once_cell::sync::Lazy;
+    use parser::Edition;
 
     use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
 
     pub(super) static SOURCE_FILE: Lazy> = Lazy::new(|| {
         SourceFile::parse(
-            "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}",
+            "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
         )
     });
@@ -1186,13 +1187,13 @@ pub fn single_space() -> SyntaxToken {
 
     pub fn whitespace(text: &str) -> SyntaxToken {
         assert!(text.trim().is_empty());
-        let sf = SourceFile::parse(text).ok().unwrap();
+        let sf = SourceFile::parse(text, Edition::CURRENT).ok().unwrap();
         sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
     }
 
     pub fn doc_comment(text: &str) -> SyntaxToken {
         assert!(!text.trim().is_empty());
-        let sf = SourceFile::parse(text).ok().unwrap();
+        let sf = SourceFile::parse(text, Edition::CURRENT).ok().unwrap();
         sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
     }
 
@@ -1240,7 +1241,7 @@ pub fn blank_line() -> SyntaxToken {
 
     impl WsBuilder {
         pub fn new(text: &str) -> WsBuilder {
-            WsBuilder(SourceFile::parse(text).ok().unwrap())
+            WsBuilder(SourceFile::parse(text, Edition::CURRENT).ok().unwrap())
         }
         pub fn ws(&self) -> SyntaxToken {
             self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
         }
diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs
index 28738671790..682dcd7cc44 100644
--- a/crates/syntax/src/fuzz.rs
+++ b/crates/syntax/src/fuzz.rs
@@ -4,6 +4,7 @@
 
 use std::str::{self, FromStr};
 
+use parser::Edition;
 use text_edit::Indel;
 
 use crate::{validation, AstNode, SourceFile, TextRange};
@@ -14,7 +15,7 @@ fn check_file_invariants(file: &SourceFile) {
 }
 
 pub fn check_parser(text: &str) {
-    let file = SourceFile::parse(text);
+    let file = SourceFile::parse(text, Edition::CURRENT);
     check_file_invariants(&file.tree());
 }
 
@@ -48,11 +49,11 @@ pub fn from_data(data: &[u8]) -> Option {
 
     #[allow(clippy::print_stderr)]
     pub fn run(&self) {
-        let parse = SourceFile::parse(&self.text);
-        let new_parse = parse.reparse(&self.edit);
+        let parse = SourceFile::parse(&self.text, Edition::CURRENT);
+        let new_parse = parse.reparse(&self.edit, Edition::CURRENT);
         check_file_invariants(&new_parse.tree());
         assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
-        let full_reparse = SourceFile::parse(&self.edited_text);
+        let full_reparse = SourceFile::parse(&self.edited_text, Edition::CURRENT);
         for (a, b) in
             new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
         {
diff --git a/crates/syntax/src/hacks.rs b/crates/syntax/src/hacks.rs
index a3023c3195f..36615d11d85 100644
--- a/crates/syntax/src/hacks.rs
+++ b/crates/syntax/src/hacks.rs
@@ -2,11 +2,13 @@
 //!
 //! Please avoid adding new usages of the functions in this module
 
+use parser::Edition;
+
 use crate::{ast, AstNode};
 
 pub fn parse_expr_from_str(s: &str) -> Option {
     let s = s.trim();
-    let file = ast::SourceFile::parse(&format!("const _: () = {s};"));
+    let file = ast::SourceFile::parse(&format!("const _: () = {s};"), Edition::CURRENT);
     let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
     if expr.syntax().text() != s {
         return None;
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index a3455435e34..e7bbf936dc1 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -141,8 +141,8 @@ pub fn debug_dump(&self) -> String {
         buf
     }
 
-    pub fn reparse(&self, indel: &Indel) -> Parse {
-        self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
+    pub fn reparse(&self, indel: &Indel, edition: Edition) -> Parse {
+        self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel, edition))
     }
 
     fn incremental_reparse(&self, indel: &Indel) -> Option> {
@@ -159,10 +159,10 @@ fn incremental_reparse(&self, indel: &Indel) -> Option> {
         })
     }
 
-    fn full_reparse(&self, indel: &Indel) -> Parse {
+    fn full_reparse(&self, indel: &Indel, edition: Edition) -> Parse {
         let mut text = self.tree().syntax().text().to_string();
         indel.apply(&mut text);
-        SourceFile::parse(&text)
+        SourceFile::parse(&text, edition)
     }
 }
 
@@ -170,9 +170,9 @@ fn full_reparse(&self, indel: &Indel) -> Parse {
 pub use crate::ast::SourceFile;
 
 impl SourceFile {
-    pub fn parse(text: &str) -> Parse {
+    pub fn parse(text: &str, edition: Edition) -> Parse {
         let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
-        let (green, errors) = parsing::parse_text(text, parser::Edition::CURRENT);
+        let (green, errors) = parsing::parse_text(text, edition);
         let root = SyntaxNode::new_root(green.clone());
 
         assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
@@ -340,7 +340,7 @@ fn foo() {
     //
     // The `parse` method returns a `Parse` -- a pair of syntax tree and a list
     // of errors. That is, syntax tree is constructed even in presence of errors.
-    let parse = SourceFile::parse(source_code);
+    let parse = SourceFile::parse(source_code, parser::Edition::CURRENT);
     assert!(parse.errors().is_empty());
 
     // The `tree` method returns an owned syntax node of type `SourceFile`.
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 43435056c45..354b89fd490 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -177,6 +177,7 @@ fn merge_errors(
 
 #[cfg(test)]
 mod tests {
+    use parser::Edition;
     use test_utils::{assert_eq_text, extract_range};
 
     use super::*;
@@ -191,9 +192,9 @@ fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
             after
         };
 
-        let fully_reparsed = SourceFile::parse(&after);
+        let fully_reparsed = SourceFile::parse(&after, Edition::CURRENT);
         let incrementally_reparsed: Parse = {
-            let before = SourceFile::parse(&before);
+            let before = SourceFile::parse(&before, Edition::CURRENT);
             let (green, new_errors, range) = incremental_reparse(
                 before.tree().syntax(),
                 &edit,
diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs
index fb8aee9c3b0..ed4894f9b9c 100644
--- a/crates/syntax/src/ptr.rs
+++ b/crates/syntax/src/ptr.rs
@@ -120,7 +120,7 @@ fn from(ptr: AstPtr) -> SyntaxNodePtr {
 fn test_local_syntax_ptr() {
     use crate::{ast, AstNode, SourceFile};
 
-    let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap();
+    let file = SourceFile::parse("struct Foo { f: u32, }", parser::Edition::CURRENT).ok().unwrap();
     let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
     let ptr = SyntaxNodePtr::new(field.syntax());
     let field_syntax = ptr.to_node(file.syntax());
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index 439daa358a8..f0d58efc01e 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -5,6 +5,7 @@
 
 use ast::HasName;
 use expect_test::expect_file;
+use parser::Edition;
 use rayon::prelude::*;
 use stdx::format_to_acc;
 use test_utils::{bench, bench_fixture, project_root};
@@ -19,7 +20,7 @@ fn main() {
 }
 "#;
 
-    let parse = SourceFile::parse(code);
+    let parse = SourceFile::parse(code, Edition::CURRENT);
     // eprintln!("{:#?}", parse.syntax_node());
     assert!(parse.ok().is_ok());
 }
@@ -33,7 +34,7 @@ fn benchmark_parser() {
     let data = bench_fixture::glorious_old_parser();
     let tree = {
         let _b = bench("parsing");
-        let p = SourceFile::parse(&data);
+        let p = SourceFile::parse(&data, Edition::CURRENT);
         assert!(p.errors().is_empty());
         assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
         p.tree()
@@ -50,7 +51,7 @@ fn benchmark_parser() {
 #[test]
 fn validation_tests() {
     dir_tests(&test_data_dir(), &["parser/validation"], "rast", |text, path| {
-        let parse = SourceFile::parse(text);
+        let parse = SourceFile::parse(text, Edition::CURRENT);
         let errors = parse.errors();
         assert_errors_are_present(&errors, path);
         parse.debug_dump()
@@ -110,7 +111,7 @@ fn self_hosting_parsing() {
         .into_par_iter()
        .filter_map(|file| {
            let text = read_text(&file);
-            match SourceFile::parse(&text).ok() {
+            match SourceFile::parse(&text, Edition::CURRENT).ok() {
                Ok(_) => None,
                Err(err) => Some((file, err)),
            }
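
Usage note (editorial, not part of the patch): after this change the parsing entry points take an explicit `Edition` instead of assuming one internally. The sketch below shows roughly what a caller looks like now; it assumes the `syntax` crate from this repository with its `Edition` re-export (as the `ide-completion` and `ide-ssr` hunks already rely on) and simply passes `Edition::CURRENT`, the same placeholder the FIXME comments in the patch still hard-code.

```rust
use syntax::{ast, AstNode, Edition, SourceFile};

fn count_fns(text: &str) -> usize {
    // `SourceFile::parse` now requires the caller to pick the edition whose
    // grammar should be used; `Edition::CURRENT` mirrors the FIXMEs above.
    let parse = SourceFile::parse(text, Edition::CURRENT);

    // A tree is still produced even when there are parse errors.
    let file: SourceFile = parse.tree();
    file.syntax().descendants().filter_map(ast::Fn::cast).count()
}
```

Reparsing follows the same pattern, e.g. `parse.reparse(&edit, Edition::CURRENT)` as in the `fuzz.rs` hunk, and `mbe` expansion now threads the macro definition's edition through `expand_rules` and the matcher.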