internal: Thread edition through to parsing/tt-to-syntax-tree routines for macros

This commit is contained in:
Lukas Wirth 2024-04-14 16:02:38 +02:00
parent 83370fe5d7
commit a483d3bc37
39 changed files with 187 additions and 145 deletions

View File

@ -83,7 +83,8 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered(); let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered();
let text = db.file_text(file_id); let text = db.file_text(file_id);
SourceFile::parse(&text) // FIXME: Edition based parsing
SourceFile::parse(&text, span::Edition::CURRENT)
} }
/// We don't want to give HIR knowledge of source roots, hence we extract these /// We don't want to give HIR knowledge of source roots, hence we extract these

View File

@ -1,12 +1,12 @@
use arbitrary::{Arbitrary, Unstructured}; use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
use syntax::{ast, AstNode}; use syntax::{ast, AstNode, Edition};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) { fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
@ -14,7 +14,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
} }
fn check_dnf(input: &str, expect: Expect) { fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
@ -23,7 +23,7 @@ fn check_dnf(input: &str, expect: Expect) {
} }
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);
@ -34,7 +34,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller] #[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
let cfg = CfgExpr::parse(&tt); let cfg = CfgExpr::parse(&tt);

View File

@ -11,7 +11,7 @@
use crate::attr::{DocAtom, DocExpr}; use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) { fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap(); let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let tt = syntax_node_to_token_tree( let tt = syntax_node_to_token_tree(

View File

@ -610,7 +610,8 @@ fn check_found_path_(
) { ) {
let (db, pos) = TestDB::with_position(ra_fixture); let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(pos); let module = db.module_at_position(pos);
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};")); let parsed_path_file =
syntax::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT);
let ast_path = let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap(); parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(&db, ast_path, &mut |range| { let mod_path = ModPath::from_src(&db, ast_path, &mut |range| {

View File

@ -219,7 +219,7 @@ fn assert_expand(
span: Span, span: Span,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id); let call_site_span = span_with_call_site_ctxt(db, span, id);
let args = parse_exprs_with_sep(tt, ',', call_site_span); let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT);
let dollar_crate = dollar_crate(span); let dollar_crate = dollar_crate(span);
let expanded = match &*args { let expanded = match &*args {
[cond, panic_args @ ..] => { [cond, panic_args @ ..] => {

View File

@ -327,7 +327,7 @@ mod tests {
use crate::cfg_process::parse_from_attr_meta; use crate::cfg_process::parse_from_attr_meta;
fn check_dnf_from_syntax(input: &str, expect: Expect) { fn check_dnf_from_syntax(input: &str, expect: Expect) {
let parse = SourceFile::parse(input); let parse = SourceFile::parse(input, span::Edition::CURRENT);
let node = match parse.tree().syntax().descendants().find_map(Attr::cast) { let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
Some(it) => it, Some(it) => it,
None => { None => {

View File

@ -225,7 +225,8 @@ pub fn expand_speculative(
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead. // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind { let mut speculative_expansion =
match loc.def.kind {
MacroDefKind::ProcMacro(expander, _, ast) => { MacroDefKind::ProcMacro(expander, _, ast) => {
let span = db.proc_macro_span(ast); let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span); tt.delimiter = tt::Delimiter::invisible_spanned(span);
@ -243,9 +244,9 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span) pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
} }
MacroDefKind::Declarative(it) => { MacroDefKind::Declarative(it) => db
db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span) .decl_macro_expander(loc.krate, it)
} .expand_unhygienic(db, tt, loc.def.krate, span, loc.def.edition),
MacroDefKind::BuiltIn(it, _) => { MacroDefKind::BuiltIn(it, _) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into) it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
} }
@ -261,7 +262,8 @@ pub fn expand_speculative(
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info); fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); let (node, rev_tmap) =
token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node(); let syntax_node = node.syntax_node();
let token = rev_tmap let token = rev_tmap
@ -309,6 +311,7 @@ fn parse_macro_expansion(
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> { ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered(); let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered();
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let edition = loc.def.edition;
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc); let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
@ -318,6 +321,7 @@ fn parse_macro_expansion(
CowArc::Owned(it) => it, CowArc::Owned(it) => it,
}, },
expand_to, expand_to,
edition,
); );
ExpandResult { value: (parse, Arc::new(rev_token_map)), err } ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
@ -668,6 +672,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
fn token_tree_to_syntax_node( fn token_tree_to_syntax_node(
tt: &tt::Subtree, tt: &tt::Subtree,
expand_to: ExpandTo, expand_to: ExpandTo,
edition: parser::Edition,
) -> (Parse<SyntaxNode>, ExpansionSpanMap) { ) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to { let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
@ -676,7 +681,7 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => mbe::TopEntryPoint::Type, ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr, ExpandTo::Expr => mbe::TopEntryPoint::Expr,
}; };
mbe::token_tree_to_syntax_node(tt, entry_point, parser::Edition::CURRENT) mbe::token_tree_to_syntax_node(tt, entry_point, edition)
} }
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {

View File

@ -2,7 +2,7 @@
use std::sync::OnceLock; use std::sync::OnceLock;
use base_db::{CrateId, VersionReq}; use base_db::{CrateId, VersionReq};
use span::{MacroCallId, Span, SyntaxContextId}; use span::{Edition, MacroCallId, Span, SyntaxContextId};
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
use triomphe::Arc; use triomphe::Arc;
@ -56,6 +56,7 @@ pub fn expand(
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency), |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars, new_meta_vars,
span, span,
loc.def.edition,
) )
.map_err(Into::into), .map_err(Into::into),
} }
@ -67,6 +68,7 @@ pub fn expand_unhygienic(
tt: tt::Subtree, tt: tt::Subtree,
krate: CrateId, krate: CrateId,
call_site: Span, call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let toolchain = db.toolchain(krate); let toolchain = db.toolchain(krate);
let new_meta_vars = toolchain.as_ref().map_or(false, |version| { let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@ -85,7 +87,10 @@ pub fn expand_unhygienic(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::MacroDefinition, ExpandError::MacroDefinition,
), ),
None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), None => self
.mac
.expand(&tt, |_| (), new_meta_vars, call_site, def_site_edition)
.map_err(Into::into),
} }
} }

View File

@ -396,7 +396,7 @@ fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
#[track_caller] #[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) { fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture); let parsed = syntax::SourceFile::parse(ra_fixture, span::Edition::CURRENT);
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let fixups = super::fixup_syntax( let fixups = super::fixup_syntax(
span_map.as_ref(), span_map.as_ref(),

View File

@ -17,7 +17,7 @@
}; };
use syntax::{ use syntax::{
ast::{self, AttrKind, NameOrNameRef}, ast::{self, AttrKind, NameOrNameRef},
AstNode, SmolStr, AstNode, Edition, SmolStr,
SyntaxKind::{self, *}, SyntaxKind::{self, *},
SyntaxToken, TextRange, TextSize, T, SyntaxToken, TextRange, TextSize, T,
}; };
@ -667,7 +667,8 @@ pub(crate) fn new(
let file_with_fake_ident = { let file_with_fake_ident = {
let parse = db.parse(file_id); let parse = db.parse(file_id);
let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned()); let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned());
parse.reparse(&edit).tree() // FIXME: Edition
parse.reparse(&edit, Edition::CURRENT).tree()
}; };
// always pick the token to the immediate left of the cursor, as that is what we are actually // always pick the token to the immediate left of the cursor, as that is what we are actually

View File

@ -200,7 +200,7 @@ fn validate_snippet(
) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> { ) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
let mut imports = Vec::with_capacity(requires.len()); let mut imports = Vec::with_capacity(requires.len());
for path in requires.iter() { for path in requires.iter() {
let use_path = ast::SourceFile::parse(&format!("use {path};")) let use_path = ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT)
.syntax_node() .syntax_node()
.descendants() .descendants()
.find_map(ast::Path::cast)?; .find_map(ast::Path::cast)?;

View File

@ -176,7 +176,7 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) { pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
let text: &str = "use foo as _"; let text: &str = "use foo as _";
let parse = syntax::SourceFile::parse(text); let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT);
let node = parse let node = parse
.tree() .tree()
.syntax() .syntax()

View File

@ -1243,7 +1243,7 @@ fn check_with_config(
.and_then(|it| ImportScope::find_insert_use_container(&it, sema)) .and_then(|it| ImportScope::find_insert_use_container(&it, sema))
.or_else(|| ImportScope::from(syntax)) .or_else(|| ImportScope::from(syntax))
.unwrap(); .unwrap();
let path = ast::SourceFile::parse(&format!("use {path};")) let path = ast::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT)
.tree() .tree()
.syntax() .syntax()
.descendants() .descendants()
@ -1292,14 +1292,14 @@ fn check_one(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
} }
fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior) { fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior) {
let use0 = ast::SourceFile::parse(ra_fixture0) let use0 = ast::SourceFile::parse(ra_fixture0, span::Edition::CURRENT)
.tree() .tree()
.syntax() .syntax()
.descendants() .descendants()
.find_map(ast::Use::cast) .find_map(ast::Use::cast)
.unwrap(); .unwrap();
let use1 = ast::SourceFile::parse(ra_fixture1) let use1 = ast::SourceFile::parse(ra_fixture1, span::Edition::CURRENT)
.tree() .tree()
.syntax() .syntax()
.descendants() .descendants()
@ -1311,7 +1311,7 @@ fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior
} }
fn check_guess(ra_fixture: &str, expected: ImportGranularityGuess) { fn check_guess(ra_fixture: &str, expected: ImportGranularityGuess) {
let syntax = ast::SourceFile::parse(ra_fixture).tree().syntax().clone(); let syntax = ast::SourceFile::parse(ra_fixture, span::Edition::CURRENT).tree().syntax().clone();
let file = ImportScope::from(syntax).unwrap(); let file = ImportScope::from(syntax).unwrap();
assert_eq!(super::guess_granularity_from_scope(&file), expected); assert_eq!(super::guess_granularity_from_scope(&file), expected);
} }

View File

@ -27,7 +27,7 @@ pub(crate) fn expr(s: &str) -> Result<SyntaxNode, ()> {
pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> { pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> {
let template = "const _: () = { {}; };"; let template = "const _: () = { {}; };";
let input = template.replace("{}", s); let input = template.replace("{}", s);
let parse = syntax::SourceFile::parse(&input); let parse = syntax::SourceFile::parse(&input, syntax::Edition::CURRENT);
if !parse.errors().is_empty() { if !parse.errors().is_empty() {
return Err(()); return Err(());
} }
@ -48,7 +48,7 @@ pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> {
fn fragment<T: AstNode>(template: &str, s: &str) -> Result<SyntaxNode, ()> { fn fragment<T: AstNode>(template: &str, s: &str) -> Result<SyntaxNode, ()> {
let s = s.trim(); let s = s.trim();
let input = template.replace("{}", s); let input = template.replace("{}", s);
let parse = syntax::SourceFile::parse(&input); let parse = syntax::SourceFile::parse(&input, syntax::Edition::CURRENT);
if !parse.errors().is_empty() { if !parse.errors().is_empty() {
return Err(()); return Err(());
} }

View File

@ -220,7 +220,7 @@ mod tests {
use super::*; use super::*;
fn check(ra_fixture: &str, expect: Expect) { fn check(ra_fixture: &str, expect: Expect) {
let file = SourceFile::parse(ra_fixture).ok().unwrap(); let file = SourceFile::parse(ra_fixture, span::Edition::CURRENT).ok().unwrap();
let structure = file_structure(&file); let structure = file_structure(&file);
expect.assert_debug_eq(&structure) expect.assert_debug_eq(&structure)
} }

View File

@ -289,7 +289,7 @@ mod tests {
fn check(ra_fixture: &str) { fn check(ra_fixture: &str) {
let (ranges, text) = extract_tags(ra_fixture, "fold"); let (ranges, text) = extract_tags(ra_fixture, "fold");
let parse = SourceFile::parse(&text); let parse = SourceFile::parse(&text, span::Edition::CURRENT);
let mut folds = folding_ranges(&parse.tree()); let mut folds = folding_ranges(&parse.tree());
folds.sort_by_key(|fold| (fold.range.start(), fold.range.end())); folds.sort_by_key(|fold| (fold.range.start(), fold.range.end()));

View File

@ -316,7 +316,7 @@ fn check_join_lines(ra_fixture_before: &str, ra_fixture_after: &str) {
}; };
let (before_cursor_pos, before) = extract_offset(ra_fixture_before); let (before_cursor_pos, before) = extract_offset(ra_fixture_before);
let file = SourceFile::parse(&before).ok().unwrap(); let file = SourceFile::parse(&before, span::Edition::CURRENT).ok().unwrap();
let range = TextRange::empty(before_cursor_pos); let range = TextRange::empty(before_cursor_pos);
let result = join_lines(&config, &file, range); let result = join_lines(&config, &file, range);
@ -342,7 +342,7 @@ fn check_join_lines_sel(ra_fixture_before: &str, ra_fixture_after: &str) {
}; };
let (sel, before) = extract_range(ra_fixture_before); let (sel, before) = extract_range(ra_fixture_before);
let parse = SourceFile::parse(&before); let parse = SourceFile::parse(&before, span::Edition::CURRENT);
let result = join_lines(&config, &parse.tree(), sel); let result = join_lines(&config, &parse.tree(), sel);
let actual = { let actual = {
let mut actual = before; let mut actual = before;

View File

@ -50,7 +50,7 @@ mod tests {
fn test_matching_brace() { fn test_matching_brace() {
fn do_check(before: &str, after: &str) { fn do_check(before: &str, after: &str) {
let (pos, before) = extract_offset(before); let (pos, before) = extract_offset(before);
let parse = SourceFile::parse(&before); let parse = SourceFile::parse(&before, span::Edition::CURRENT);
let new_pos = match matching_brace(&parse.tree(), pos) { let new_pos = match matching_brace(&parse.tree(), pos) {
None => pos, None => pos,
Some(pos) => pos, Some(pos) => pos,

View File

@ -88,7 +88,7 @@ fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<St
// Remove custom markers // Remove custom markers
.replace("$0", ""); .replace("$0", "");
let parsed = SourceFile::parse(&text); let parsed = SourceFile::parse(&text, span::Edition::CURRENT);
// If the "file" parsed without errors, // If the "file" parsed without errors,
// return its syntax // return its syntax

View File

@ -127,7 +127,8 @@ fn on_opening_bracket_typed(
if !stdx::always!(range.len() == TextSize::of(opening_bracket)) { if !stdx::always!(range.len() == TextSize::of(opening_bracket)) {
return None; return None;
} }
let file = file.reparse(&Indel::delete(range)); // FIXME: Edition
let file = file.reparse(&Indel::delete(range), span::Edition::CURRENT);
if let Some(edit) = bracket_expr(&file.tree(), offset, opening_bracket, closing_bracket) { if let Some(edit) = bracket_expr(&file.tree(), offset, opening_bracket, closing_bracket) {
return Some(edit); return Some(edit);
@ -411,7 +412,7 @@ fn do_type_char(char_typed: char, before: &str) -> Option<String> {
let (offset, mut before) = extract_offset(before); let (offset, mut before) = extract_offset(before);
let edit = TextEdit::insert(offset, char_typed.to_string()); let edit = TextEdit::insert(offset, char_typed.to_string());
edit.apply(&mut before); edit.apply(&mut before);
let parse = SourceFile::parse(&before); let parse = SourceFile::parse(&before, span::Edition::CURRENT);
on_char_typed_inner(&parse, offset, char_typed).map(|it| { on_char_typed_inner(&parse, offset, char_typed).map(|it| {
it.apply(&mut before); it.apply(&mut before);
before.to_string() before.to_string()

View File

@ -1,7 +1,7 @@
//! This module adds real-world mbe examples for benchmark tests //! This module adds real-world mbe examples for benchmark tests
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use span::Span; use span::{Edition, Span};
use syntax::{ use syntax::{
ast::{self, HasName}, ast::{self, HasName},
AstNode, SmolStr, AstNode, SmolStr,
@ -46,7 +46,7 @@ fn benchmark_expand_macro_rules() {
invocations invocations
.into_iter() .into_iter()
.map(|(id, tt)| { .map(|(id, tt)| {
let res = rules[&id].expand(&tt, |_| (), true, DUMMY); let res = rules[&id].expand(&tt, |_| (), true, DUMMY, Edition::CURRENT);
assert!(res.err.is_none()); assert!(res.err.is_none());
res.value.token_trees.len() res.value.token_trees.len()
}) })
@ -66,7 +66,7 @@ fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> { fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> {
let fixture = bench_fixture::numerous_macro_rules(); let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); let source_file = ast::SourceFile::parse(&fixture, span::Edition::CURRENT).ok().unwrap();
source_file source_file
.syntax() .syntax()
@ -120,7 +120,7 @@ fn invocation_fixtures(
}, },
token_trees: token_trees.into_boxed_slice(), token_trees: token_trees.into_boxed_slice(),
}; };
if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() { if it.expand(&subtree, |_| (), true, DUMMY, Edition::CURRENT).err.is_none() {
res.push((name.clone(), subtree)); res.push((name.clone(), subtree));
break; break;
} }

View File

@ -6,7 +6,7 @@
mod transcriber; mod transcriber;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use span::Span; use span::{Edition, Span};
use syntax::SmolStr; use syntax::SmolStr;
use crate::{parser::MetaVarKind, ExpandError, ExpandResult}; use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
@ -17,10 +17,11 @@ pub(crate) fn expand_rules(
marker: impl Fn(&mut Span) + Copy, marker: impl Fn(&mut Span) + Copy,
new_meta_vars: bool, new_meta_vars: bool,
call_site: Span, call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<tt::Subtree<Span>> { ) -> ExpandResult<tt::Subtree<Span>> {
let mut match_: Option<(matcher::Match, &crate::Rule)> = None; let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
for rule in rules { for rule in rules {
let new_match = matcher::match_(&rule.lhs, input); let new_match = matcher::match_(&rule.lhs, input, def_site_edition);
if new_match.err.is_none() { if new_match.err.is_none() {
// If we find a rule that applies without errors, we're done. // If we find a rule that applies without errors, we're done.

View File

@ -62,7 +62,7 @@
use std::rc::Rc; use std::rc::Rc;
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use span::Span; use span::{Edition, Span};
use syntax::SmolStr; use syntax::SmolStr;
use tt::DelimSpan; use tt::DelimSpan;
@ -108,8 +108,8 @@ fn add_err(&mut self, err: ExpandError) {
} }
/// Matching errors are added to the `Match`. /// Matching errors are added to the `Match`.
pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>) -> Match { pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>, edition: Edition) -> Match {
let mut res = match_loop(pattern, input); let mut res = match_loop(pattern, input, edition);
res.bound_count = count(res.bindings.bindings()); res.bound_count = count(res.bindings.bindings());
return res; return res;
@ -363,6 +363,7 @@ fn match_loop_inner<'t>(
eof_items: &mut SmallVec<[MatchState<'t>; 1]>, eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
error_items: &mut SmallVec<[MatchState<'t>; 1]>, error_items: &mut SmallVec<[MatchState<'t>; 1]>,
delim_span: tt::DelimSpan<Span>, delim_span: tt::DelimSpan<Span>,
edition: Edition,
) { ) {
macro_rules! try_push { macro_rules! try_push {
($items: expr, $it:expr) => { ($items: expr, $it:expr) => {
@ -473,7 +474,7 @@ macro_rules! try_push {
OpDelimited::Op(Op::Var { kind, name, .. }) => { OpDelimited::Op(Op::Var { kind, name, .. }) => {
if let &Some(kind) = kind { if let &Some(kind) = kind {
let mut fork = src.clone(); let mut fork = src.clone();
let match_res = match_meta_var(kind, &mut fork, delim_span); let match_res = match_meta_var(kind, &mut fork, delim_span, edition);
match match_res.err { match match_res.err {
None => { None => {
// Some meta variables are optional (e.g. vis) // Some meta variables are optional (e.g. vis)
@ -586,7 +587,7 @@ macro_rules! try_push {
} }
} }
fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>) -> Match { fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition) -> Match {
let span = src.delimiter.delim_span(); let span = src.delimiter.delim_span();
let mut src = TtIter::new(src); let mut src = TtIter::new(src);
let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new(); let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new();
@ -627,6 +628,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>) -> Match {
&mut eof_items, &mut eof_items,
&mut error_items, &mut error_items,
span, span,
edition,
); );
stdx::always!(cur_items.is_empty()); stdx::always!(cur_items.is_empty());
@ -740,23 +742,14 @@ fn match_meta_var(
kind: MetaVarKind, kind: MetaVarKind,
input: &mut TtIter<'_, Span>, input: &mut TtIter<'_, Span>,
delim_span: DelimSpan<Span>, delim_span: DelimSpan<Span>,
edition: Edition,
) -> ExpandResult<Option<Fragment>> { ) -> ExpandResult<Option<Fragment>> {
let fragment = match kind { let fragment = match kind {
MetaVarKind::Path => { MetaVarKind::Path => {
return input return input.expect_fragment(parser::PrefixEntryPoint::Path, edition).map(|it| {
.expect_fragment(parser::PrefixEntryPoint::Path, parser::Edition::CURRENT)
.map(|it| {
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
}); });
} }
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
MetaVarKind::Block => parser::PrefixEntryPoint::Block,
MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
MetaVarKind::Item => parser::PrefixEntryPoint::Item,
MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
MetaVarKind::Expr => { MetaVarKind::Expr => {
// `expr` should not match underscores, let expressions, or inline const. The latter // `expr` should not match underscores, let expressions, or inline const. The latter
// two are for [backwards compatibility][0]. // two are for [backwards compatibility][0].
@ -772,9 +765,7 @@ fn match_meta_var(
} }
_ => {} _ => {}
}; };
return input return input.expect_fragment(parser::PrefixEntryPoint::Expr, edition).map(|tt| {
.expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::CURRENT)
.map(|tt| {
tt.map(|tt| match tt { tt.map(|tt| match tt {
tt::TokenTree::Leaf(leaf) => tt::Subtree { tt::TokenTree::Leaf(leaf) => tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
@ -822,8 +813,16 @@ fn match_meta_var(
}; };
return tt_result.map(|it| Some(Fragment::Tokens(it))).into(); return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
} }
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
MetaVarKind::Block => parser::PrefixEntryPoint::Block,
MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
MetaVarKind::Item => parser::PrefixEntryPoint::Item,
MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
}; };
input.expect_fragment(fragment, parser::Edition::CURRENT).map(|it| it.map(Fragment::Tokens)) input.expect_fragment(fragment, edition).map(|it| it.map(Fragment::Tokens))
} }
fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {

View File

@ -250,8 +250,9 @@ pub fn expand(
marker: impl Fn(&mut Span) + Copy, marker: impl Fn(&mut Span) + Copy,
new_meta_vars: bool, new_meta_vars: bool,
call_site: Span, call_site: Span,
def_site_edition: Edition,
) -> ExpandResult<tt::Subtree<Span>> { ) -> ExpandResult<tt::Subtree<Span>> {
expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site) expander::expand_rules(&self.rules, tt, marker, new_meta_vars, call_site, def_site_edition)
} }
} }

View File

@ -3,7 +3,7 @@
use std::fmt; use std::fmt;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use span::{SpanAnchor, SpanData, SpanMap}; use span::{Edition, SpanAnchor, SpanData, SpanMap};
use stdx::{never, non_empty_vec::NonEmptyVec}; use stdx::{never, non_empty_vec::NonEmptyVec};
use syntax::{ use syntax::{
ast::{self, make::tokens::doc_comment}, ast::{self, make::tokens::doc_comment},
@ -183,7 +183,12 @@ pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Sub
} }
/// Split token tree with separate expr: $($e:expr)SEP* /// Split token tree with separate expr: $($e:expr)SEP*
pub fn parse_exprs_with_sep<S>(tt: &tt::Subtree<S>, sep: char, span: S) -> Vec<tt::Subtree<S>> pub fn parse_exprs_with_sep<S>(
tt: &tt::Subtree<S>,
sep: char,
span: S,
edition: Edition,
) -> Vec<tt::Subtree<S>>
where where
S: Copy + fmt::Debug, S: Copy + fmt::Debug,
{ {
@ -195,8 +200,7 @@ pub fn parse_exprs_with_sep<S>(tt: &tt::Subtree<S>, sep: char, span: S) -> Vec<t
let mut res = Vec::new(); let mut res = Vec::new();
while iter.peek_n(0).is_some() { while iter.peek_n(0).is_some() {
let expanded = let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr, edition);
iter.expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::CURRENT);
res.push(match expanded.value { res.push(match expanded.value {
None => break, None => break,

View File

@ -10,7 +10,7 @@
use crate::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use crate::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
fn check_punct_spacing(fixture: &str) { fn check_punct_spacing(fixture: &str) {
let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap();
let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY); let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
let mut annotations: FxHashMap<_, _> = extract_annotations(fixture) let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
.into_iter() .into_iter()

View File

@ -208,6 +208,7 @@ fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
mod tests { mod tests {
use super::*; use super::*;
use ide::Edition;
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
@ -216,7 +217,7 @@ mod tests {
fn check(cfg: &str, expected_features: &[&str]) { fn check(cfg: &str, expected_features: &[&str]) {
let cfg_expr = { let cfg_expr = {
let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let source_file = ast::SourceFile::parse(cfg, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY); let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY);
CfgExpr::parse(&tt) CfgExpr::parse(&tt)

View File

@ -1,4 +1,5 @@
//! Read Rust code on stdin, print syntax tree on stdout. //! Read Rust code on stdin, print syntax tree on stdout.
use ide::Edition;
use syntax::{AstNode, SourceFile}; use syntax::{AstNode, SourceFile};
use crate::cli::{flags, read_stdin}; use crate::cli::{flags, read_stdin};
@ -7,7 +8,7 @@ impl flags::Parse {
pub fn run(self) -> anyhow::Result<()> { pub fn run(self) -> anyhow::Result<()> {
let _p = tracing::span!(tracing::Level::INFO, "parsing").entered(); let _p = tracing::span!(tracing::Level::INFO, "parsing").entered();
let text = read_stdin()?; let text = read_stdin()?;
let file = SourceFile::parse(&text).tree(); let file = SourceFile::parse(&text, Edition::CURRENT).tree();
if !self.no_dump { if !self.no_dump {
println!("{:#?}", file.syntax()); println!("{:#?}", file.syntax());
} }

View File

@ -255,7 +255,7 @@ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
mod tests { mod tests {
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use itertools::Itertools; use itertools::Itertools;
use parser::SyntaxKind; use parser::{Edition, SyntaxKind};
use text_edit::TextEdit; use text_edit::TextEdit;
use crate::{AstNode, SyntaxElement}; use crate::{AstNode, SyntaxElement};
@ -607,8 +607,8 @@ fn main() {
} }
fn check_diff(from: &str, to: &str, expected_diff: Expect) { fn check_diff(from: &str, to: &str, expected_diff: Expect) {
let from_node = crate::SourceFile::parse(from).tree().syntax().clone(); let from_node = crate::SourceFile::parse(from, Edition::CURRENT).tree().syntax().clone();
let to_node = crate::SourceFile::parse(to).tree().syntax().clone(); let to_node = crate::SourceFile::parse(to, Edition::CURRENT).tree().syntax().clone();
let diff = super::diff(&from_node, &to_node); let diff = super::diff(&from_node, &to_node);
let line_number = let line_number =

View File

@ -174,6 +174,7 @@ fn test_doc_comment_none() {
// non-doc // non-doc
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -189,6 +190,7 @@ fn test_outer_doc_comment_of_items() {
// non-doc // non-doc
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -204,6 +206,7 @@ fn test_inner_doc_comment_of_items() {
// non-doc // non-doc
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -218,6 +221,7 @@ fn test_doc_comment_of_statics() {
/// Number of levels /// Number of levels
static LEVELS: i32 = 0; static LEVELS: i32 = 0;
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -237,6 +241,7 @@ fn test_doc_comment_preserves_indents() {
/// ``` /// ```
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -257,6 +262,7 @@ fn test_doc_comment_preserves_newlines() {
/// foo /// foo
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -271,6 +277,7 @@ fn test_doc_comment_single_line_block_strips_suffix() {
/** this is mod foo*/ /** this is mod foo*/
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -285,6 +292,7 @@ fn test_doc_comment_single_line_block_strips_suffix_whitespace() {
/** this is mod foo */ /** this is mod foo */
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -303,6 +311,7 @@ mod foo
*/ */
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -316,7 +325,7 @@ mod foo {}
#[test] #[test]
fn test_comments_preserve_trailing_whitespace() { fn test_comments_preserve_trailing_whitespace() {
let file = SourceFile::parse( let file = SourceFile::parse(
"\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}", "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}", parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -335,6 +344,7 @@ fn test_four_slash_line_comment() {
/// doc comment /// doc comment
mod foo {} mod foo {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();
@ -360,6 +370,7 @@ fn foo()
for<'a> F: Fn(&'a str) for<'a> F: Fn(&'a str)
{} {}
"#, "#,
parser::Edition::CURRENT,
) )
.ok() .ok()
.unwrap(); .unwrap();

View File

@ -1054,6 +1054,7 @@ impl<N: AstNode + Clone> Indent for N {}
mod tests { mod tests {
use std::fmt; use std::fmt;
use parser::Edition;
use stdx::trim_indent; use stdx::trim_indent;
use test_utils::assert_eq_text; use test_utils::assert_eq_text;
@ -1062,7 +1063,7 @@ mod tests {
use super::*; use super::*;
fn ast_mut_from_text<N: AstNode>(text: &str) -> N { fn ast_mut_from_text<N: AstNode>(text: &str) -> N {
let parse = SourceFile::parse(text); let parse = SourceFile::parse(text, Edition::CURRENT);
parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update() parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update()
} }

View File

@ -89,6 +89,7 @@ fn test() {
else { "else" } else { "else" }
} }
"#, "#,
parser::Edition::CURRENT,
); );
let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap(); let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#); assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
@ -123,6 +124,7 @@ fn test() {
else { "else" } else { "else" }
} }
"#, "#,
parser::Edition::CURRENT,
); );
let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap(); let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#); assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
@ -386,7 +388,8 @@ pub fn is_standalone(&self) -> bool {
#[test] #[test]
fn test_literal_with_attr() { fn test_literal_with_attr() {
let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#); let parse =
ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#, parser::Edition::CURRENT);
let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap(); let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
assert_eq!(lit.token().text(), r#""Hello""#); assert_eq!(lit.token().text(), r#""Hello""#);
} }

View File

@ -11,7 +11,7 @@
//! term, it will be replaced with direct tree manipulation. //! term, it will be replaced with direct tree manipulation.
use itertools::Itertools; use itertools::Itertools;
use parser::T; use parser::{Edition, T};
use rowan::NodeOrToken; use rowan::NodeOrToken;
use stdx::{format_to, format_to_acc, never}; use stdx::{format_to, format_to_acc, never};
@ -1127,7 +1127,7 @@ pub fn token_tree(
#[track_caller] #[track_caller]
fn ast_from_text<N: AstNode>(text: &str) -> N { fn ast_from_text<N: AstNode>(text: &str) -> N {
let parse = SourceFile::parse(text); let parse = SourceFile::parse(text, Edition::CURRENT);
let node = match parse.tree().syntax().descendants().find_map(N::cast) { let node = match parse.tree().syntax().descendants().find_map(N::cast) {
Some(it) => it, Some(it) => it,
None => { None => {
@ -1153,12 +1153,13 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken {
pub mod tokens { pub mod tokens {
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use parser::Edition;
use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken}; use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| { pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
SourceFile::parse( SourceFile::parse(
"const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
) )
}); });
@ -1186,13 +1187,13 @@ pub fn single_space() -> SyntaxToken {
pub fn whitespace(text: &str) -> SyntaxToken { pub fn whitespace(text: &str) -> SyntaxToken {
assert!(text.trim().is_empty()); assert!(text.trim().is_empty());
let sf = SourceFile::parse(text).ok().unwrap(); let sf = SourceFile::parse(text, Edition::CURRENT).ok().unwrap();
sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap() sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
} }
pub fn doc_comment(text: &str) -> SyntaxToken { pub fn doc_comment(text: &str) -> SyntaxToken {
assert!(!text.trim().is_empty()); assert!(!text.trim().is_empty());
let sf = SourceFile::parse(text).ok().unwrap(); let sf = SourceFile::parse(text, Edition::CURRENT).ok().unwrap();
sf.syntax().first_child_or_token().unwrap().into_token().unwrap() sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
} }
@ -1240,7 +1241,7 @@ pub fn blank_line() -> SyntaxToken {
impl WsBuilder { impl WsBuilder {
pub fn new(text: &str) -> WsBuilder { pub fn new(text: &str) -> WsBuilder {
WsBuilder(SourceFile::parse(text).ok().unwrap()) WsBuilder(SourceFile::parse(text, Edition::CURRENT).ok().unwrap())
} }
pub fn ws(&self) -> SyntaxToken { pub fn ws(&self) -> SyntaxToken {
self.0.syntax().first_child_or_token().unwrap().into_token().unwrap() self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()

View File

@ -4,6 +4,7 @@
use std::str::{self, FromStr}; use std::str::{self, FromStr};
use parser::Edition;
use text_edit::Indel; use text_edit::Indel;
use crate::{validation, AstNode, SourceFile, TextRange}; use crate::{validation, AstNode, SourceFile, TextRange};
@ -14,7 +15,7 @@ fn check_file_invariants(file: &SourceFile) {
} }
pub fn check_parser(text: &str) { pub fn check_parser(text: &str) {
let file = SourceFile::parse(text); let file = SourceFile::parse(text, Edition::CURRENT);
check_file_invariants(&file.tree()); check_file_invariants(&file.tree());
} }
@ -48,11 +49,11 @@ pub fn from_data(data: &[u8]) -> Option<Self> {
#[allow(clippy::print_stderr)] #[allow(clippy::print_stderr)]
pub fn run(&self) { pub fn run(&self) {
let parse = SourceFile::parse(&self.text); let parse = SourceFile::parse(&self.text, Edition::CURRENT);
let new_parse = parse.reparse(&self.edit); let new_parse = parse.reparse(&self.edit, Edition::CURRENT);
check_file_invariants(&new_parse.tree()); check_file_invariants(&new_parse.tree());
assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text); assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
let full_reparse = SourceFile::parse(&self.edited_text); let full_reparse = SourceFile::parse(&self.edited_text, Edition::CURRENT);
for (a, b) in for (a, b) in
new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants()) new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
{ {

View File

@ -2,11 +2,13 @@
//! //!
//! Please avoid adding new usages of the functions in this module //! Please avoid adding new usages of the functions in this module
use parser::Edition;
use crate::{ast, AstNode}; use crate::{ast, AstNode};
pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> { pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
let s = s.trim(); let s = s.trim();
let file = ast::SourceFile::parse(&format!("const _: () = {s};")); let file = ast::SourceFile::parse(&format!("const _: () = {s};"), Edition::CURRENT);
let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?; let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
if expr.syntax().text() != s { if expr.syntax().text() != s {
return None; return None;

View File

@ -141,8 +141,8 @@ pub fn debug_dump(&self) -> String {
buf buf
} }
pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> { pub fn reparse(&self, indel: &Indel, edition: Edition) -> Parse<SourceFile> {
self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel)) self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel, edition))
} }
fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> { fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
@ -159,10 +159,10 @@ fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
}) })
} }
fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> { fn full_reparse(&self, indel: &Indel, edition: Edition) -> Parse<SourceFile> {
let mut text = self.tree().syntax().text().to_string(); let mut text = self.tree().syntax().text().to_string();
indel.apply(&mut text); indel.apply(&mut text);
SourceFile::parse(&text) SourceFile::parse(&text, edition)
} }
} }
@ -170,9 +170,9 @@ fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
pub use crate::ast::SourceFile; pub use crate::ast::SourceFile;
impl SourceFile { impl SourceFile {
pub fn parse(text: &str) -> Parse<SourceFile> { pub fn parse(text: &str, edition: Edition) -> Parse<SourceFile> {
let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered(); let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
let (green, errors) = parsing::parse_text(text, parser::Edition::CURRENT); let (green, errors) = parsing::parse_text(text, edition);
let root = SyntaxNode::new_root(green.clone()); let root = SyntaxNode::new_root(green.clone());
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
@ -340,7 +340,7 @@ fn foo() {
// //
// The `parse` method returns a `Parse` -- a pair of syntax tree and a list // The `parse` method returns a `Parse` -- a pair of syntax tree and a list
// of errors. That is, syntax tree is constructed even in presence of errors. // of errors. That is, syntax tree is constructed even in presence of errors.
let parse = SourceFile::parse(source_code); let parse = SourceFile::parse(source_code, parser::Edition::CURRENT);
assert!(parse.errors().is_empty()); assert!(parse.errors().is_empty());
// The `tree` method returns an owned syntax node of type `SourceFile`. // The `tree` method returns an owned syntax node of type `SourceFile`.

View File

@ -177,6 +177,7 @@ fn merge_errors(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use parser::Edition;
use test_utils::{assert_eq_text, extract_range}; use test_utils::{assert_eq_text, extract_range};
use super::*; use super::*;
@ -191,9 +192,9 @@ fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
after after
}; };
let fully_reparsed = SourceFile::parse(&after); let fully_reparsed = SourceFile::parse(&after, Edition::CURRENT);
let incrementally_reparsed: Parse<SourceFile> = { let incrementally_reparsed: Parse<SourceFile> = {
let before = SourceFile::parse(&before); let before = SourceFile::parse(&before, Edition::CURRENT);
let (green, new_errors, range) = incremental_reparse( let (green, new_errors, range) = incremental_reparse(
before.tree().syntax(), before.tree().syntax(),
&edit, &edit,

View File

@ -120,7 +120,7 @@ fn from(ptr: AstPtr<N>) -> SyntaxNodePtr {
fn test_local_syntax_ptr() { fn test_local_syntax_ptr() {
use crate::{ast, AstNode, SourceFile}; use crate::{ast, AstNode, SourceFile};
let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap(); let file = SourceFile::parse("struct Foo { f: u32, }", parser::Edition::CURRENT).ok().unwrap();
let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap(); let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
let ptr = SyntaxNodePtr::new(field.syntax()); let ptr = SyntaxNodePtr::new(field.syntax());
let field_syntax = ptr.to_node(file.syntax()); let field_syntax = ptr.to_node(file.syntax());

View File

@ -5,6 +5,7 @@
use ast::HasName; use ast::HasName;
use expect_test::expect_file; use expect_test::expect_file;
use parser::Edition;
use rayon::prelude::*; use rayon::prelude::*;
use stdx::format_to_acc; use stdx::format_to_acc;
use test_utils::{bench, bench_fixture, project_root}; use test_utils::{bench, bench_fixture, project_root};
@ -19,7 +20,7 @@ fn main() {
} }
"#; "#;
let parse = SourceFile::parse(code); let parse = SourceFile::parse(code, Edition::CURRENT);
// eprintln!("{:#?}", parse.syntax_node()); // eprintln!("{:#?}", parse.syntax_node());
assert!(parse.ok().is_ok()); assert!(parse.ok().is_ok());
} }
@ -33,7 +34,7 @@ fn benchmark_parser() {
let data = bench_fixture::glorious_old_parser(); let data = bench_fixture::glorious_old_parser();
let tree = { let tree = {
let _b = bench("parsing"); let _b = bench("parsing");
let p = SourceFile::parse(&data); let p = SourceFile::parse(&data, Edition::CURRENT);
assert!(p.errors().is_empty()); assert!(p.errors().is_empty());
assert_eq!(p.tree().syntax.text_range().len(), 352474.into()); assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
p.tree() p.tree()
@ -50,7 +51,7 @@ fn benchmark_parser() {
#[test] #[test]
fn validation_tests() { fn validation_tests() {
dir_tests(&test_data_dir(), &["parser/validation"], "rast", |text, path| { dir_tests(&test_data_dir(), &["parser/validation"], "rast", |text, path| {
let parse = SourceFile::parse(text); let parse = SourceFile::parse(text, Edition::CURRENT);
let errors = parse.errors(); let errors = parse.errors();
assert_errors_are_present(&errors, path); assert_errors_are_present(&errors, path);
parse.debug_dump() parse.debug_dump()
@ -110,7 +111,7 @@ fn self_hosting_parsing() {
.into_par_iter() .into_par_iter()
.filter_map(|file| { .filter_map(|file| {
let text = read_text(&file); let text = read_text(&file);
match SourceFile::parse(&text).ok() { match SourceFile::parse(&text, Edition::CURRENT).ok() {
Ok(_) => None, Ok(_) => None,
Err(err) => Some((file, err)), Err(err) => Some((file, err)),
} }