Auto merge of #16450 - Urhengulas:edition-aware-parser, r=Veykril

internal: Prepare parser interface for editions
This commit is contained in:
bors 2024-04-14 13:34:12 +00:00
commit 74cef6d79b
24 changed files with 140 additions and 102 deletions

2
Cargo.lock generated
View File

@ -551,6 +551,7 @@ dependencies = [
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit", "limit",
"mbe", "mbe",
"parser",
"rustc-hash", "rustc-hash",
"smallvec", "smallvec",
"span", "span",
@ -1821,6 +1822,7 @@ dependencies = [
"salsa", "salsa",
"stdx", "stdx",
"syntax", "syntax",
"text-size",
"vfs", "vfs",
] ]

View File

@ -316,8 +316,11 @@ fn expand(
_: Span, _: Span,
_: Span, _: Span,
) -> Result<Subtree, ProcMacroExpansionError> { ) -> Result<Subtree, ProcMacroExpansionError> {
let (parse, _) = let (parse, _) = ::mbe::token_tree_to_syntax_node(
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); subtree,
::mbe::TopEntryPoint::MacroItems,
span::Edition::CURRENT,
);
if parse.errors().is_empty() { if parse.errors().is_empty() {
Ok(subtree.clone()) Ok(subtree.clone())
} else { } else {

View File

@ -534,8 +534,7 @@ fn inject_prelude(&mut self) {
Edition::Edition2015 => name![rust_2015], Edition::Edition2015 => name![rust_2015],
Edition::Edition2018 => name![rust_2018], Edition::Edition2018 => name![rust_2018],
Edition::Edition2021 => name![rust_2021], Edition::Edition2021 => name![rust_2021],
// FIXME: update this when rust_2024 exists Edition::Edition2024 => name![rust_2024],
Edition::Edition2024 => name![rust_2021],
}; };
let path_kind = match self.def_map.data.edition { let path_kind = match self.def_map.data.edition {

View File

@ -32,6 +32,7 @@ tt.workspace = true
mbe.workspace = true mbe.workspace = true
limit.workspace = true limit.workspace = true
span.workspace = true span.workspace = true
parser.workspace = true
[dev-dependencies] [dev-dependencies]
expect-test = "1.4.0" expect-test = "1.4.0"

View File

@ -204,7 +204,11 @@ struct BasicAdtInfo {
} }
fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> { fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
let (parsed, tm) = &mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems); let (parsed, tm) = &mbe::token_tree_to_syntax_node(
tt,
mbe::TopEntryPoint::MacroItems,
parser::Edition::CURRENT,
);
let macro_items = ast::MacroItems::cast(parsed.syntax_node()) let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other("invalid item definition"))?; .ok_or_else(|| ExpandError::other("invalid item definition"))?;
let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?; let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;

View File

@ -676,7 +676,7 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => mbe::TopEntryPoint::Type, ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr, ExpandTo::Expr => mbe::TopEntryPoint::Expr,
}; };
mbe::token_tree_to_syntax_node(tt, entry_point) mbe::token_tree_to_syntax_node(tt, entry_point, parser::Edition::CURRENT)
} }
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {

View File

@ -417,7 +417,11 @@ fn check(ra_fixture: &str, mut expect: Expect) {
expect.assert_eq(&actual); expect.assert_eq(&actual);
// the fixed-up tree should be syntactically valid // the fixed-up tree should be syntactically valid
let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems); let (parse, _) = mbe::token_tree_to_syntax_node(
&tt,
::mbe::TopEntryPoint::MacroItems,
parser::Edition::CURRENT,
);
assert!( assert!(
parse.errors().is_empty(), parse.errors().is_empty(),
"parse has syntax errors. parse tree:\n{:#?}", "parse has syntax errors. parse tree:\n{:#?}",

View File

@ -303,6 +303,7 @@ macro_rules! known_names {
rust_2015, rust_2015,
rust_2018, rust_2018,
rust_2021, rust_2021,
rust_2024,
v1, v1,
new_display, new_display,
new_debug, new_debug,

View File

@ -1157,7 +1157,7 @@ fn iterate_trait_method_candidates(
{ {
// FIXME: this should really be using the edition of the method name's span, in case it // FIXME: this should really be using the edition of the method name's span, in case it
// comes from a macro // comes from a macro
if db.crate_graph()[krate].edition < Edition::Edition2021 { if db.crate_graph()[krate].edition < Edition::CURRENT {
continue; continue;
} }
} }

View File

@ -743,7 +743,9 @@ fn match_meta_var(
) -> ExpandResult<Option<Fragment>> { ) -> ExpandResult<Option<Fragment>> {
let fragment = match kind { let fragment = match kind {
MetaVarKind::Path => { MetaVarKind::Path => {
return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| { return input
.expect_fragment(parser::PrefixEntryPoint::Path, parser::Edition::CURRENT)
.map(|it| {
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
}); });
} }
@ -770,7 +772,9 @@ fn match_meta_var(
} }
_ => {} _ => {}
}; };
return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| { return input
.expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::CURRENT)
.map(|tt| {
tt.map(|tt| match tt { tt.map(|tt| match tt {
tt::TokenTree::Leaf(leaf) => tt::Subtree { tt::TokenTree::Leaf(leaf) => tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
@ -819,7 +823,7 @@ fn match_meta_var(
return tt_result.map(|it| Some(Fragment::Tokens(it))).into(); return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
} }
}; };
input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens)) input.expect_fragment(fragment, parser::Edition::CURRENT).map(|it| it.map(Fragment::Tokens))
} }
fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {

View File

@ -119,6 +119,7 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
pub fn token_tree_to_syntax_node<Ctx>( pub fn token_tree_to_syntax_node<Ctx>(
tt: &tt::Subtree<SpanData<Ctx>>, tt: &tt::Subtree<SpanData<Ctx>>,
entry_point: parser::TopEntryPoint, entry_point: parser::TopEntryPoint,
edition: parser::Edition,
) -> (Parse<SyntaxNode>, SpanMap<Ctx>) ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
where where
SpanData<Ctx>: Copy + fmt::Debug, SpanData<Ctx>: Copy + fmt::Debug,
@ -131,7 +132,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
_ => TokenBuffer::from_subtree(tt), _ => TokenBuffer::from_subtree(tt),
}; };
let parser_input = to_parser_input(&buffer); let parser_input = to_parser_input(&buffer);
let parser_output = entry_point.parse(&parser_input); let parser_output = entry_point.parse(&parser_input, edition);
let mut tree_sink = TtTreeSink::new(buffer.begin()); let mut tree_sink = TtTreeSink::new(buffer.begin());
for event in parser_output.iter() { for event in parser_output.iter() {
match event { match event {
@ -194,7 +195,8 @@ pub fn parse_exprs_with_sep<S>(tt: &tt::Subtree<S>, sep: char, span: S) -> Vec<t
let mut res = Vec::new(); let mut res = Vec::new();
while iter.peek_n(0).is_some() { while iter.peek_n(0).is_some() {
let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr); let expanded =
iter.expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::CURRENT);
res.push(match expanded.value { res.push(match expanded.value {
None => break, None => break,

View File

@ -140,10 +140,11 @@ impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> {
pub(crate) fn expect_fragment( pub(crate) fn expect_fragment(
&mut self, &mut self,
entry_point: parser::PrefixEntryPoint, entry_point: parser::PrefixEntryPoint,
edition: parser::Edition,
) -> ExpandResult<Option<tt::TokenTree<S>>> { ) -> ExpandResult<Option<tt::TokenTree<S>>> {
let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice()); let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
let parser_input = to_parser_input(&buffer); let parser_input = to_parser_input(&buffer);
let tree_traversal = entry_point.parse(&parser_input); let tree_traversal = entry_point.parse(&parser_input, edition);
let mut cursor = buffer.begin(); let mut cursor = buffer.begin();
let mut error = false; let mut error = false;
for step in tree_traversal.iter() { for step in tree_traversal.iter() {

View File

@ -0,0 +1,55 @@
//! The edition of the Rust language used in a crate.
// Ideally this would be defined in the span crate, but the dependency chain is all over the place
// with respect to the span, parser and syntax crates.
use std::fmt;
/// A Rust language edition, as selectable per crate (`edition = "..."` in `Cargo.toml`).
///
/// Variant order matters: the derived `PartialOrd`/`Ord` give chronological
/// ordering, so `Edition2015 < Edition2018 < Edition2021 < Edition2024`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Edition {
    Edition2015,
    Edition2018,
    Edition2021,
    Edition2024,
}

impl Edition {
    /// The latest stable edition the tooling targets.
    pub const CURRENT: Edition = Edition::Edition2021;
    /// The edition assumed when a crate does not specify one.
    pub const DEFAULT: Edition = Edition::Edition2015;

    /// Canonical year string for this edition ("2015", "2018", ...).
    fn as_str(self) -> &'static str {
        match self {
            Edition::Edition2015 => "2015",
            Edition::Edition2018 => "2018",
            Edition::Edition2021 => "2021",
            Edition::Edition2024 => "2024",
        }
    }
}

/// Error returned by [`Edition`]'s `FromStr` impl for an unrecognized edition string.
#[derive(Debug)]
pub struct ParseEditionError {
    // The string that failed to parse, echoed back in the Display message.
    invalid_input: String,
}

impl fmt::Display for ParseEditionError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "invalid edition: {:?}", self.invalid_input)
    }
}

impl std::error::Error for ParseEditionError {}

impl std::str::FromStr for Edition {
    type Err = ParseEditionError;

    /// Parses a bare edition year ("2015", "2018", "2021", "2024").
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "2015" => Ok(Edition::Edition2015),
            "2018" => Ok(Edition::Edition2018),
            "2021" => Ok(Edition::Edition2021),
            "2024" => Ok(Edition::Edition2024),
            other => Err(ParseEditionError { invalid_input: other.to_owned() }),
        }
    }
}

impl fmt::Display for Edition {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

View File

@ -26,6 +26,7 @@
#[cfg(feature = "in-rust-tree")] #[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer; extern crate rustc_lexer;
mod edition;
mod event; mod event;
mod grammar; mod grammar;
mod input; mod input;
@ -42,6 +43,7 @@
pub(crate) use token_set::TokenSet; pub(crate) use token_set::TokenSet;
pub use crate::{ pub use crate::{
edition::Edition,
input::Input, input::Input,
lexed_str::LexedStr, lexed_str::LexedStr,
output::{Output, Step}, output::{Output, Step},
@ -86,7 +88,7 @@ pub enum TopEntryPoint {
} }
impl TopEntryPoint { impl TopEntryPoint {
pub fn parse(&self, input: &Input) -> Output { pub fn parse(&self, input: &Input, edition: Edition) -> Output {
let _p = tracing::span!(tracing::Level::INFO, "TopEntryPoint::parse", ?self).entered(); let _p = tracing::span!(tracing::Level::INFO, "TopEntryPoint::parse", ?self).entered();
let entry_point: fn(&'_ mut parser::Parser<'_>) = match self { let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
TopEntryPoint::SourceFile => grammar::entry::top::source_file, TopEntryPoint::SourceFile => grammar::entry::top::source_file,
@ -98,7 +100,7 @@ pub fn parse(&self, input: &Input) -> Output {
TopEntryPoint::MetaItem => grammar::entry::top::meta_item, TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input, TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input,
}; };
let mut p = parser::Parser::new(input); let mut p = parser::Parser::new(input, edition);
entry_point(&mut p); entry_point(&mut p);
let events = p.finish(); let events = p.finish();
let res = event::process(events); let res = event::process(events);
@ -150,7 +152,7 @@ pub enum PrefixEntryPoint {
} }
impl PrefixEntryPoint { impl PrefixEntryPoint {
pub fn parse(&self, input: &Input) -> Output { pub fn parse(&self, input: &Input, edition: Edition) -> Output {
let entry_point: fn(&'_ mut parser::Parser<'_>) = match self { let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
PrefixEntryPoint::Vis => grammar::entry::prefix::vis, PrefixEntryPoint::Vis => grammar::entry::prefix::vis,
PrefixEntryPoint::Block => grammar::entry::prefix::block, PrefixEntryPoint::Block => grammar::entry::prefix::block,
@ -163,7 +165,7 @@ pub fn parse(&self, input: &Input) -> Output {
PrefixEntryPoint::Item => grammar::entry::prefix::item, PrefixEntryPoint::Item => grammar::entry::prefix::item,
PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item, PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
}; };
let mut p = parser::Parser::new(input); let mut p = parser::Parser::new(input, edition);
entry_point(&mut p); entry_point(&mut p);
let events = p.finish(); let events = p.finish();
event::process(events) event::process(events)
@ -187,9 +189,9 @@ pub fn for_node(
/// ///
/// Tokens must start with `{`, end with `}` and form a valid brace /// Tokens must start with `{`, end with `}` and form a valid brace
/// sequence. /// sequence.
pub fn parse(self, tokens: &Input) -> Output { pub fn parse(self, tokens: &Input, edition: Edition) -> Output {
let Reparser(r) = self; let Reparser(r) = self;
let mut p = parser::Parser::new(tokens); let mut p = parser::Parser::new(tokens, edition);
r(&mut p); r(&mut p);
let events = p.finish(); let events = p.finish();
event::process(events) event::process(events)

View File

@ -8,6 +8,7 @@
use crate::{ use crate::{
event::Event, event::Event,
input::Input, input::Input,
Edition,
SyntaxKind::{self, EOF, ERROR, TOMBSTONE}, SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
TokenSet, T, TokenSet, T,
}; };
@ -26,13 +27,14 @@ pub(crate) struct Parser<'t> {
pos: usize, pos: usize,
events: Vec<Event>, events: Vec<Event>,
steps: Cell<u32>, steps: Cell<u32>,
_edition: Edition,
} }
static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000); static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
impl<'t> Parser<'t> { impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input) -> Parser<'t> { pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) } Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), _edition: edition }
} }
pub(crate) fn finish(self) -> Vec<Event> { pub(crate) fn finish(self) -> Vec<Event> {

View File

@ -88,7 +88,7 @@ fn parse_inline_err() {
fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
let lexed = LexedStr::new(text); let lexed = LexedStr::new(text);
let input = lexed.to_input(); let input = lexed.to_input();
let output = entry.parse(&input); let output = entry.parse(&input, crate::Edition::CURRENT);
let mut buf = String::new(); let mut buf = String::new();
let mut errors = Vec::new(); let mut errors = Vec::new();

View File

@ -86,7 +86,7 @@ fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
let input = lexed.to_input(); let input = lexed.to_input();
let mut n_tokens = 0; let mut n_tokens = 0;
for step in entry.parse(&input).iter() { for step in entry.parse(&input, crate::Edition::CURRENT).iter() {
match step { match step {
Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize, Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
Step::FloatSplit { .. } => n_tokens += 1, Step::FloatSplit { .. } => n_tokens += 1,

View File

@ -14,6 +14,7 @@ la-arena.workspace = true
salsa.workspace = true salsa.workspace = true
rustc-hash.workspace = true rustc-hash.workspace = true
hashbrown.workspace = true hashbrown.workspace = true
text-size.workspace = true
# local deps # local deps
vfs.workspace = true vfs.workspace = true

View File

@ -13,59 +13,10 @@
map::{RealSpanMap, SpanMap}, map::{RealSpanMap, SpanMap},
}; };
pub use syntax::{TextRange, TextSize}; pub use syntax::Edition;
pub use text_size::{TextRange, TextSize};
pub use vfs::FileId; pub use vfs::FileId;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Edition {
Edition2015,
Edition2018,
Edition2021,
Edition2024,
}
impl Edition {
pub const CURRENT: Edition = Edition::Edition2021;
pub const DEFAULT: Edition = Edition::Edition2015;
}
#[derive(Debug)]
pub struct ParseEditionError {
invalid_input: String,
}
impl std::error::Error for ParseEditionError {}
impl fmt::Display for ParseEditionError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "invalid edition: {:?}", self.invalid_input)
}
}
impl std::str::FromStr for Edition {
type Err = ParseEditionError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let res = match s {
"2015" => Edition::Edition2015,
"2018" => Edition::Edition2018,
"2021" => Edition::Edition2021,
"2024" => Edition::Edition2024,
_ => return Err(ParseEditionError { invalid_input: s.to_owned() }),
};
Ok(res)
}
}
impl fmt::Display for Edition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
Edition::Edition2015 => "2015",
Edition::Edition2018 => "2018",
Edition::Edition2021 => "2021",
Edition::Edition2024 => "2024",
})
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct FilePosition { pub struct FilePosition {
pub file_id: FileId, pub file_id: FileId,

View File

@ -4,11 +4,11 @@
use std::{fmt, hash::Hash}; use std::{fmt, hash::Hash};
use stdx::{always, itertools::Itertools}; use stdx::{always, itertools::Itertools};
use syntax::{TextRange, TextSize};
use vfs::FileId; use vfs::FileId;
use crate::{ use crate::{
ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID, ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, TextRange, TextSize,
ROOT_ERASED_FILE_AST_ID,
}; };
/// Maps absolute text ranges for the corresponding file to the relevant span data. /// Maps absolute text ranges for the corresponding file to the relevant span data.

View File

@ -60,7 +60,7 @@
}, },
token_text::TokenText, token_text::TokenText,
}; };
pub use parser::{SyntaxKind, T}; pub use parser::{Edition, SyntaxKind, T};
pub use rowan::{ pub use rowan::{
api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize,
TokenAtOffset, WalkEvent, TokenAtOffset, WalkEvent,
@ -172,7 +172,7 @@ fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
impl SourceFile { impl SourceFile {
pub fn parse(text: &str) -> Parse<SourceFile> { pub fn parse(text: &str) -> Parse<SourceFile> {
let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered(); let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
let (green, errors) = parsing::parse_text(text); let (green, errors) = parsing::parse_text(text, parser::Edition::CURRENT);
let root = SyntaxNode::new_root(green.clone()); let root = SyntaxNode::new_root(green.clone());
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
@ -185,7 +185,10 @@ pub fn parse(text: &str) -> Parse<SourceFile> {
} }
impl ast::TokenTree { impl ast::TokenTree {
pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> { pub fn reparse_as_comma_separated_expr(
self,
edition: parser::Edition,
) -> Parse<ast::MacroEagerInput> {
let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
let mut parser_input = parser::Input::default(); let mut parser_input = parser::Input::default();
@ -219,7 +222,7 @@ pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> {
} }
} }
let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input); let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input, edition);
let mut tokens = let mut tokens =
self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);

View File

@ -9,11 +9,11 @@
pub(crate) use crate::parsing::reparsing::incremental_reparse; pub(crate) use crate::parsing::reparsing::incremental_reparse;
pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
let _p = tracing::span!(tracing::Level::INFO, "parse_text").entered(); let _p = tracing::span!(tracing::Level::INFO, "parse_text").entered();
let lexed = parser::LexedStr::new(text); let lexed = parser::LexedStr::new(text);
let parser_input = lexed.to_input(); let parser_input = lexed.to_input();
let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input); let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
let (node, errors, _eof) = build_tree(lexed, parser_output); let (node, errors, _eof) = build_tree(lexed, parser_output);
(node, errors) (node, errors)
} }

View File

@ -26,7 +26,9 @@ pub(crate) fn incremental_reparse(
return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
} }
if let Some((green, new_errors, old_range)) = reparse_block(node, edit) { if let Some((green, new_errors, old_range)) =
reparse_block(node, edit, parser::Edition::CURRENT)
{
return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
} }
None None
@ -84,6 +86,7 @@ fn reparse_token(
fn reparse_block( fn reparse_block(
root: &SyntaxNode, root: &SyntaxNode,
edit: &Indel, edit: &Indel,
edition: parser::Edition,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let (node, reparser) = find_reparsable_node(root, edit.delete)?; let (node, reparser) = find_reparsable_node(root, edit.delete)?;
let text = get_text_after_edit(node.clone().into(), edit); let text = get_text_after_edit(node.clone().into(), edit);
@ -94,7 +97,7 @@ fn reparse_block(
return None; return None;
} }
let tree_traversal = reparser.parse(&parser_input); let tree_traversal = reparser.parse(&parser_input, edition);
let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal); let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);

View File

@ -260,7 +260,7 @@ pub fn parse_with_proc_macros(
let core_crate = crate_graph.add_crate_root( let core_crate = crate_graph.add_crate_root(
core_file, core_file,
Edition::Edition2021, Edition::CURRENT,
Some(CrateDisplayName::from_canonical_name("core".to_owned())), Some(CrateDisplayName::from_canonical_name("core".to_owned())),
None, None,
Default::default(), Default::default(),
@ -299,7 +299,7 @@ pub fn parse_with_proc_macros(
let proc_macros_crate = crate_graph.add_crate_root( let proc_macros_crate = crate_graph.add_crate_root(
proc_lib_file, proc_lib_file,
Edition::Edition2021, Edition::CURRENT,
Some(CrateDisplayName::from_canonical_name("proc_macros".to_owned())), Some(CrateDisplayName::from_canonical_name("proc_macros".to_owned())),
None, None,
Default::default(), Default::default(),