add TopEntryPoint

Aleksey Kladov  2021-12-27 17:54:51 +03:00
parent 8e7fc7be65
commit afffa096f6
8 changed files with 96 additions and 52 deletions
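In short: entry points that are meant to consume the whole input now live in a dedicated `TopEntryPoint` enum with a `parse` method, and callers are migrated off `ParserEntryPoint` variants such as `Items` and `Statements`. A minimal before/after sketch of the call-site change, assuming the crate layout at this revision (the helper name is hypothetical, not part of the diff):

fn parse_expr_fragment(input: &parser::Input) -> parser::Output {
    // Before: the entry point was picked by passing an enum value to a free function.
    // parser::parse(input, parser::ParserEntryPoint::Expr)

    // After: the entry point itself drives parsing of the whole input.
    parser::TopEntryPoint::Expr.parse(input)
}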

View File

@@ -714,8 +714,7 @@ fn from_tt(
         hygiene: &Hygiene,
         id: AttrId,
     ) -> Option<Attr> {
-        let (parse, _) =
-            mbe::token_tree_to_syntax_node(tt, mbe::ParserEntryPoint::MetaItem).ok()?;
+        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem).ok()?;
         let ast = ast::Meta::cast(parse.syntax_node())?;
         Self::from_src(db, ast, hygiene, id)

View File

@@ -72,7 +72,7 @@ struct BasicAdtInfo {
 }
 fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
-    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::ParserEntryPoint::Items)?; // FragmentKind::Items doesn't parse attrs?
+    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems)?; // FragmentKind::Items doesn't parse attrs?
     let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
         debug!("derive node didn't parse");
         mbe::ExpandError::UnexpectedToken

View File

@@ -497,11 +497,11 @@ fn token_tree_to_syntax_node(
     expand_to: ExpandTo,
 ) -> Result<(Parse<SyntaxNode>, mbe::TokenMap), ExpandError> {
     let entry_point = match expand_to {
-        ExpandTo::Statements => mbe::ParserEntryPoint::Statements,
-        ExpandTo::Items => mbe::ParserEntryPoint::Items,
-        ExpandTo::Pattern => mbe::ParserEntryPoint::Pattern,
-        ExpandTo::Type => mbe::ParserEntryPoint::Type,
-        ExpandTo::Expr => mbe::ParserEntryPoint::Expr,
+        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
+        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
+        ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
+        ExpandTo::Type => mbe::TopEntryPoint::Type,
+        ExpandTo::Expr => mbe::TopEntryPoint::Expr,
     };
     mbe::token_tree_to_syntax_node(tt, entry_point)
 }

View File

@@ -131,7 +131,7 @@ pub fn expand_eager_macro(
     let arg_file_id = arg_id;
     let parsed_args = diagnostic_sink
-        .result(mbe::token_tree_to_syntax_node(&parsed_args, mbe::ParserEntryPoint::Expr))?
+        .result(mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr))?
         .0;
     let result = eager_macro_recur(
         db,

View File

@@ -24,7 +24,7 @@
 };
 // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
-pub use ::parser::ParserEntryPoint;
+pub use ::parser::TopEntryPoint;
 pub use tt::{Delimiter, DelimiterKind, Punct};
 #[derive(Debug, PartialEq, Eq, Clone)]

View File

@@ -9,9 +9,7 @@
 };
 use tt::buffer::{Cursor, TokenBuffer};
-use crate::{
-    to_parser_input::to_parser_input, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
-};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, ExpandError, TokenMap};
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
@@ -46,7 +44,7 @@ pub fn syntax_node_to_token_tree_censored(
 pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
-    entry_point: ParserEntryPoint,
+    entry_point: parser::TopEntryPoint,
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
     let buffer = match tt {
         tt::Subtree { delimiter: None, token_trees } => {
@@ -55,7 +53,7 @@ pub fn token_tree_to_syntax_node(
         _ => TokenBuffer::from_subtree(tt),
     };
     let parser_input = to_parser_input(&buffer);
-    let parser_output = parser::parse(&parser_input, entry_point);
+    let parser_output = entry_point.parse(&parser_input);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     for event in parser_output.iter() {
         match event {
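With the re-export in `mbe` switched to `TopEntryPoint`, downstream crates pass the new enum straight through `token_tree_to_syntax_node`. A hedged sketch of such a caller, mirroring the hir_expand hunks above; the function and binding names are illustrative, and `SyntaxNode` is assumed to be the `syntax` crate type used in the signatures shown here:

fn subtree_to_expr_node(tt: &tt::Subtree) -> Result<syntax::SyntaxNode, mbe::ExpandError> {
    // Parse the whole token tree as an expression; the accompanying token map
    // (which relates result tokens back to `tt`) is discarded in this sketch.
    let (parse, _token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::Expr)?;
    Ok(parse.syntax_node())
}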

View File

@@ -83,43 +83,47 @@ pub(crate) fn meta_item(p: &mut Parser) {
             attributes::meta(p);
         }
     }
+    pub(crate) mod top {
+        use super::*;
+        pub(crate) fn source_file(p: &mut Parser) {
+            let m = p.start();
+            p.eat(SHEBANG);
+            items::mod_contents(p, false);
+            m.complete(p, SOURCE_FILE);
+        }
+        pub(crate) fn macro_stmts(p: &mut Parser) {
+            let m = p.start();
+            while !p.at(EOF) {
+                if p.at(T![;]) {
+                    p.bump(T![;]);
+                    continue;
+                }
+                expressions::stmt(p, expressions::StmtWithSemi::Optional, true);
+            }
+            m.complete(p, MACRO_STMTS);
+        }
+        pub(crate) fn macro_items(p: &mut Parser) {
+            let m = p.start();
+            items::mod_contents(p, false);
+            m.complete(p, MACRO_ITEMS);
+        }
+    }
 }
 pub(crate) mod entry_points {
     use super::*;
-    pub(crate) fn source_file(p: &mut Parser) {
-        let m = p.start();
-        p.eat(SHEBANG);
-        items::mod_contents(p, false);
-        m.complete(p, SOURCE_FILE);
-    }
     pub(crate) fn stmt_optional_semi(p: &mut Parser) {
         expressions::stmt(p, expressions::StmtWithSemi::Optional, false);
     }
-    pub(crate) fn macro_items(p: &mut Parser) {
-        let m = p.start();
-        items::mod_contents(p, false);
-        m.complete(p, MACRO_ITEMS);
-    }
-    pub(crate) fn macro_stmts(p: &mut Parser) {
-        let m = p.start();
-        while !p.at(EOF) {
-            if p.at(T![;]) {
-                p.bump(T![;]);
-                continue;
-            }
-            expressions::stmt(p, expressions::StmtWithSemi::Optional, true);
-        }
-        m.complete(p, MACRO_STMTS);
-    }
     pub(crate) fn attr(p: &mut Parser) {
         attributes::outer_attrs(p);
     }

View File

@@ -41,7 +41,7 @@
     syntax_kind::SyntaxKind,
 };
-/// Parse a syntactic construct at the *start* of the input.
+/// Parse a prefix of the input as a given syntactic construct.
 ///
 /// This is used by macro-by-example parser to implement things like `$i:item`
 /// and the naming of variants follows the naming of macro fragments.
@@ -83,13 +83,61 @@ pub fn parse(&self, input: &Input) -> Output {
     }
 }
+/// Parse the whole of the input as a given syntactic construct.
+///
+/// This covers two main use-cases:
+///
+/// * Parsing a Rust file.
+/// * Parsing a result of macro expansion.
+///
+/// That is, for something like
+///
+/// ```
+/// quick_check! {
+///     fn prop() {}
+/// }
+/// ```
+///
+/// the input to the macro will be parsed with [`PrefixEntryPoint::Item`], and
+/// the result will be [`TopEntryPoint::Items`].
+///
+/// This *should* (but currently doesn't) guarantee that all input is consumed.
+#[derive(Debug)]
+pub enum TopEntryPoint {
+    SourceFile,
+    MacroStmts,
+    MacroItems,
+    Pattern,
+    Type,
+    Expr,
+    MetaItem,
+}
+impl TopEntryPoint {
+    pub fn parse(&self, input: &Input) -> Output {
+        let entry_point: fn(&'_ mut parser::Parser) = match self {
+            TopEntryPoint::SourceFile => grammar::entry::top::source_file,
+            TopEntryPoint::MacroStmts => grammar::entry::top::macro_stmts,
+            TopEntryPoint::MacroItems => grammar::entry::top::macro_items,
+            // FIXME
+            TopEntryPoint::Pattern => grammar::entry::prefix::pat,
+            TopEntryPoint::Type => grammar::entry::prefix::ty,
+            TopEntryPoint::Expr => grammar::entry::prefix::expr,
+            TopEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
+        };
+        let mut p = parser::Parser::new(input);
+        entry_point(&mut p);
+        let events = p.finish();
+        event::process(events)
+    }
+}
 /// rust-analyzer parser allows you to choose one of the possible entry points.
 ///
 /// The primary consumer of this API are declarative macros, `$x:expr` matchers
 /// are implemented by calling into the parser with non-standard entry point.
 #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
 pub enum ParserEntryPoint {
-    SourceFile,
     Path,
     Expr,
     StatementOptionalSemi,
@@ -97,14 +145,12 @@ pub enum ParserEntryPoint {
     Pattern,
     Item,
     MetaItem,
-    Items,
-    Statements,
     Attr,
 }
 /// Parse given tokens into the given sink as a rust file.
-pub fn parse_source_file(inp: &Input) -> Output {
-    parse(inp, ParserEntryPoint::SourceFile)
+pub fn parse_source_file(input: &Input) -> Output {
+    TopEntryPoint::SourceFile.parse(input)
 }
 /// Parses the given [`Input`] into [`Output`] assuming that the top-level
@@ -117,7 +163,6 @@ pub fn parse_source_file(inp: &Input) -> Output {
 /// indices between the four stages.
 pub fn parse(inp: &Input, entry_point: ParserEntryPoint) -> Output {
     let entry_point: fn(&'_ mut parser::Parser) = match entry_point {
-        ParserEntryPoint::SourceFile => grammar::entry_points::source_file,
         ParserEntryPoint::Path => grammar::entry::prefix::path,
         ParserEntryPoint::Expr => grammar::entry::prefix::expr,
         ParserEntryPoint::Type => grammar::entry::prefix::ty,
@@ -125,8 +170,6 @@ pub fn parse(inp: &Input, entry_point: ParserEntryPoint) -> Output {
         ParserEntryPoint::Item => grammar::entry::prefix::item,
         ParserEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
         ParserEntryPoint::StatementOptionalSemi => grammar::entry_points::stmt_optional_semi,
-        ParserEntryPoint::Items => grammar::entry_points::macro_items,
-        ParserEntryPoint::Statements => grammar::entry_points::macro_stmts,
         ParserEntryPoint::Attr => grammar::entry_points::attr,
     };
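The new doc comment above draws the line between prefix and top entry points: a `$i:item` matcher parses just one item, while a top entry point must account for the entire input. A small sketch of how code inside the parser crate might use this for macro expansion results (a hypothetical helper; `expansion_input` is assumed to be an already-built `Input`):

fn parse_expansion_as_items(expansion_input: &Input) -> Output {
    // The expansion must parse in full as items, so it goes through the
    // MacroItems top entry point rather than a prefix entry point.
    TopEntryPoint::MacroItems.parse(expansion_input)
}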