dead code

Aleksey Kladov 2021-12-28 19:13:30 +03:00
parent bfc263f1f9
commit 660cf34d8c
8 changed files with 9 additions and 70 deletions

View File

@@ -36,9 +36,8 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
     let input_expressions = input_expressions
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-        .map(|mut tokens| ast::Expr::parse(&tokens.join("")))
-        .collect::<Result<Vec<ast::Expr>, _>>()
-        .ok()?;
+        .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+        .collect::<Option<Vec<ast::Expr>>>()?;
     let parent = macro_call.syntax().parent()?;
     let (range, text) = match &*input_expressions {
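
Note on the hunk above (not part of the diff): the assist now parses each macro argument with syntax::hacks::parse_expr_from_str, which returns Option<ast::Expr>, so the old Result-based collect plus .ok()? becomes a single Option collect that short-circuits on the first argument that fails to parse. A minimal, self-contained sketch of that idiom in plain Rust, with std's str::parse standing in for parse_expr_from_str:

    fn parse_all(args: &[&str]) -> Option<Vec<i32>> {
        // Collecting Option<T> items into Option<Vec<T>> bails out with None
        // as soon as any single item fails to parse.
        args.iter().map(|s| s.parse::<i32>().ok()).collect()
    }

    fn main() {
        assert_eq!(parse_all(&["1", "2"]), Some(vec![1, 2]));
        assert_eq!(parse_all(&["1", "nope"]), None);
    }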

View File

@@ -117,18 +117,6 @@ pub(crate) mod entry {
     }
 }
-pub(crate) mod entry_points {
-    use super::*;
-    pub(crate) fn stmt_optional_semi(p: &mut Parser) {
-        expressions::stmt(p, expressions::StmtWithSemi::Optional, false);
-    }
-    pub(crate) fn attr(p: &mut Parser) {
-        attributes::outer_attrs(p);
-    }
-}
 pub(crate) fn reparser(
     node: SyntaxKind,
     first_child: Option<SyntaxKind>,

View File

@@ -132,47 +132,6 @@ impl TopEntryPoint {
     }
 }
-/// rust-analyzer parser allows you to choose one of the possible entry points.
-///
-/// The primary consumer of this API are declarative macros, `$x:expr` matchers
-/// are implemented by calling into the parser with non-standard entry point.
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub enum ParserEntryPoint {
-    Path,
-    Expr,
-    StatementOptionalSemi,
-    Pattern,
-    Attr,
-}
-/// Parse given tokens into the given sink as a rust file.
-pub fn parse_source_file(input: &Input) -> Output {
-    TopEntryPoint::SourceFile.parse(input)
-}
-/// Parses the given [`Input`] into [`Output`] assuming that the top-level
-/// syntactic construct is the given [`ParserEntryPoint`].
-///
-/// Both input and output here are fairly abstract. The overall flow is that the
-/// caller has some "real" tokens, converts them to [`Input`], parses them to
-/// [`Output`], and then converts that into a "real" tree. The "real" tree is
-/// made of "real" tokens, so this all hinges on rather tight coordination of
-/// indices between the four stages.
-pub fn parse(inp: &Input, entry_point: ParserEntryPoint) -> Output {
-    let entry_point: fn(&'_ mut parser::Parser) = match entry_point {
-        ParserEntryPoint::Path => grammar::entry::prefix::path,
-        ParserEntryPoint::Expr => grammar::entry::prefix::expr,
-        ParserEntryPoint::Pattern => grammar::entry::prefix::pat,
-        ParserEntryPoint::StatementOptionalSemi => grammar::entry_points::stmt_optional_semi,
-        ParserEntryPoint::Attr => grammar::entry_points::attr,
-    };
-    let mut p = parser::Parser::new(inp);
-    entry_point(&mut p);
-    let events = p.finish();
-    event::process(events)
-}
 /// A parsing function for a specific braced-block.
 pub struct Reparser(fn(&mut parser::Parser));
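
Aside on the deletion above (not part of the diff): the per-fragment ParserEntryPoint enum and the free functions parse_source_file and parse are gone; TopEntryPoint, whose impl block provides this hunk's context lines, is the remaining way to drive the parser, and the fragment grammar entry points referenced in the deleted match live under grammar::entry::prefix. A rough sketch of the surviving call shape, assuming rust-analyzer's internal parser crate is in scope; the names are taken from this commit's other hunks:

    // Old: parser::parse_source_file(&input)
    // Old: parser::parse(&input, parser::ParserEntryPoint::Expr)
    fn parse_source(input: &parser::Input) -> parser::Output {
        parser::TopEntryPoint::SourceFile.parse(input)
    }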

View File

@@ -52,14 +52,10 @@ impl<'a> LexedStr<'a> {
     pub fn intersperse_trivia(
         &self,
         output: &crate::Output,
-        synthetic_root: bool,
         sink: &mut dyn FnMut(StrStep),
     ) -> bool {
         let mut builder = Builder { lexed: self, pos: 0, state: State::PendingEnter, sink };
-        if synthetic_root {
-            builder.enter(SyntaxKind::SOURCE_FILE);
-        }
         for event in output.iter() {
             match event {
                 Step::Token { kind, n_input_tokens: n_raw_tokens } => {
@@ -73,9 +69,6 @@ impl<'a> LexedStr<'a> {
                 }
             }
         }
-        if synthetic_root {
-            builder.exit();
-        }
         match mem::replace(&mut builder.state, State::Normal) {
             State::PendingExit => {
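
Aside on the two hunks above (not part of the diff): every remaining caller of intersperse_trivia passed synthetic_root = false, as the later hunks in this commit show, so the flag was dead and the synthetic SOURCE_FILE enter/exit went with it. A rough end-to-end sketch of the new two-argument call, assuming rust-analyzer's internal parser crate; the names come from this commit's other hunks:

    fn lex_parse_walk(text: &str) -> bool {
        let lexed = parser::LexedStr::new(text);
        let input = lexed.to_input();
        let output = parser::TopEntryPoint::SourceFile.parse(&input);
        // The sink receives one StrStep per token/enter/exit; a real caller
        // builds the concrete syntax tree from these steps (see build_tree
        // in the syntax crate further down in this commit).
        lexed.intersperse_trivia(&output, &mut |_step| {
            // inspect or record each step here
        })
    }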

View File

@@ -80,12 +80,12 @@ fn parse_inline_err() {
 fn parse(text: &str) -> (String, bool) {
     let lexed = LexedStr::new(text);
     let input = lexed.to_input();
-    let output = crate::parse_source_file(&input);
+    let output = crate::TopEntryPoint::SourceFile.parse(&input);
     let mut buf = String::new();
     let mut errors = Vec::new();
     let mut indent = String::new();
-    lexed.intersperse_trivia(&output, false, &mut |step| match step {
+    lexed.intersperse_trivia(&output, &mut |step| match step {
         crate::StrStep::Token { kind, text } => {
             write!(buf, "{}", indent).unwrap();
             write!(buf, "{:?} {:?}\n", kind, text).unwrap();

View File

@@ -5,6 +5,7 @@
 use crate::{ast, AstNode};
 pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
+    let s = s.trim();
     let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
     let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
     if expr.syntax().text() != s {
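
Aside on the hunk above (not part of the diff): parse_expr_from_str is a deliberate hack that splices the string into "const _: () = {};", parses that as a source file, and then checks that the first expression node it finds has exactly the input text. Trimming the input first makes that equality check tolerant of surrounding whitespace, which is presumably what the remove_dbg caller in the first hunk needs when it joins macro-argument tokens back into a string. A small usage sketch, assuming rust-analyzer's internal syntax crate is in scope:

    use syntax::hacks::parse_expr_from_str;

    fn demo() {
        // With the trim, leading/trailing spaces no longer make the
        // text-equality check inside the hack fail.
        assert!(parse_expr_from_str("  1 + 1  ").is_some());
    }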

View File

@@ -12,19 +12,18 @@ pub(crate) use crate::parsing::reparsing::incremental_reparse;
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     let lexed = parser::LexedStr::new(text);
     let parser_input = lexed.to_input();
-    let parser_output = parser::parse_source_file(&parser_input);
-    let (node, errors, _eof) = build_tree(lexed, parser_output, false);
+    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input);
+    let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
 }
 pub(crate) fn build_tree(
     lexed: parser::LexedStr<'_>,
     parser_output: parser::Output,
-    synthetic_root: bool,
 ) -> (GreenNode, Vec<SyntaxError>, bool) {
     let mut builder = SyntaxTreeBuilder::default();
-    let is_eof = lexed.intersperse_trivia(&parser_output, synthetic_root, &mut |step| match step {
+    let is_eof = lexed.intersperse_trivia(&parser_output, &mut |step| match step {
         parser::StrStep::Token { kind, text } => builder.token(kind, text),
         parser::StrStep::Enter { kind } => builder.start_node(kind),
         parser::StrStep::Exit => builder.finish_node(),

View File

@@ -96,7 +96,7 @@ fn reparse_block(
     let tree_traversal = reparser.parse(&parser_input);
-    let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal, false);
+    let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
     Some((node.replace_with(green), new_parser_errors, node.text_range()))
 }