move all parsing related bits to a separate module

parent 9d0cda4bc8
commit 5222b8aba3
@@ -18,13 +18,7 @@
 
 pub mod algo;
 pub mod ast;
-mod lexer;
-#[macro_use]
-mod token_set;
-mod grammar;
-mod parser_api;
-mod parser_impl;
-mod reparsing;
+mod parsing;
 mod string_lexing;
 mod syntax_kinds;
 /// Utilities for simple uses of the parser.
@@ -36,10 +30,10 @@ mod ptr;
 pub use rowan::{SmolStr, TextRange, TextUnit};
 pub use crate::{
     ast::AstNode,
-    lexer::{tokenize, Token},
     syntax_kinds::SyntaxKind,
     syntax_node::{Direction, SyntaxError, SyntaxNode, WalkEvent, Location, TreeArc},
     ptr::{SyntaxNodePtr, AstPtr},
+    parsing::{tokenize, Token},
 };
 
 use ra_text_edit::AtomTextEdit;
@@ -59,9 +53,7 @@ impl SourceFile {
     }
 
     pub fn parse(text: &str) -> TreeArc<SourceFile> {
-        let tokens = tokenize(&text);
-        let (green, errors) =
-            parser_impl::parse_with(syntax_node::GreenBuilder::new(), text, &tokens, grammar::root);
+        let (green, errors) = parsing::parse_text(text);
         SourceFile::new(green, errors)
     }
 
@@ -70,7 +62,7 @@ impl SourceFile {
     }
 
     pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> {
-        reparsing::incremental_reparse(self.syntax(), edit, self.errors())
+        parsing::incremental_reparse(self.syntax(), edit, self.errors())
            .map(|(green_node, errors)| SourceFile::new(green_node, errors))
     }
 
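For orientation, a minimal caller-side sketch of the entry point touched above: try the incremental path for a single edit and fall back to a full parse when it declines. The helper name and wiring are hypothetical; only SourceFile::parse, SourceFile::incremental_reparse, and the AtomTextEdit type come from the hunk itself.

use ra_syntax::{SourceFile, TreeArc};
use ra_text_edit::AtomTextEdit;

// Hypothetical helper: prefer the incremental reparse, fall back to a full parse
// of the edited text when incremental reparsing returns None.
fn reparse_or_full(file: &SourceFile, edit: &AtomTextEdit, edited_text: &str) -> TreeArc<SourceFile> {
    file.incremental_reparse(edit)
        .unwrap_or_else(|| SourceFile::parse(edited_text))
}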
crates/ra_syntax/src/parsing.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
+#[macro_use]
+mod token_set;
+mod builder;
+mod lexer;
+mod parser_impl;
+mod parser_api;
+mod reparsing;
+mod grammar;
+
+use crate::{
+    parsing::builder::GreenBuilder,
+    syntax_node::{GreenNode, SyntaxError},
+};
+
+pub use self::lexer::{tokenize, Token};
+
+pub(crate) use self::reparsing::incremental_reparse;
+
+pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+    let tokens = tokenize(&text);
+    let (green, errors) =
+        parser_impl::parse_with(GreenBuilder::new(), text, &tokens, grammar::root);
+    (green, errors)
+}
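As a quick illustration of the facade above: lib.rs now re-exports parsing::{tokenize, Token}, so crate-root callers keep tokenizing as before. A hedged sketch of such a caller follows; the Token::kind field and the WHITESPACE/COMMENT kinds are assumed from the lexer of this era rather than spelled out in this hunk.

use ra_syntax::{tokenize, SyntaxKind, Token};

// Count the non-trivia tokens in a snippet (assumes `Token { kind, .. }`).
fn significant_tokens(text: &str) -> usize {
    let tokens: Vec<Token> = tokenize(text);
    tokens
        .iter()
        .filter(|t| t.kind != SyntaxKind::WHITESPACE && t.kind != SyntaxKind::COMMENT)
        .count()
}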
@@ -1,5 +1,5 @@
 use crate::{
-    parser_impl::Sink,
+    parsing::parser_impl::Sink,
     syntax_node::{GreenNode, RaTypes, SyntaxError},
     SmolStr, SyntaxKind,
 };
@@ -44,9 +44,11 @@ pub(crate) use self::{
     },
 };
 use crate::{
-    parser_api::{CompletedMarker, Marker, Parser},
-    token_set::TokenSet,
     SyntaxKind::{self, *},
+    parsing::{
+        token_set::TokenSet,
+        parser_api::{CompletedMarker, Marker, Parser}
+    },
 };
 
 pub(crate) fn root(p: &mut Parser) {
@@ -1,4 +1,4 @@
-use crate::lexer::ptr::Ptr;
+use crate::parsing::lexer::ptr::Ptr;
 
 use crate::SyntaxKind::{self, *};
 
@@ -1,5 +1,7 @@
-use crate::lexer::classes::*;
-use crate::lexer::ptr::Ptr;
+use crate::parsing::lexer::{
+    ptr::Ptr,
+    classes::*,
+};
 
 use crate::SyntaxKind::{self, *};
 
@@ -1,6 +1,7 @@
-use crate::SyntaxKind::{self, *};
-
-use crate::lexer::ptr::Ptr;
+use crate::{
+    parsing::lexer::ptr::Ptr,
+    SyntaxKind::{self, *},
+};
 
 pub(crate) fn is_string_literal_start(c: char, c1: Option<char>, c2: Option<char>) -> bool {
     match (c, c1, c2) {
@@ -1,9 +1,11 @@
 use drop_bomb::DropBomb;
 
 use crate::{
-    parser_impl::ParserImpl,
-    token_set::TokenSet,
     SyntaxKind::{self, ERROR},
+    parsing::{
+        token_set::TokenSet,
+        parser_impl::ParserImpl
+    },
 };
 
 /// `Parser` struct provides the low-level API for
@@ -4,18 +4,16 @@ mod input;
 use std::cell::Cell;
 
 use crate::{
+    SmolStr,
+    syntax_node::syntax_error::{ParseError, SyntaxError},
+    parsing::{
     lexer::Token,
     parser_api::Parser,
     parser_impl::{
         event::{Event, EventProcessor},
         input::{InputPosition, ParserInput},
     },
-    SmolStr,
-    syntax_node::syntax_error::{
-        ParseError,
-        SyntaxError,
-    },
-};
+}};
 
 use crate::SyntaxKind::{self, EOF, TOMBSTONE};
 
@@ -7,9 +7,9 @@
 //! tree builder: the parser produces a stream of events like
 //! `start node`, `finish node`, and `FileBuilder` converts
 //! this stream to a real tree.
+use std::mem;
+
 use crate::{
-    lexer::Token,
-    parser_impl::Sink,
     SmolStr,
     SyntaxKind::{self, *},
     TextRange, TextUnit,
@@ -18,8 +18,11 @@ use crate::{
         SyntaxError,
         SyntaxErrorKind,
     },
+    parsing::{
+        lexer::Token,
+        parser_impl::Sink,
+    },
 };
-use std::mem;
 
 /// `Parser` produces a flat list of `Event`s.
 /// They are converted to a tree-structure in
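The module docs in the hunk above describe the event-based design: the parser emits a flat stream of `start node` / `finish node` events that a sink later converts into a real tree. A toy, self-contained sketch of that idea (illustrative names only, not the crate's real types):

enum Event {
    StartNode(&'static str),
    Token(&'static str),
    FinishNode,
}

// Replay the flat event stream, rendering the nesting as an S-expression so
// the implied tree shape is visible.
fn replay(events: &[Event]) -> String {
    let mut out = String::new();
    for event in events {
        match event {
            Event::StartNode(kind) => {
                out.push('(');
                out.push_str(kind);
            }
            Event::Token(text) => {
                out.push(' ');
                out.push_str(text);
            }
            Event::FinishNode => out.push(')'),
        }
    }
    out
}

fn main() {
    let events = [
        Event::StartNode("BIN_EXPR"),
        Event::Token("1"),
        Event::Token("+"),
        Event::Token("2"),
        Event::FinishNode,
    ];
    assert_eq!(replay(&events), "(BIN_EXPR 1 + 2)");
}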
@@ -1,4 +1,7 @@
-use crate::{lexer::Token, SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit};
+use crate::{
+    SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit,
+    parsing::lexer::Token,
+};
 
 use std::ops::{Add, AddAssign};
 
@@ -1,10 +1,16 @@
-use crate::algo;
-use crate::grammar;
-use crate::lexer::{tokenize, Token};
-use crate::parser_api::Parser;
-use crate::parser_impl;
-use crate::syntax_node::{self, GreenNode, SyntaxError, SyntaxNode};
-use crate::{SyntaxKind::*, TextRange, TextUnit};
+use crate::{
+    SyntaxKind::*, TextRange, TextUnit,
+    algo,
+    syntax_node::{GreenNode, SyntaxError, SyntaxNode},
+    parsing::{
+        grammar,
+        parser_impl,
+        builder::GreenBuilder,
+        parser_api::Parser,
+        lexer::{tokenize, Token},
+    }
+};
+
 use ra_text_edit::AtomTextEdit;
 
 pub(crate) fn incremental_reparse(
@@ -56,7 +62,7 @@ fn reparse_block<'node>(
         return None;
     }
     let (green, new_errors) =
-        parser_impl::parse_with(syntax_node::GreenBuilder::new(), &text, &tokens, reparser);
+        parser_impl::parse_with(GreenBuilder::new(), &text, &tokens, reparser);
     Some((node, green, new_errors))
 }
 
@@ -4,19 +4,19 @@ use crate::SyntaxKind;
 pub(crate) struct TokenSet(u128);
 
 impl TokenSet {
-    pub const fn empty() -> TokenSet {
+    pub(crate) const fn empty() -> TokenSet {
         TokenSet(0)
     }
 
-    pub const fn singleton(kind: SyntaxKind) -> TokenSet {
+    pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet {
         TokenSet(mask(kind))
     }
 
-    pub const fn union(self, other: TokenSet) -> TokenSet {
+    pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
         TokenSet(self.0 | other.0)
     }
 
-    pub fn contains(&self, kind: SyntaxKind) -> bool {
+    pub(crate) fn contains(&self, kind: SyntaxKind) -> bool {
         self.0 & mask(kind) != 0
     }
 }
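TokenSet, shown above, is a const-friendly bitset over SyntaxKind backed by a u128; the mask helper it calls lives elsewhere in the same file. A standalone sketch of the same technique, with illustrative names rather than the crate's real items:

// Minimal bitset keyed by an enum discriminant, usable in const contexts.
#[derive(Clone, Copy)]
enum Kind {
    Ident = 0,
    Plus = 1,
    Star = 2,
}

#[derive(Clone, Copy)]
struct KindSet(u128);

impl KindSet {
    const fn singleton(kind: Kind) -> KindSet {
        // One bit per enum discriminant.
        KindSet(1u128 << (kind as usize))
    }
    const fn union(self, other: KindSet) -> KindSet {
        KindSet(self.0 | other.0)
    }
    fn contains(&self, kind: Kind) -> bool {
        self.0 & (1u128 << (kind as usize)) != 0
    }
}

fn main() {
    const OPERATORS: KindSet = KindSet::singleton(Kind::Plus).union(KindSet::singleton(Kind::Star));
    assert!(OPERATORS.contains(Kind::Star));
    assert!(!OPERATORS.contains(Kind::Ident));
}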
@@ -1,4 +1,3 @@
-mod builder;
 pub mod syntax_error;
 mod syntax_text;
 
@@ -8,7 +7,6 @@ use self::syntax_text::SyntaxText;
 use crate::{SmolStr, SyntaxKind, TextRange};
 use rowan::{Types, TransparentNewType};
 
-pub(crate) use self::builder::GreenBuilder;
 pub use self::syntax_error::{SyntaxError, SyntaxErrorKind, Location};
 pub use rowan::WalkEvent;
 