10995: internal: switch from trait-based TokenSource to simple struct of arrays r=matklad a=matklad

cc #10765 

The idea here is to simplify the interface as much as we can. The original trait-based approach is a bit over-engineered and hard to debug. Here, we replace callbacks with plain data. The next PR in the series will replace the output `TreeSink` trait with data as well.


The biggest drawback here is that we now have to materialize the parser's entire input up-front. This is a bad fit for macro-by-example: when you parse `$e:expr`, you might consume only part of the input. However, today's trait-based solution doesn't really help -- we were already materializing the whole thing! So, let's keep it simple!
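To make the shape of the change concrete, here is the interface being deleted next to the data structure that replaces it (both lifted from the diff below; `joint` is a bitmap with one bit per token, and `bits` is the diff's alias for `u64`):

```rust
// Before: the parser pulled tokens on demand through a trait object.
pub trait TokenSource {
    fn current(&self) -> Token;
    fn lookahead_nth(&self, n: usize) -> Token;
    fn bump(&mut self);
    fn is_keyword(&self, kw: &str) -> bool;
}

// After: the caller materializes the whole input up-front as a struct of
// arrays, and the parser walks it with a plain `pos: usize`.
#[derive(Default)]
pub struct Tokens {
    kind: Vec<SyntaxKind>,            // one kind per token
    joint: Vec<u64>,                  // is token n glued to token n + 1?
    contextual_kind: Vec<SyntaxKind>, // contextual keyword kind, if any
}
```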

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2021-12-12 16:38:56 +00:00 committed by GitHub
commit db2a7087b9
17 changed files with 310 additions and 362 deletions


@@ -10,7 +10,7 @@
mod expander;
mod syntax_bridge;
mod tt_iter;
mod subtree_source;
mod to_parser_tokens;
#[cfg(test)]
mod benchmark;


@@ -1,174 +0,0 @@
//! Our parser is generic over the source of tokens it parses.
//!
//! This module defines tokens sourced from declarative macros.
use parser::{Token, TokenSource};
use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
use tt::buffer::TokenBuffer;
#[derive(Debug, Clone, Eq, PartialEq)]
struct TtToken {
tt: Token,
text: SmolStr,
}
pub(crate) struct SubtreeTokenSource {
cached: Vec<TtToken>,
curr: (Token, usize),
}
impl<'a> SubtreeTokenSource {
pub(crate) fn new(buffer: &TokenBuffer) -> SubtreeTokenSource {
let mut current = buffer.begin();
let mut cached = Vec::with_capacity(100);
while !current.eof() {
let cursor = current;
let tt = cursor.token_tree();
// Check if it is a lifetime
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
if punct.char == '\'' {
let next = cursor.bump();
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) =
next.token_tree()
{
let text = SmolStr::new("'".to_string() + &ident.text);
cached.push(TtToken {
tt: Token { kind: LIFETIME_IDENT, is_jointed_to_next: false },
text,
});
current = next.bump();
continue;
} else {
panic!("Next token must be ident : {:#?}", next.token_tree());
}
}
}
current = match tt {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
cached.push(convert_leaf(leaf));
cursor.bump()
}
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
if let Some(d) = subtree.delimiter_kind() {
cached.push(convert_delim(d, false));
}
cursor.subtree().unwrap()
}
None => match cursor.end() {
Some(subtree) => {
if let Some(d) = subtree.delimiter_kind() {
cached.push(convert_delim(d, true));
}
cursor.bump()
}
None => continue,
},
};
}
let mut res = SubtreeTokenSource {
curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
cached,
};
res.curr = (res.token(0), 0);
res
}
fn token(&self, pos: usize) -> Token {
match self.cached.get(pos) {
Some(it) => it.tt,
None => Token { kind: EOF, is_jointed_to_next: false },
}
}
}
impl<'a> TokenSource for SubtreeTokenSource {
fn current(&self) -> Token {
self.curr.0
}
/// Lookahead n tokens
fn lookahead_nth(&self, n: usize) -> Token {
self.token(self.curr.1 + n)
}
/// bump cursor to next token
fn bump(&mut self) {
if self.current().kind == EOF {
return;
}
self.curr = (self.token(self.curr.1 + 1), self.curr.1 + 1);
}
/// Is the current token a specified keyword?
fn is_keyword(&self, kw: &str) -> bool {
match self.cached.get(self.curr.1) {
Some(t) => t.text == *kw,
None => false,
}
}
}
fn convert_delim(d: tt::DelimiterKind, closing: bool) -> TtToken {
let (kinds, texts) = match d {
tt::DelimiterKind::Parenthesis => ([T!['('], T![')']], "()"),
tt::DelimiterKind::Brace => ([T!['{'], T!['}']], "{}"),
tt::DelimiterKind::Bracket => ([T!['['], T![']']], "[]"),
};
let idx = closing as usize;
let kind = kinds[idx];
let text = &texts[idx..texts.len() - (1 - idx)];
TtToken { tt: Token { kind, is_jointed_to_next: false }, text: SmolStr::new(text) }
}
fn convert_literal(l: &tt::Literal) -> TtToken {
let is_negated = l.text.starts_with('-');
let inner_text = &l.text[if is_negated { 1 } else { 0 }..];
let kind = lex_single_syntax_kind(inner_text)
.map(|(kind, _error)| kind)
.filter(|kind| {
kind.is_literal() && (!is_negated || matches!(kind, FLOAT_NUMBER | INT_NUMBER))
})
.unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
TtToken { tt: Token { kind, is_jointed_to_next: false }, text: l.text.clone() }
}
fn convert_ident(ident: &tt::Ident) -> TtToken {
let kind = match ident.text.as_ref() {
"true" => T![true],
"false" => T![false],
"_" => UNDERSCORE,
i if i.starts_with('\'') => LIFETIME_IDENT,
_ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT),
};
TtToken { tt: Token { kind, is_jointed_to_next: false }, text: ident.text.clone() }
}
fn convert_punct(p: tt::Punct) -> TtToken {
let kind = match SyntaxKind::from_char(p.char) {
None => panic!("{:#?} is not a valid punct", p),
Some(kind) => kind,
};
let text = {
let mut buf = [0u8; 4];
let s: &str = p.char.encode_utf8(&mut buf);
SmolStr::new(s)
};
TtToken { tt: Token { kind, is_jointed_to_next: p.spacing == tt::Spacing::Joint }, text }
}
fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
match leaf {
tt::Leaf::Literal(l) => convert_literal(l),
tt::Leaf::Ident(ident) => convert_ident(ident),
tt::Leaf::Punct(punct) => convert_punct(*punct),
}
}


@@ -12,7 +12,7 @@
use tt::buffer::{Cursor, TokenBuffer};
use crate::{
subtree_source::SubtreeTokenSource, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
to_parser_tokens::to_parser_tokens, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
};
/// Convert the syntax node to a `TokenTree` (what macro
@@ -56,9 +56,9 @@ pub fn token_tree_to_syntax_node(
}
_ => TokenBuffer::from_subtree(tt),
};
let mut token_source = SubtreeTokenSource::new(&buffer);
let parser_tokens = to_parser_tokens(&buffer);
let mut tree_sink = TtTreeSink::new(buffer.begin());
parser::parse(&mut token_source, &mut tree_sink, entry_point);
parser::parse(&parser_tokens, &mut tree_sink, entry_point);
if tree_sink.roots.len() != 1 {
return Err(ExpandError::ConversionError);
}


@@ -0,0 +1,99 @@
//! Convert macro-by-example tokens which are specific to macro expansion into a
//! format that works for our parser.
use syntax::{lex_single_syntax_kind, SyntaxKind, SyntaxKind::*, T};
use tt::buffer::TokenBuffer;
pub(crate) fn to_parser_tokens(buffer: &TokenBuffer) -> parser::Tokens {
let mut res = parser::Tokens::default();
let mut current = buffer.begin();
while !current.eof() {
let cursor = current;
let tt = cursor.token_tree();
// Check if it is a lifetime
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
if punct.char == '\'' {
let next = cursor.bump();
match next.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(_ident), _)) => {
res.push(LIFETIME_IDENT);
current = next.bump();
continue;
}
_ => panic!("Next token must be ident : {:#?}", next.token_tree()),
}
}
}
current = match tt {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
match leaf {
tt::Leaf::Literal(lit) => {
let is_negated = lit.text.starts_with('-');
let inner_text = &lit.text[if is_negated { 1 } else { 0 }..];
let kind = lex_single_syntax_kind(inner_text)
.map(|(kind, _error)| kind)
.filter(|kind| {
kind.is_literal()
&& (!is_negated || matches!(kind, FLOAT_NUMBER | INT_NUMBER))
})
.unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &lit));
res.push(kind);
}
tt::Leaf::Ident(ident) => match ident.text.as_ref() {
"_" => res.push(T![_]),
i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
_ => match SyntaxKind::from_keyword(&ident.text) {
Some(kind) => res.push(kind),
None => {
let contextual_keyword =
SyntaxKind::from_contextual_keyword(&ident.text)
.unwrap_or(SyntaxKind::IDENT);
res.push_ident(contextual_keyword);
}
},
},
tt::Leaf::Punct(punct) => {
let kind = SyntaxKind::from_char(punct.char)
.unwrap_or_else(|| panic!("{:#?} is not a valid punct", punct));
res.push(kind);
if punct.spacing == tt::Spacing::Joint {
res.was_joint();
}
}
}
cursor.bump()
}
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
if let Some(d) = subtree.delimiter_kind() {
res.push(match d {
tt::DelimiterKind::Parenthesis => T!['('],
tt::DelimiterKind::Brace => T!['{'],
tt::DelimiterKind::Bracket => T!['['],
});
}
cursor.subtree().unwrap()
}
None => match cursor.end() {
Some(subtree) => {
if let Some(d) = subtree.delimiter_kind() {
res.push(match d {
tt::DelimiterKind::Parenthesis => T![')'],
tt::DelimiterKind::Brace => T!['}'],
tt::DelimiterKind::Bracket => T![']'],
})
}
cursor.bump()
}
None => continue,
},
};
}
res
}


@@ -1,7 +1,7 @@
//! A "Parser" structure for token trees. We use this when parsing a declarative
//! macro definition into a list of patterns and templates.
use crate::{subtree_source::SubtreeTokenSource, ExpandError, ExpandResult, ParserEntryPoint};
use crate::{to_parser_tokens::to_parser_tokens, ExpandError, ExpandResult, ParserEntryPoint};
use parser::TreeSink;
use syntax::SyntaxKind;
@@ -116,10 +116,10 @@ fn error(&mut self, _error: parser::ParseError) {
}
let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
let mut src = SubtreeTokenSource::new(&buffer);
let parser_tokens = to_parser_tokens(&buffer);
let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
parser::parse(&mut src, &mut sink, entry_point);
parser::parse(&parser_tokens, &mut sink, entry_point);
let mut err = if !sink.cursor.is_root() || sink.error {
Some(err!("expected {:?}", entry_point))


@@ -296,10 +296,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
T![&] => {
m = p.start();
p.bump(T![&]);
if p.at(IDENT)
&& p.at_contextual_kw("raw")
&& (p.nth_at(1, T![mut]) || p.nth_at(1, T![const]))
{
if p.at_contextual_kw(T![raw]) && (p.nth_at(1, T![mut]) || p.nth_at(1, T![const])) {
p.bump_remap(T![raw]);
p.bump_any();
} else {


@@ -122,14 +122,14 @@ pub(super) fn opt_item(p: &mut Parser, m: Marker) -> Result<(), Marker> {
has_mods = true;
abi(p);
}
if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == T![trait] {
if p.at_contextual_kw(T![auto]) && p.nth(1) == T![trait] {
p.bump_remap(T![auto]);
has_mods = true;
}
// test default_item
// default impl T for Foo {}
if p.at(IDENT) && p.at_contextual_kw("default") {
if p.at_contextual_kw(T![default]) {
match p.nth(1) {
T![fn] | T![type] | T![const] | T![impl] => {
p.bump_remap(T![default]);
@@ -176,7 +176,7 @@ pub(super) fn opt_item(p: &mut Parser, m: Marker) -> Result<(), Marker> {
// test existential_type
// existential type Foo: Fn() -> usize;
if p.at(IDENT) && p.at_contextual_kw("existential") && p.nth(1) == T![type] {
if p.at_contextual_kw(T![existential]) && p.nth(1) == T![type] {
p.bump_remap(T![existential]);
has_mods = true;
}
@@ -224,10 +224,10 @@ fn opt_item_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> {
T![type] => type_alias(p, m),
T![struct] => adt::strukt(p, m),
T![enum] => adt::enum_(p, m),
IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => adt::union(p, m),
IDENT if p.at_contextual_kw(T![union]) && p.nth(1) == IDENT => adt::union(p, m),
T![macro] => macro_def(p, m),
IDENT if p.at_contextual_kw("macro_rules") && p.nth(1) == BANG => macro_rules(p, m),
IDENT if p.at_contextual_kw(T![macro_rules]) && p.nth(1) == BANG => macro_rules(p, m),
T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m),
T![static] => consts::static_(p, m),
@@ -319,7 +319,7 @@ pub(crate) fn extern_item_list(p: &mut Parser) {
}
fn macro_rules(p: &mut Parser, m: Marker) {
assert!(p.at_contextual_kw("macro_rules"));
assert!(p.at_contextual_kw(T![macro_rules]));
p.bump_remap(T![macro_rules]);
p.expect(T![!]);


@@ -10,7 +10,7 @@ pub(super) fn strukt(p: &mut Parser, m: Marker) {
// test union_item
// struct U { i: i32, f: f32 }
pub(super) fn union(p: &mut Parser, m: Marker) {
assert!(p.at_contextual_kw("union"));
assert!(p.at_contextual_kw(T![union]));
p.bump_remap(T![union]);
struct_or_union(p, m, false);
}


@@ -1,8 +1,11 @@
//! The Rust parser.
//!
//! NOTE: The crate is undergoing refactors, don't believe everything the docs
//! say :-)
//!
//! The parser doesn't know about concrete representation of tokens and syntax
//! trees. Abstract [`TokenSource`] and [`TreeSink`] traits are used instead.
//! As a consequence, this crate does not contain a lexer.
//! trees. Abstract [`TokenSource`] and [`TreeSink`] traits are used instead. As
//! a consequence, this crate does not contain a lexer.
//!
//! The [`Parser`] struct from the [`parser`] module is a cursor into the
//! sequence of tokens. Parsing routines use [`Parser`] to inspect current
@@ -20,40 +23,15 @@
mod event;
mod parser;
mod grammar;
mod tokens;
pub(crate) use token_set::TokenSet;
pub use syntax_kind::SyntaxKind;
pub use crate::{syntax_kind::SyntaxKind, tokens::Tokens};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ParseError(pub Box<String>);
/// `TokenSource` abstracts the source of the tokens parser operates on.
///
/// Hopefully this will allow us to treat text and token trees in the same way!
pub trait TokenSource {
fn current(&self) -> Token;
/// Lookahead n tokens
fn lookahead_nth(&self, n: usize) -> Token;
/// bump cursor to next token
fn bump(&mut self);
/// Is the current token a specified keyword?
fn is_keyword(&self, kw: &str) -> bool;
}
/// `Token` abstracts the cursor that `TokenSource` operates on.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Token {
/// What is the current token?
pub kind: SyntaxKind,
/// Is the current token joined to the next one (`> >` vs `>>`).
pub is_jointed_to_next: bool,
}
/// `TreeSink` abstracts details of a particular syntax tree implementation.
pub trait TreeSink {
/// Adds new token to the current branch.
@@ -92,15 +70,11 @@ pub enum ParserEntryPoint {
}
/// Parse given tokens into the given sink as a rust file.
pub fn parse_source_file(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
parse(token_source, tree_sink, ParserEntryPoint::SourceFile);
pub fn parse_source_file(tokens: &Tokens, tree_sink: &mut dyn TreeSink) {
parse(tokens, tree_sink, ParserEntryPoint::SourceFile);
}
pub fn parse(
token_source: &mut dyn TokenSource,
tree_sink: &mut dyn TreeSink,
entry_point: ParserEntryPoint,
) {
pub fn parse(tokens: &Tokens, tree_sink: &mut dyn TreeSink, entry_point: ParserEntryPoint) {
let entry_point: fn(&'_ mut parser::Parser) = match entry_point {
ParserEntryPoint::SourceFile => grammar::entry_points::source_file,
ParserEntryPoint::Path => grammar::entry_points::path,
@@ -118,7 +92,7 @@ pub fn parse(
ParserEntryPoint::Attr => grammar::entry_points::attr,
};
let mut p = parser::Parser::new(token_source);
let mut p = parser::Parser::new(tokens);
entry_point(&mut p);
let events = p.finish();
event::process(tree_sink, events);
@@ -141,9 +115,9 @@ pub fn for_node(
///
/// Tokens must start with `{`, end with `}` and form a valid brace
/// sequence.
pub fn parse(self, token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
pub fn parse(self, tokens: &Tokens, tree_sink: &mut dyn TreeSink) {
let Reparser(r) = self;
let mut p = parser::Parser::new(token_source);
let mut p = parser::Parser::new(tokens);
r(&mut p);
let events = p.finish();
event::process(tree_sink, events);


@@ -7,9 +7,10 @@
use crate::{
event::Event,
tokens::Tokens,
ParseError,
SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
TokenSet, TokenSource, T,
TokenSet, T,
};
/// `Parser` struct provides the low-level API for
@@ -22,7 +23,8 @@
/// "start expression, consume number literal,
/// finish expression". See `Event` docs for more.
pub(crate) struct Parser<'t> {
token_source: &'t mut dyn TokenSource,
tokens: &'t Tokens,
pos: usize,
events: Vec<Event>,
steps: Cell<u32>,
}
@@ -30,8 +32,8 @@ pub(crate) struct Parser<'t> {
static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
impl<'t> Parser<'t> {
pub(super) fn new(token_source: &'t mut dyn TokenSource) -> Parser<'t> {
Parser { token_source, events: Vec::new(), steps: Cell::new(0) }
pub(super) fn new(tokens: &'t Tokens) -> Parser<'t> {
Parser { tokens, pos: 0, events: Vec::new(), steps: Cell::new(0) }
}
pub(crate) fn finish(self) -> Vec<Event> {
@@ -54,7 +56,7 @@ pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
self.steps.set(steps + 1);
self.token_source.lookahead_nth(n).kind
self.tokens.kind(self.pos + n)
}
/// Checks if the current token is `kind`.
@@ -90,7 +92,7 @@ pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
T![<<=] => self.at_composite3(n, T![<], T![<], T![=]),
T![>>=] => self.at_composite3(n, T![>], T![>], T![=]),
_ => self.token_source.lookahead_nth(n).kind == kind,
_ => self.tokens.kind(self.pos + n) == kind,
}
}
@@ -129,25 +131,17 @@ pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
}
fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool {
let t1 = self.token_source.lookahead_nth(n);
if t1.kind != k1 || !t1.is_jointed_to_next {
return false;
}
let t2 = self.token_source.lookahead_nth(n + 1);
t2.kind == k2
self.tokens.kind(self.pos + n) == k1
&& self.tokens.kind(self.pos + n + 1) == k2
&& self.tokens.is_joint(self.pos + n)
}
fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool {
let t1 = self.token_source.lookahead_nth(n);
if t1.kind != k1 || !t1.is_jointed_to_next {
return false;
}
let t2 = self.token_source.lookahead_nth(n + 1);
if t2.kind != k2 || !t2.is_jointed_to_next {
return false;
}
let t3 = self.token_source.lookahead_nth(n + 2);
t3.kind == k3
self.tokens.kind(self.pos + n) == k1
&& self.tokens.kind(self.pos + n + 1) == k2
&& self.tokens.kind(self.pos + n + 2) == k3
&& self.tokens.is_joint(self.pos + n)
&& self.tokens.is_joint(self.pos + n + 1)
}
/// Checks if the current token is in `kinds`.
@@ -156,8 +150,8 @@ pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool {
}
/// Checks if the current token is contextual keyword with text `t`.
pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool {
self.token_source.is_keyword(kw)
pub(crate) fn at_contextual_kw(&self, kw: SyntaxKind) -> bool {
self.tokens.contextual_kind(self.pos) == kw
}
/// Starts a new node in the syntax tree. All nodes and tokens
@@ -243,10 +237,7 @@ pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
}
fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
for _ in 0..n_raw_tokens {
self.token_source.bump();
}
self.pos += n_raw_tokens as usize;
self.push_event(Event::Token { kind, n_raw_tokens });
}


@@ -334,6 +334,18 @@ pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
};
Some(kw)
}
pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
let kw = match ident {
"auto" => AUTO_KW,
"default" => DEFAULT_KW,
"existential" => EXISTENTIAL_KW,
"union" => UNION_KW,
"raw" => RAW_KW,
"macro_rules" => MACRO_RULES_KW,
_ => return None,
};
Some(kw)
}
pub fn from_char(c: char) -> Option<SyntaxKind> {
let tok = match c {
';' => SEMICOLON,


@@ -0,0 +1,87 @@
//! Input for the parser -- a sequence of tokens.
//!
//! As of now, the parser doesn't have access to the *text* of the tokens, and makes
//! decisions based solely on their classification.
use crate::SyntaxKind;
#[allow(non_camel_case_types)]
type bits = u64;
/// Main input to the parser.
///
/// A sequence of tokens represented internally as a struct of arrays.
#[derive(Default)]
pub struct Tokens {
kind: Vec<SyntaxKind>,
joint: Vec<bits>,
contextual_kind: Vec<SyntaxKind>,
}
/// `pub` impl used by callers to create `Tokens`.
impl Tokens {
#[inline]
pub fn push(&mut self, kind: SyntaxKind) {
self.push_impl(kind, SyntaxKind::EOF)
}
#[inline]
pub fn push_ident(&mut self, contextual_kind: SyntaxKind) {
self.push_impl(SyntaxKind::IDENT, contextual_kind)
}
/// Sets jointness for the last token we've pushed.
///
/// This is a separate API rather than an argument to `push` to make it
/// convenient for both textual and mbe tokens. With text, you know whether
/// the *previous* token was joint; with mbe, you know whether the *current*
/// one is joint. This API allows for both styles of usage:
///
/// ```
/// // In text:
/// if prev_joint { tokens.was_joint(); }
/// tokens.push(curr);
///
/// // In MBE:
/// tokens.push(curr);
/// if curr_joint { tokens.was_joint(); }
/// ```
#[inline]
pub fn was_joint(&mut self) {
let n = self.len() - 1;
let (idx, b_idx) = self.bit_index(n);
self.joint[idx] |= 1 << b_idx;
}
#[inline]
fn push_impl(&mut self, kind: SyntaxKind, contextual_kind: SyntaxKind) {
let idx = self.len();
if idx % (bits::BITS as usize) == 0 {
self.joint.push(0);
}
self.kind.push(kind);
self.contextual_kind.push(contextual_kind);
}
}
/// pub(crate) impl used by the parser to consume `Tokens`.
impl Tokens {
pub(crate) fn kind(&self, idx: usize) -> SyntaxKind {
self.kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
}
pub(crate) fn contextual_kind(&self, idx: usize) -> SyntaxKind {
self.contextual_kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
}
pub(crate) fn is_joint(&self, n: usize) -> bool {
let (idx, b_idx) = self.bit_index(n);
self.joint[idx] & 1 << b_idx != 0
}
}
impl Tokens {
fn bit_index(&self, n: usize) -> (usize, usize) {
let idx = n / (bits::BITS as usize);
let b_idx = n % (bits::BITS as usize);
(idx, b_idx)
}
fn len(&self) -> usize {
self.kind.len()
}
}
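For illustration (not part of the diff): a minimal usage sketch of the new input type, written as if from inside the `parser` crate since the accessors are `pub(crate)`; the test name is hypothetical. It encodes `a >> b`, where the two `>` tokens are joint:

```rust
use crate::{SyntaxKind, Tokens};

#[test]
fn builds_input_for_a_shr_b() {
    // `a >> b`: four non-trivia tokens; the two `>` touch each other.
    let mut tokens = Tokens::default();
    tokens.push_ident(SyntaxKind::IDENT); // `a` (no contextual keyword meaning)
    tokens.push(SyntaxKind::R_ANGLE);     // first `>`
    tokens.was_joint();                   // glued to the next token
    tokens.push(SyntaxKind::R_ANGLE);     // second `>`
    tokens.push_ident(SyntaxKind::IDENT); // `b`

    assert_eq!(tokens.kind(1), SyntaxKind::R_ANGLE);
    assert!(tokens.is_joint(1));                 // lets `at_composite2` see `>>`
    assert_eq!(tokens.kind(4), SyntaxKind::EOF); // reads past the end yield EOF
}
```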


@@ -2,12 +2,10 @@
//! incremental reparsing.
pub(crate) mod lexer;
mod text_token_source;
mod text_tree_sink;
mod reparsing;
use parser::SyntaxKind;
use text_token_source::TextTokenSource;
use text_tree_sink::TextTreeSink;
use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
@@ -15,12 +13,12 @@
pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
let (tokens, lexer_errors) = tokenize(text);
let (lexer_tokens, lexer_errors) = tokenize(text);
let parser_tokens = to_parser_tokens(text, &lexer_tokens);
let mut token_source = TextTokenSource::new(text, &tokens);
let mut tree_sink = TextTreeSink::new(text, &tokens);
let mut tree_sink = TextTreeSink::new(text, &lexer_tokens);
parser::parse_source_file(&mut token_source, &mut tree_sink);
parser::parse_source_file(&parser_tokens, &mut tree_sink);
let (tree, mut parser_errors) = tree_sink.finish();
parser_errors.extend(lexer_errors);
@@ -33,26 +31,52 @@ pub(crate) fn parse_text_as<T: AstNode>(
text: &str,
entry_point: parser::ParserEntryPoint,
) -> Result<T, ()> {
let (tokens, lexer_errors) = tokenize(text);
let (lexer_tokens, lexer_errors) = tokenize(text);
if !lexer_errors.is_empty() {
return Err(());
}
let mut token_source = TextTokenSource::new(text, &tokens);
let mut tree_sink = TextTreeSink::new(text, &tokens);
let parser_tokens = to_parser_tokens(text, &lexer_tokens);
let mut tree_sink = TextTreeSink::new(text, &lexer_tokens);
// TextTreeSink assumes that there's at least some root node to which it can attach errors and
// tokens. We arbitrarily give it a SourceFile.
use parser::TreeSink;
tree_sink.start_node(SyntaxKind::SOURCE_FILE);
parser::parse(&mut token_source, &mut tree_sink, entry_point);
parser::parse(&parser_tokens, &mut tree_sink, entry_point);
tree_sink.finish_node();
let (tree, parser_errors) = tree_sink.finish();
use parser::TokenSource;
if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF {
let (tree, parser_errors, eof) = tree_sink.finish_eof();
if !parser_errors.is_empty() || !eof {
return Err(());
}
SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(())
}
pub(crate) fn to_parser_tokens(text: &str, lexer_tokens: &[lexer::Token]) -> ::parser::Tokens {
let mut off = 0;
let mut res = parser::Tokens::default();
let mut was_joint = false;
for t in lexer_tokens {
if t.kind.is_trivia() {
was_joint = false;
} else {
if t.kind == SyntaxKind::IDENT {
let token_text = &text[off..][..usize::from(t.len)];
let contextual_kw =
SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
res.push_ident(contextual_kw);
} else {
if was_joint {
res.was_joint();
}
res.push(t.kind);
}
was_joint = true;
}
off += usize::from(t.len);
}
res
}
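Again for illustration (not part of the diff): what this conversion does for a contextual keyword, as a hypothetical in-crate sketch relying only on `tokenize` and `to_parser_tokens` as defined in this commit (the resulting fields are `pub(crate)` to the `parser` crate, so the outcome is spelled out in comments rather than asserts):

```rust
#[test]
fn contextual_keywords_survive_conversion() {
    // `union` is not a real keyword, so it lexes as a plain identifier.
    let text = "union U";
    let (lexer_tokens, _errors) = tokenize(text); // IDENT("union"), WHITESPACE, IDENT("U")
    let parser_tokens = to_parser_tokens(text, &lexer_tokens);
    // Whitespace is trivia and is dropped. The first token is pushed via
    // `push_ident` with contextual_kind = UNION_KW, which is what
    // `p.at_contextual_kw(T![union])` checks without making `union` a real
    // keyword; the second gets contextual_kind = IDENT.
    let _ = parser_tokens;
}
```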


@@ -12,8 +12,8 @@
use crate::{
parsing::{
lexer::{lex_single_syntax_kind, tokenize, Token},
text_token_source::TextTokenSource,
text_tree_sink::TextTreeSink,
to_parser_tokens,
},
syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
SyntaxError,
@@ -91,14 +91,14 @@ fn reparse_block(
let (node, reparser) = find_reparsable_node(root, edit.delete)?;
let text = get_text_after_edit(node.clone().into(), edit);
let (tokens, new_lexer_errors) = tokenize(&text);
if !is_balanced(&tokens) {
let (lexer_tokens, new_lexer_errors) = tokenize(&text);
if !is_balanced(&lexer_tokens) {
return None;
}
let parser_tokens = to_parser_tokens(&text, &lexer_tokens);
let mut token_source = TextTokenSource::new(&text, &tokens);
let mut tree_sink = TextTreeSink::new(&text, &tokens);
reparser.parse(&mut token_source, &mut tree_sink);
let mut tree_sink = TextTreeSink::new(&text, &lexer_tokens);
reparser.parse(&parser_tokens, &mut tree_sink);
let (green, mut new_parser_errors) = tree_sink.finish();
new_parser_errors.extend(new_lexer_errors);


@@ -1,82 +0,0 @@
//! See `TextTokenSource` docs.
use parser::TokenSource;
use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize};
/// Implementation of `parser::TokenSource` that takes tokens from source code text.
pub(crate) struct TextTokenSource<'t> {
text: &'t str,
/// token and its start position (non-whitespace/comment tokens)
/// ```non-rust
/// struct Foo;
/// ^------^--^-
/// | | \________
/// | \____ \
/// | \ |
/// (struct, 0) (Foo, 7) (;, 10)
/// ```
/// `[(struct, 0), (Foo, 7), (;, 10)]`
token_offset_pairs: Vec<(Token, TextSize)>,
/// Current token and position
curr: (parser::Token, usize),
}
impl<'t> TokenSource for TextTokenSource<'t> {
fn current(&self) -> parser::Token {
self.curr.0
}
fn lookahead_nth(&self, n: usize) -> parser::Token {
mk_token(self.curr.1 + n, &self.token_offset_pairs)
}
fn bump(&mut self) {
if self.curr.0.kind == EOF {
return;
}
let pos = self.curr.1 + 1;
self.curr = (mk_token(pos, &self.token_offset_pairs), pos);
}
fn is_keyword(&self, kw: &str) -> bool {
self.token_offset_pairs
.get(self.curr.1)
.map_or(false, |(token, offset)| &self.text[TextRange::at(*offset, token.len)] == kw)
}
}
fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Token {
let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) {
Some((token, offset)) => (
token.kind,
token_offset_pairs
.get(pos + 1)
.map_or(false, |(_, next_offset)| offset + token.len == *next_offset),
),
None => (EOF, false),
};
parser::Token { kind, is_jointed_to_next }
}
impl<'t> TextTokenSource<'t> {
/// Generate input from tokens (excluding comments and whitespace).
pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
let token_offset_pairs: Vec<_> = raw_tokens
.iter()
.filter_map({
let mut len = 0.into();
move |token| {
let pair = if token.kind.is_trivia() { None } else { Some((*token, len)) };
len += token.len;
pair
}
})
.collect();
let first = mk_token(0, &token_offset_pairs);
TextTokenSource { text, token_offset_pairs, curr: (first, 0) }
}
}


@@ -104,7 +104,7 @@ pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> Self {
}
}
pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
pub(super) fn finish_eof(mut self) -> (GreenNode, Vec<SyntaxError>, bool) {
match mem::replace(&mut self.state, State::Normal) {
State::PendingFinish => {
self.eat_trivias();
@@ -113,7 +113,15 @@ pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
State::PendingStart | State::Normal => unreachable!(),
}
self.inner.finish_raw()
let (node, errors) = self.inner.finish_raw();
let is_eof = self.token_pos == self.tokens.len();
(node, errors, is_eof)
}
pub(super) fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
let (node, errors, _eof) = self.finish_eof();
(node, errors)
}
fn eat_trivias(&mut self) {


@@ -359,6 +359,10 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
let full_keywords =
full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
let contextual_keywords_values = &grammar.contextual_keywords;
let contextual_keywords =
contextual_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
let all_keywords_values =
grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
@@ -428,6 +432,14 @@ pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
Some(kw)
}
pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
let kw = match ident {
#(#contextual_keywords_values => #contextual_keywords,)*
_ => return None,
};
Some(kw)
}
pub fn from_char(c: char) -> Option<SyntaxKind> {
let tok = match c {
#(#single_byte_tokens_values => #single_byte_tokens,)*