Simplify event processing
parent 3c12d38a32
commit 86a7ac2d31

@@ -245,11 +245,13 @@ pub fn resolve_local_name<'a>(name_ref: ast::NameRef, scopes: &'a FnScopes) -> O
     use std::collections::HashSet;

     let mut shadowed = HashSet::new();
-    scopes.scope_chain(name_ref.syntax())
+    let ret = scopes.scope_chain(name_ref.syntax())
         .flat_map(|scope| scopes.entries(scope).iter())
         .filter(|entry| shadowed.insert(entry.name()))
         .filter(|entry| entry.name() == name_ref.text())
-        .nth(0)
+        .nth(0);
+    eprintln!("ret = {:?}", ret);
+    ret
 }

 #[cfg(test)]
@@ -357,7 +359,6 @@ fn do_check_local_name(code: &str, expected_offset: u32) {
     let scopes = FnScopes::new(fn_def);
-
     let local_name = resolve_local_name(name_ref, &scopes).unwrap().ast().name().unwrap();

     let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();
     assert_eq!(local_name.syntax().range(), expected_name.syntax().range());
 }
@@ -394,4 +395,4 @@ fn foo(x: String) {
 }",
 46);
 }
 }
 }

@@ -74,7 +74,8 @@ fn new(green: GreenNode, errors: Vec<SyntaxError>) -> File {
     }
     pub fn parse(text: &str) -> File {
         let tokens = tokenize(&text);
-        let (green, errors) = parser_impl::parse_with::<yellow::GreenBuilder>(
+        let (green, errors) = parser_impl::parse_with(
+            yellow::GreenBuilder::new(),
             text, &tokens, grammar::root,
         );
         File::new(green, errors)

@@ -9,6 +9,7 @@
 //! this stream to a real tree.
 use std::mem;
 use {
+    TextUnit, TextRange, SmolStr,
     lexer::Token,
     parser_impl::Sink,
     SyntaxKind::{self, TOMBSTONE},
@@ -78,77 +79,104 @@ pub(crate) enum Event {
     },
 }

+pub(super) struct EventProcessor<'a, S: Sink> {
+    sink: S,
+    text_pos: TextUnit,
+    text: &'a str,
+    token_pos: usize,
+    tokens: &'a [Token],
+    events: &'a mut [Event],
+}
+
-pub(super) fn process<'a, S: Sink<'a>>(builder: &mut S, tokens: &[Token], mut events: Vec<Event>) {
-    fn tombstone() -> Event {
-        Event::Start { kind: TOMBSTONE, forward_parent: None }
+impl<'a, S: Sink> EventProcessor<'a, S> {
+    pub(super) fn new(sink: S, text: &'a str, tokens: &'a[Token], events: &'a mut [Event]) -> EventProcessor<'a, S> {
+        EventProcessor {
+            sink,
+            text_pos: 0.into(),
+            text,
+            token_pos: 0,
+            tokens,
+            events
+        }
     }
-    let eat_ws = |idx: &mut usize, builder: &mut S| {
-        while let Some(token) = tokens.get(*idx) {
+
+    pub(super) fn process(mut self) -> S {
+        fn tombstone() -> Event {
+            Event::Start { kind: TOMBSTONE, forward_parent: None }
+        }
+        let mut depth = 0;
+        let mut forward_parents = Vec::new();
+
+        for i in 0..self.events.len() {
+            match mem::replace(&mut self.events[i], tombstone()) {
+                Event::Start {
+                    kind: TOMBSTONE, ..
+                } => (),
+
+                Event::Start { kind, forward_parent } => {
+                    forward_parents.push(kind);
+                    let mut idx = i;
+                    let mut fp = forward_parent;
+                    while let Some(fwd) = fp {
+                        idx += fwd as usize;
+                        fp = match mem::replace(&mut self.events[idx], tombstone()) {
+                            Event::Start {
+                                kind,
+                                forward_parent,
+                            } => {
+                                forward_parents.push(kind);
+                                forward_parent
+                            },
+                            _ => unreachable!(),
+                        };
+                    }
+                    for kind in forward_parents.drain(..).rev() {
+                        if depth > 0 {
+                            self.eat_ws();
+                        }
+                        depth += 1;
+                        self.sink.start_internal(kind);
+                    }
+                }
+                Event::Finish => {
+                    depth -= 1;
+                    if depth == 0 {
+                        self.eat_ws();
+                    }
+
+                    self.sink.finish_internal();
+                }
+                Event::Token {
+                    kind,
+                    mut n_raw_tokens,
+                } => {
+                    self.eat_ws();
+                    let mut len = 0.into();
+                    for _ in 0..n_raw_tokens {
+                        len += self.tokens[self.token_pos].len;
+                    }
+                    self.leaf(kind, len, n_raw_tokens as usize);
+                }
+                Event::Error { msg } => self.sink.error(msg, self.text_pos),
+            }
+        }
+        self.sink
+    }
+
+    fn eat_ws(&mut self) {
+        while let Some(&token) = self.tokens.get(self.token_pos) {
             if !token.kind.is_trivia() {
                 break;
             }
-            builder.leaf(token.kind, token.len);
-            *idx += 1
-        }
-    };
-
-    let events: &mut [Event] = &mut events;
-    let mut depth = 0;
-    let mut forward_parents = Vec::new();
-    let mut next_tok_idx = 0;
-    for i in 0..events.len() {
-        match mem::replace(&mut events[i], tombstone()) {
-            Event::Start {
-                kind: TOMBSTONE, ..
-            } => (),
-
-            Event::Start { kind, forward_parent } => {
-                forward_parents.push(kind);
-                let mut idx = i;
-                let mut fp = forward_parent;
-                while let Some(fwd) = fp {
-                    idx += fwd as usize;
-                    fp = match mem::replace(&mut events[idx], tombstone()) {
-                        Event::Start {
-                            kind,
-                            forward_parent,
-                        } => {
-                            forward_parents.push(kind);
-                            forward_parent
-                        },
-                        _ => unreachable!(),
-                    };
-                }
-                for kind in forward_parents.drain(..).rev() {
-                    if depth > 0 {
-                        eat_ws(&mut next_tok_idx, builder);
-                    }
-                    depth += 1;
-                    builder.start_internal(kind);
-                }
-            }
-            Event::Finish => {
-                depth -= 1;
-                if depth == 0 {
-                    eat_ws(&mut next_tok_idx, builder);
-                }
-
-                builder.finish_internal();
-            }
-            Event::Token {
-                kind,
-                mut n_raw_tokens,
-            } => {
-                eat_ws(&mut next_tok_idx, builder);
-                let mut len = 0.into();
-                for _ in 0..n_raw_tokens {
-                    len += tokens[next_tok_idx].len;
-                    next_tok_idx += 1;
-                }
-                builder.leaf(kind, len);
-            }
-            Event::Error { msg } => builder.error(msg),
+            self.leaf(token.kind, token.len, 1);
         }
     }
-}
+
+    fn leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) {
+        let range = TextRange::offset_len(self.text_pos, len);
+        let text: SmolStr = self.text[range].into();
+        self.text_pos += len;
+        self.token_pos += n_tokens;
+        self.sink.leaf(kind, text);
+    }
 }
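
The core of process() is the forward_parent walk: a Start event may point at a later Start event that should become its parent, and the whole chain is collected and then replayed outermost-first. The sketch below is a standalone model of that idea, not code from this commit; StartEvent and resolve_start_order are made-up names used only for illustration.

    // Minimal standalone model of the forward_parent walk in process().
    struct StartEvent {
        kind: &'static str,
        // Relative offset to a later Start event that should become this node's parent.
        forward_parent: Option<u32>,
    }

    /// Starting at `i`, follow the forward_parent chain and return the node kinds
    /// in the order they must be opened: outermost parent first, `events[i]` last.
    fn resolve_start_order(events: &[StartEvent], i: usize) -> Vec<&'static str> {
        let mut kinds = Vec::new();
        let mut idx = i;
        kinds.push(events[idx].kind);
        let mut fp = events[idx].forward_parent;
        while let Some(fwd) = fp {
            idx += fwd as usize;
            kinds.push(events[idx].kind);
            fp = events[idx].forward_parent;
        }
        // The chain is collected innermost-first, so reverse it before opening nodes.
        kinds.reverse();
        kinds
    }

    fn main() {
        // The parser first produced PATH, then decided it is really wrapped in a
        // PATH_EXPR: the PATH event points one slot forward to the PATH_EXPR event.
        let events = vec![
            StartEvent { kind: "PATH", forward_parent: Some(1) },
            StartEvent { kind: "PATH_EXPR", forward_parent: None },
        ];
        assert_eq!(resolve_start_order(&events, 0), vec!["PATH_EXPR", "PATH"]);
    }
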
@@ -4,45 +4,44 @@
 use std::cell::Cell;

 use {
+    TextUnit, SmolStr,
     lexer::Token,
     parser_api::Parser,
     parser_impl::{
-        event::{process, Event},
+        event::{EventProcessor, Event},
         input::{InputPosition, ParserInput},
     },
-    TextUnit,
 };

 use SyntaxKind::{self, EOF, TOMBSTONE};

-pub(crate) trait Sink<'a> {
+pub(crate) trait Sink {
     type Tree;

-    fn new(text: &'a str) -> Self;
-
-    fn leaf(&mut self, kind: SyntaxKind, len: TextUnit);
+    fn leaf(&mut self, kind: SyntaxKind, text: SmolStr);
     fn start_internal(&mut self, kind: SyntaxKind);
     fn finish_internal(&mut self);
-    fn error(&mut self, err: String);
+    fn error(&mut self, message: String, offset: TextUnit);
     fn finish(self) -> Self::Tree;
 }

 /// Parse a sequence of tokens into the representative node tree
-pub(crate) fn parse_with<'a, S: Sink<'a>>(
-    text: &'a str,
+pub(crate) fn parse_with<S: Sink>(
+    sink: S,
+    text: &str,
     tokens: &[Token],
     parser: fn(&mut Parser),
 ) -> S::Tree {
-    let events = {
+    let mut events = {
         let input = input::ParserInput::new(text, tokens);
         let parser_impl = ParserImpl::new(&input);
         let mut parser_api = Parser(parser_impl);
         parser(&mut parser_api);
         parser_api.0.into_events()
     };
-    let mut sink = S::new(text);
-    process(&mut sink, tokens, events);
-    sink.finish()
+    EventProcessor::new(sink, text, tokens, &mut events)
+        .process()
+        .finish()
 }

 /// Implementation details of `Parser`, extracted
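
The reshaped Sink trait no longer constructs itself from the source text; EventProcessor slices the text and tracks offsets, so an implementation only consumes what it is handed. As a rough, hypothetical illustration (TestSink is not part of this change, and the snippet assumes it lives inside the crate so that Sink, SyntaxKind, SmolStr and TextUnit are importable), a sink that just records a trace could look like:

    // Hypothetical sink against the new trait shape from this commit.
    use {parser_impl::Sink, SmolStr, SyntaxKind, TextUnit};

    struct TestSink {
        trace: Vec<String>,
        errors: Vec<(String, TextUnit)>,
    }

    impl Sink for TestSink {
        type Tree = (Vec<String>, Vec<(String, TextUnit)>);

        fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) {
            // The processor already sliced the token text; no &str or position is needed here.
            self.trace.push(format!("leaf {:?} {:?}", kind, text));
        }
        fn start_internal(&mut self, kind: SyntaxKind) {
            self.trace.push(format!("start {:?}", kind));
        }
        fn finish_internal(&mut self) {
            self.trace.push("finish".to_string());
        }
        fn error(&mut self, message: String, offset: TextUnit) {
            // Errors now arrive with an absolute offset instead of the sink tracking one.
            self.errors.push((message, offset));
        }
        fn finish(self) -> Self::Tree {
            (self.trace, self.errors)
        }
    }

GreenBuilder below is the real implementation of this trait; the toy version above only makes the narrower data flow visible.
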
@@ -84,7 +84,8 @@ fn reparse_block<'node>(
         return None;
     }
     let (green, new_errors) =
-        parser_impl::parse_with::<yellow::GreenBuilder>(
+        parser_impl::parse_with(
+            yellow::GreenBuilder::new(),
             &text, &tokens, reparser,
         );
     Some((node, green, new_errors))

@@ -1,33 +1,29 @@
 use rowan::GreenNodeBuilder;
 use {
+    TextUnit, SmolStr,
     parser_impl::Sink,
     yellow::{GreenNode, SyntaxError, RaTypes},
-    SyntaxKind, TextRange, TextUnit,
+    SyntaxKind,
 };

-pub(crate) struct GreenBuilder<'a> {
-    text: &'a str,
-    pos: TextUnit,
+pub(crate) struct GreenBuilder {
     errors: Vec<SyntaxError>,
     inner: GreenNodeBuilder<RaTypes>,
 }

-impl<'a> Sink<'a> for GreenBuilder<'a> {
-    type Tree = (GreenNode, Vec<SyntaxError>);
-
-    fn new(text: &'a str) -> Self {
+impl GreenBuilder {
+    pub(crate) fn new() -> GreenBuilder {
         GreenBuilder {
-            text,
-            pos: 0.into(),
             errors: Vec::new(),
             inner: GreenNodeBuilder::new(),
         }
     }
+}

-    fn leaf(&mut self, kind: SyntaxKind, len: TextUnit) {
-        let range = TextRange::offset_len(self.pos, len);
-        self.pos += len;
-        let text = self.text[range].into();
+impl Sink for GreenBuilder {
+    type Tree = (GreenNode, Vec<SyntaxError>);
+
+    fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) {
         self.inner.leaf(kind, text);
     }

@@ -39,11 +35,9 @@ fn finish_internal(&mut self) {
         self.inner.finish_internal();
     }

-    fn error(&mut self, message: String) {
-        self.errors.push(SyntaxError {
-            msg: message,
-            offset: self.pos,
-        })
+    fn error(&mut self, message: String, offset: TextUnit) {
+        let error = SyntaxError { msg: message, offset };
+        self.errors.push(error)
     }

     fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
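
Taken together with the lib.rs and reparsing hunks above, the builder is now constructed at the call site and threaded through parse_with by value. A minimal sketch of that wiring, mirroring the updated File::parse and assuming the crate-internal modules shown in this diff (tokenize, grammar, parser_impl, yellow) are in scope:

    // Sketch only; the function name parse_to_green is illustrative.
    fn parse_to_green(text: &str) -> (yellow::GreenNode, Vec<yellow::SyntaxError>) {
        let tokens = tokenize(text);
        // The caller owns the sink: build it, hand it to parse_with, get the tree back.
        parser_impl::parse_with(
            yellow::GreenBuilder::new(),
            text, &tokens, grammar::root,
        )
    }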