//! A `TokenSource` implementation backed by lexed text.
//!
//! Original path: rust/crates/ra_syntax/src/parsing/text_token_source.rs
use ra_parser::Token as PToken;
use ra_parser::TokenSource;
2019-02-21 04:37:32 -06:00
use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextUnit};
2019-02-23 07:07:29 -06:00
pub(crate) struct TextTokenSource<'t> {
2019-02-21 06:24:42 -06:00
text: &'t str,
/// start position of each token(expect whitespace and comment)
/// ```non-rust
/// struct Foo;
/// ^------^---
/// | | ^-
/// 0 7 10
/// ```
/// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
start_offsets: Vec<TextUnit>,
/// non-whitespace/comment tokens
/// ```non-rust
/// struct Foo {}
/// ^^^^^^ ^^^ ^^
/// ```
/// tokens: `[struct, Foo, {, }]`
tokens: Vec<Token>,
2019-05-25 07:31:53 -05:00
/// Current token and position
curr: (PToken, usize),
2019-02-21 06:24:42 -06:00
}
2019-02-23 07:07:29 -06:00
impl<'t> TokenSource for TextTokenSource<'t> {
2019-05-25 07:31:53 -05:00
fn current(&self) -> PToken {
2019-07-04 12:26:44 -05:00
self.curr.0
2019-02-20 12:50:07 -06:00
}
2019-05-25 07:31:53 -05:00
fn lookahead_nth(&self, n: usize) -> PToken {
mk_token(self.curr.1 + n, &self.start_offsets, &self.tokens)
}
fn bump(&mut self) {
if self.curr.0.kind == EOF {
return;
2019-02-20 12:50:07 -06:00
}
2019-05-25 07:31:53 -05:00
let pos = self.curr.1 + 1;
self.curr = (mk_token(pos, &self.start_offsets, &self.tokens), pos);
2019-02-20 12:50:07 -06:00
}
2019-05-25 07:31:53 -05:00
fn is_keyword(&self, kw: &str) -> bool {
let pos = self.curr.1;
2019-02-20 13:58:56 -06:00
if !(pos < self.tokens.len()) {
2019-02-20 12:50:07 -06:00
return false;
}
2019-02-20 13:58:56 -06:00
let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
2019-02-20 12:50:07 -06:00
self.text[range] == *kw
}
}
2019-05-25 07:31:53 -05:00
fn mk_token(pos: usize, start_offsets: &[TextUnit], tokens: &[Token]) -> PToken {
let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF);
let is_jointed_to_next = if pos + 1 < start_offsets.len() {
start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1]
} else {
false
};
PToken { kind, is_jointed_to_next }
}
2019-02-23 07:07:29 -06:00
impl<'t> TextTokenSource<'t> {
2018-12-31 07:30:37 -06:00
/// Generate input from tokens(expect comment and whitespace).
2019-02-23 07:07:29 -06:00
pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
let mut tokens = Vec::new();
let mut start_offsets = Vec::new();
2018-07-28 05:07:10 -05:00
let mut len = 0.into();
for &token in raw_tokens.iter() {
2018-07-29 07:16:07 -05:00
if !token.kind.is_trivia() {
tokens.push(token);
start_offsets.push(len);
}
len += token.len;
}
2019-05-25 07:31:53 -05:00
let first = mk_token(0, &start_offsets, &tokens);
TextTokenSource { text, start_offsets, tokens, curr: (first, 0) }
}
}