2019-02-20 06:47:32 -06:00
|
|
|
use crate::{
|
|
|
|
SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit,
|
2019-02-20 12:50:07 -06:00
|
|
|
parsing::{
|
2019-02-20 13:58:56 -06:00
|
|
|
TokenSource,
|
2019-02-20 12:50:07 -06:00
|
|
|
lexer::Token,
|
|
|
|
},
|
2019-02-20 06:47:32 -06:00
|
|
|
};
|
2018-02-04 05:35:59 -06:00
|
|
|
|
2019-02-20 12:50:07 -06:00
|
|
|
impl<'t> TokenSource for ParserInput<'t> {
|
2019-02-20 13:58:56 -06:00
|
|
|
fn token_kind(&self, pos: usize) -> SyntaxKind {
|
|
|
|
if !(pos < self.tokens.len()) {
|
2019-02-20 12:50:07 -06:00
|
|
|
return EOF;
|
|
|
|
}
|
2019-02-20 13:58:56 -06:00
|
|
|
self.tokens[pos].kind
|
2019-02-20 12:50:07 -06:00
|
|
|
}
|
2019-02-20 13:58:56 -06:00
|
|
|
fn is_token_joint_to_next(&self, pos: usize) -> bool {
|
|
|
|
if !(pos + 1 < self.tokens.len()) {
|
2019-02-20 12:50:07 -06:00
|
|
|
return true;
|
|
|
|
}
|
2019-02-20 13:58:56 -06:00
|
|
|
self.start_offsets[pos] + self.tokens[pos].len == self.start_offsets[pos + 1]
|
2019-02-20 12:50:07 -06:00
|
|
|
}
|
2019-02-20 13:58:56 -06:00
|
|
|
fn is_keyword(&self, pos: usize, kw: &str) -> bool {
|
|
|
|
if !(pos < self.tokens.len()) {
|
2019-02-20 12:50:07 -06:00
|
|
|
return false;
|
|
|
|
}
|
2019-02-20 13:58:56 -06:00
|
|
|
let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
|
2019-02-20 12:50:07 -06:00
|
|
|
|
|
|
|
self.text[range] == *kw
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-04 05:35:59 -06:00
|
|
|
/// Input to the parser: the source text plus a trivia-free token stream
/// with the start offset of every retained token.
pub(crate) struct ParserInput<'t> {
    // Full source text the tokens were lexed from; used by `is_keyword`
    // to compare a token's text against a candidate keyword.
    text: &'t str,
    /// Start position of each token (excluding whitespace and comments).
    /// ```non-rust
    /// struct Foo;
    /// ^------^---
    /// |      |  ^-
    /// 0      7  10
    /// ```
    /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
    start_offsets: Vec<TextUnit>,
    /// Non-whitespace/comment tokens, kept in source order.
    /// ```non-rust
    /// struct Foo {}
    /// ^^^^^^ ^^^ ^^
    /// ```
    /// tokens: `[struct, Foo, {, }]`
    tokens: Vec<Token>,
}
|
|
|
|
|
|
|
|
impl<'t> ParserInput<'t> {
|
2018-12-31 07:30:37 -06:00
|
|
|
/// Generate input from tokens(expect comment and whitespace).
|
2018-02-04 05:35:59 -06:00
|
|
|
pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> {
|
|
|
|
let mut tokens = Vec::new();
|
|
|
|
let mut start_offsets = Vec::new();
|
2018-07-28 05:07:10 -05:00
|
|
|
let mut len = 0.into();
|
2018-02-04 05:35:59 -06:00
|
|
|
for &token in raw_tokens.iter() {
|
2018-07-29 07:16:07 -05:00
|
|
|
if !token.kind.is_trivia() {
|
2018-02-04 05:35:59 -06:00
|
|
|
tokens.push(token);
|
|
|
|
start_offsets.push(len);
|
|
|
|
}
|
|
|
|
len += token.len;
|
|
|
|
}
|
|
|
|
|
2019-02-08 05:49:43 -06:00
|
|
|
ParserInput { text, start_offsets, tokens }
|
2018-02-04 05:35:59 -06:00
|
|
|
}
|
|
|
|
}
|