syntax: Use Token in StringReader and TokenTreesReader

Author: Vadim Petrochenkov
Date:   2019-06-05 00:02:59 +03:00
Parent: e0127dbf81
Commit: c0c57acd7b
3 changed files with 37 additions and 51 deletions
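
The whole diff amounts to one data-shape change: instead of caching the upcoming token as two parallel fields (a kind and a span), both readers now carry a single `Token` that bundles the two. A minimal standalone sketch of that shape, using stand-in types rather than the real libsyntax definitions:

#[derive(Clone, Debug)]
enum TokenKind {
    Eof,
    // ... other kinds elided
}

#[derive(Clone, Copy, Debug)]
struct Span {
    lo: u32,
    hi: u32,
}

#[derive(Clone, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
}

// Before this commit: two halves that had to be kept in sync by hand.
struct ReaderBefore {
    peek_tok: TokenKind,
    peek_span: Span,
}

// After this commit: one field that is cloned, borrowed, or assigned as a unit.
struct ReaderAfter {
    peek_token: Token,
}

fn main() {
    let before = ReaderBefore { peek_tok: TokenKind::Eof, peek_span: Span { lo: 0, hi: 0 } };
    let after = ReaderAfter {
        peek_token: Token { kind: before.peek_tok.clone(), span: before.peek_span },
    };
    println!("{:?}", after.peek_token);
}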

View File

@@ -234,7 +234,7 @@ fn write_token<W: Writer>(&mut self,
  // reference or dereference operator or a reference or pointer type, instead of the
  // bit-and or multiplication operator.
  token::BinOp(token::And) | token::BinOp(token::Star)
- if self.lexer.peek() != token::Whitespace => Class::RefKeyWord,
+ if self.lexer.peek() != &token::Whitespace => Class::RefKeyWord,
  // Consider this as part of a macro invocation if there was a
  // leading identifier.
@@ -280,9 +280,9 @@ fn write_token<W: Writer>(&mut self,
  // as an attribute.
  // Case 1: #![inner_attribute]
- if self.lexer.peek() == token::Not {
+ if self.lexer.peek() == &token::Not {
  self.try_next_token()?; // NOTE: consumes `!` token!
- if self.lexer.peek() == token::OpenDelim(token::Bracket) {
+ if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
  self.in_attribute = true;
  out.enter_span(Class::Attribute)?;
  }
@@ -292,7 +292,7 @@ fn write_token<W: Writer>(&mut self,
  }
  // Case 2: #[outer_attribute]
- if self.lexer.peek() == token::OpenDelim(token::Bracket) {
+ if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
  self.in_attribute = true;
  out.enter_span(Class::Attribute)?;
  }
@@ -341,7 +341,7 @@ fn write_token<W: Writer>(&mut self,
  if self.in_macro_nonterminal {
  self.in_macro_nonterminal = false;
  Class::MacroNonTerminal
- } else if self.lexer.peek() == token::Not {
+ } else if self.lexer.peek() == &token::Not {
  self.in_macro = true;
  Class::Macro
  } else {
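
In the highlighting code above, the only semantic shift is that `self.lexer.peek()` now yields `&Token` instead of an owned `Token`, so each comparison gains a `&` on the right-hand side. For those comparisons to stay this terse, `Token` presumably compares directly against a bare `TokenKind`; the sketch below illustrates that pattern with stand-in types (the cross-type `PartialEq` impl is an inference from the call sites, not something shown in this diff):

#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind {
    Not,
    Whitespace,
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: (u32, u32),
}

// Lets a peeked `&Token` be compared against `&TokenKind` directly,
// which is what keeps call sites like `peek() == &token::Not` short.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs
    }
}

struct Lexer {
    peeked: Token,
}

impl Lexer {
    // Returning a reference avoids the copy the old owned-`Token` peek made.
    fn peek(&self) -> &Token {
        &self.peeked
    }
}

fn main() {
    let lexer = Lexer { peeked: Token { kind: TokenKind::Not, span: (0, 1) } };
    if lexer.peek() == &TokenKind::Not {
        println!("next token is `!` at {:?}", lexer.peek().span);
    }
    assert!(lexer.peek() != &TokenKind::Whitespace);
}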

View File

@@ -12,7 +12,6 @@
  use std::borrow::Cow;
  use std::char;
  use std::iter;
- use std::mem::replace;
  use rustc_data_structures::sync::Lrc;
  use log::debug;
@@ -41,8 +40,7 @@ pub struct StringReader<'a> {
  /// Stop reading src at this index.
  crate end_src_index: usize,
  // cached:
- peek_tok: TokenKind,
- peek_span: Span,
+ peek_token: Token,
  peek_span_src_raw: Span,
  fatal_errs: Vec<DiagnosticBuilder<'a>>,
  // cache a direct reference to the source text, so that we don't have to
@@ -90,10 +88,7 @@ fn next_token(&mut self) -> Token where Self: Sized {
  /// Returns the next token. EFFECT: advances the string_reader.
  pub fn try_next_token(&mut self) -> Result<Token, ()> {
  assert!(self.fatal_errs.is_empty());
- let ret_val = Token {
- kind: replace(&mut self.peek_tok, token::Whitespace),
- span: self.peek_span,
- };
+ let ret_val = self.peek_token.clone();
  self.advance_token()?;
  Ok(ret_val)
  }
@@ -158,7 +153,7 @@ fn fail_unterminated_raw_string(&self, pos: BytePos, hash_count: u16) {
  }
  fn fatal(&self, m: &str) -> FatalError {
- self.fatal_span(self.peek_span, m)
+ self.fatal_span(self.peek_token.span, m)
  }
  crate fn emit_fatal_errors(&mut self) {
@@ -179,12 +174,8 @@ pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
  buffer
  }
- pub fn peek(&self) -> Token {
- // FIXME(pcwalton): Bad copy!
- Token {
- kind: self.peek_tok.clone(),
- span: self.peek_span,
- }
+ pub fn peek(&self) -> &Token {
+ &self.peek_token
  }
  /// For comments.rs, which hackily pokes into next_pos and ch
@@ -215,8 +206,7 @@ fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile
  source_file,
  end_src_index: src.len(),
  // dummy values; not read
- peek_tok: token::Eof,
- peek_span: syntax_pos::DUMMY_SP,
+ peek_token: Token { kind: token::Eof, span: syntax_pos::DUMMY_SP },
  peek_span_src_raw: syntax_pos::DUMMY_SP,
  src,
  fatal_errs: Vec::new(),
@@ -321,29 +311,28 @@ fn err_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) {
  self.err_span_(from_pos, to_pos, &m[..]);
  }
- /// Advance peek_tok and peek_span to refer to the next token, and
+ /// Advance peek_token to refer to the next token, and
  /// possibly update the interner.
  fn advance_token(&mut self) -> Result<(), ()> {
  match self.scan_whitespace_or_comment() {
  Some(comment) => {
  self.peek_span_src_raw = comment.span;
- self.peek_span = comment.span;
- self.peek_tok = comment.kind;
+ self.peek_token = comment;
  }
  None => {
  if self.is_eof() {
- self.peek_tok = token::Eof;
  let (real, raw) = self.mk_sp_and_raw(
  self.source_file.end_pos,
  self.source_file.end_pos,
  );
- self.peek_span = real;
+ self.peek_token = Token { kind: token::Eof, span: real };
  self.peek_span_src_raw = raw;
  } else {
  let start_bytepos = self.pos;
- self.peek_tok = self.next_token_inner()?;
+ let kind = self.next_token_inner()?;
  let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos);
- self.peek_span = real;
+ self.peek_token = Token { kind, span: real };
  self.peek_span_src_raw = raw;
  };
  }
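
With the `StringReader` hunks above, `peek_tok`/`peek_span` collapse into one cached `peek_token`, so `try_next_token` reduces to a clone plus an advance, and `advance_token` writes a freshly built `Token { kind, span }` in a single assignment (no more `mem::replace` on the kind with a separate span copy). A self-contained sketch of that flow over a toy word list, with stand-in types rather than the real lexer:

#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Ident(String),
    Eof,
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: (usize, usize),
}

struct ReaderSketch {
    words: Vec<String>,
    pos: usize,
    peek_token: Token, // the single cached lookahead token
}

impl ReaderSketch {
    fn new(words: Vec<String>) -> Self {
        let mut reader = ReaderSketch {
            words,
            pos: 0,
            // dummy value, immediately overwritten (as with DUMMY_SP above)
            peek_token: Token { kind: TokenKind::Eof, span: (0, 0) },
        };
        reader.advance_token();
        reader
    }

    // Returns the cached token and advances past it.
    fn try_next_token(&mut self) -> Token {
        let ret_val = self.peek_token.clone();
        self.advance_token();
        ret_val
    }

    // Rebuilds `peek_token` from the next unit of input in one assignment.
    fn advance_token(&mut self) {
        self.peek_token = if self.pos < self.words.len() {
            let span = (self.pos, self.pos + 1);
            let kind = TokenKind::Ident(self.words[self.pos].clone());
            self.pos += 1;
            Token { kind, span }
        } else {
            Token { kind: TokenKind::Eof, span: (self.pos, self.pos) }
        };
    }
}

fn main() {
    let mut reader = ReaderSketch::new(vec!["fn".into(), "main".into()]);
    while reader.peek_token.kind != TokenKind::Eof {
        let token = reader.try_next_token();
        println!("{:?} at {:?}", token.kind, token.span);
    }
}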

View File

@@ -2,15 +2,15 @@
  use crate::print::pprust::token_to_string;
  use crate::parse::lexer::{StringReader, UnmatchedBrace};
- use crate::parse::{token, PResult};
+ use crate::parse::token::{self, Token};
+ use crate::parse::PResult;
  use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
  impl<'a> StringReader<'a> {
  crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
  let mut tt_reader = TokenTreesReader {
  string_reader: self,
- token: token::Eof,
- span: syntax_pos::DUMMY_SP,
+ token: token::Token { kind: token::Eof, span: syntax_pos::DUMMY_SP },
  open_braces: Vec::new(),
  unmatched_braces: Vec::new(),
  matching_delim_spans: Vec::new(),
@@ -23,8 +23,7 @@ impl<'a> StringReader<'a> {
  struct TokenTreesReader<'a> {
  string_reader: StringReader<'a>,
- token: token::TokenKind,
- span: Span,
+ token: Token,
  /// Stack of open delimiters and their spans. Used for error message.
  open_braces: Vec<(token::DelimToken, Span)>,
  unmatched_braces: Vec<UnmatchedBrace>,
@@ -52,7 +51,7 @@ fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
  fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
  let mut tts = vec![];
  loop {
- if let token::CloseDelim(..) = self.token {
+ if let token::CloseDelim(..) = self.token.kind {
  return TokenStream::new(tts);
  }
@@ -68,11 +67,11 @@ fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
  fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  let sm = self.string_reader.sess.source_map();
- match self.token {
+ match self.token.kind {
  token::Eof => {
  let msg = "this file contains an un-closed delimiter";
  let mut err = self.string_reader.sess.span_diagnostic
- .struct_span_err(self.span, msg);
+ .struct_span_err(self.token.span, msg);
  for &(_, sp) in &self.open_braces {
  err.span_label(sp, "un-closed delimiter");
  }
@@ -102,10 +101,10 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  },
  token::OpenDelim(delim) => {
  // The span for beginning of the delimited section
- let pre_span = self.span;
+ let pre_span = self.token.span;
  // Parse the open delimiter.
- self.open_braces.push((delim, self.span));
+ self.open_braces.push((delim, self.token.span));
  self.real_token();
  // Parse the token trees within the delimiters.
@@ -114,9 +113,9 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  let tts = self.parse_token_trees_until_close_delim();
  // Expand to cover the entire delimited token tree
- let delim_span = DelimSpan::from_pair(pre_span, self.span);
+ let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
- match self.token {
+ match self.token.kind {
  // Correct delimiter.
  token::CloseDelim(d) if d == delim => {
  let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
@@ -126,7 +125,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  self.matching_delim_spans.clear();
  } else {
  self.matching_delim_spans.push(
- (open_brace, open_brace_span, self.span),
+ (open_brace, open_brace_span, self.token.span),
  );
  }
  // Parse the close delimiter.
@@ -136,16 +135,16 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  token::CloseDelim(other) => {
  let mut unclosed_delimiter = None;
  let mut candidate = None;
- if self.last_unclosed_found_span != Some(self.span) {
+ if self.last_unclosed_found_span != Some(self.token.span) {
  // do not complain about the same unclosed delimiter multiple times
- self.last_unclosed_found_span = Some(self.span);
+ self.last_unclosed_found_span = Some(self.token.span);
  // This is a conservative error: only report the last unclosed
  // delimiter. The previous unclosed delimiters could actually be
  // closed! The parser just hasn't gotten to them yet.
  if let Some(&(_, sp)) = self.open_braces.last() {
  unclosed_delimiter = Some(sp);
  };
- if let Some(current_padding) = sm.span_to_margin(self.span) {
+ if let Some(current_padding) = sm.span_to_margin(self.token.span) {
  for (brace, brace_span) in &self.open_braces {
  if let Some(padding) = sm.span_to_margin(*brace_span) {
  // high likelihood of these two corresponding
@@ -159,7 +158,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  self.unmatched_braces.push(UnmatchedBrace {
  expected_delim: tok,
  found_delim: other,
- found_span: self.span,
+ found_span: self.token.span,
  unclosed_span: unclosed_delimiter,
  candidate_span: candidate,
  });
@@ -198,12 +197,12 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  let token_str = token_to_string(&self.token);
  let msg = format!("unexpected close delimiter: `{}`", token_str);
  let mut err = self.string_reader.sess.span_diagnostic
- .struct_span_err(self.span, &msg);
- err.span_label(self.span, "unexpected close delimiter");
+ .struct_span_err(self.token.span, &msg);
+ err.span_label(self.token.span, "unexpected close delimiter");
  Err(err)
  },
  _ => {
- let tt = TokenTree::token(self.span, self.token.clone());
+ let tt = TokenTree::Token(self.token.clone());
  // Note that testing for joint-ness here is done via the raw
  // source span as the joint-ness is a property of the raw source
  // rather than wanting to take `override_span` into account.
@@ -219,8 +218,6 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
  }
  fn real_token(&mut self) {
- let t = self.string_reader.real_token();
- self.token = t.kind;
- self.span = t.span;
+ self.token = self.string_reader.real_token();
  }
  }
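
On the `TokenTreesReader` side, the separate `token: TokenKind` and `span: Span` fields become one `token: Token`, so the reader matches on `self.token.kind`, reads diagnostic spans from `self.token.span`, and `real_token` becomes a single assignment. The delimiter-matching skeleton below mimics that structure on stand-in types; it is only a sketch, since the real reader builds a `TokenStream` and much richer diagnostics:

#[derive(Clone, Copy, Debug)]
enum TokenKind {
    OpenParen,
    CloseParen,
    Other,
    Eof,
}

#[derive(Clone, Copy, Debug)]
struct Token {
    kind: TokenKind,
    span: (usize, usize),
}

struct TreeReaderSketch {
    tokens: Vec<Token>,
    pos: usize,
    token: Token,                     // current token: kind and span travel together
    open_parens: Vec<(usize, usize)>, // spans of still-unclosed `(`
}

impl TreeReaderSketch {
    // One assignment instead of copying `kind` and `span` into separate fields.
    fn real_token(&mut self) {
        self.token = self.tokens.get(self.pos).copied().unwrap_or(Token {
            kind: TokenKind::Eof,
            span: (self.pos, self.pos),
        });
        self.pos += 1;
    }

    fn check_delims(&mut self) -> Result<(), String> {
        loop {
            self.real_token();
            // Match on the kind, report with the span of the very same token.
            match self.token.kind {
                TokenKind::OpenParen => self.open_parens.push(self.token.span),
                TokenKind::CloseParen => {
                    if self.open_parens.pop().is_none() {
                        return Err(format!(
                            "unexpected close delimiter at {:?}",
                            self.token.span
                        ));
                    }
                }
                TokenKind::Other => {}
                TokenKind::Eof => {
                    return match self.open_parens.pop() {
                        Some(span) => Err(format!("un-closed delimiter at {:?}", span)),
                        None => Ok(()),
                    };
                }
            }
        }
    }
}

fn main() {
    let tokens = vec![
        Token { kind: TokenKind::OpenParen, span: (0, 1) },
        Token { kind: TokenKind::Other, span: (1, 2) },
        Token { kind: TokenKind::CloseParen, span: (2, 3) },
    ];
    let mut reader = TreeReaderSketch {
        tokens,
        pos: 0,
        token: Token { kind: TokenKind::Eof, span: (0, 0) }, // dummy start value
        open_parens: Vec::new(),
    };
    println!("delimiter check: {:?}", reader.check_delims());
}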