Move doc comment desugaring into the parser.
parent e2b3fec778
commit 7ae083383d
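Here, "desugaring" means lowering a `///` doc comment into the equivalent `#[doc = "..."]` attribute tokens so that macro matchers can treat it like any other attribute; with this change that lowering happens in the Parser (behind the new `desugar_doc_comments` flag, enabled by `macro_parser::parse`) rather than in the TtReader. The standalone sketch below is illustrative only and is not part of this diff: it shows the surface equivalence the flag relies on, with both invocations matching the same `#[$attr:meta]` rule.

    // Illustrative sketch only -- not code from this commit.
    // With doc comment desugaring, the matcher sees `/// ...` as an
    // ordinary `#[doc = "..."]` attribute, so both invocations below
    // match the same rule.
    macro_rules! with_attr {
        (#[$attr:meta] $item:item) => { $item };
    }

    with_attr! {
        /// Some documentation.
        fn documented() {}
    }

    with_attr! {
        #[doc = " Some documentation."]
        fn documented_explicitly() {}
    }

    fn main() {
        documented();
        documented_explicitly();
    }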
src/libsyntax
@@ -279,7 +279,7 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
 }
 
 pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
-    let mut parser = Parser::new(sess, Box::new(rdr));
+    let mut parser = Parser::new_with_doc_flag(sess, Box::new(rdr), true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), None, parser.span.lo));
 
     loop {
@@ -12,9 +12,7 @@ use self::LockstepIterSize::*;
 use ast::Ident;
 use errors::{Handler, DiagnosticBuilder};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use parse::token::{DocComment, MatchNt, SubstNt};
-use parse::token::{Token, NtIdent};
-use parse::token;
+use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
 use parse::lexer::TokenAndSpan;
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{self, TokenTree};
@@ -48,7 +46,6 @@ pub struct TtReader<'a> {
     pub cur_span: Span,
     pub next_tok: Option<TokenAndSpan>,
     /// Transform doc comments. Only useful in macro invocations
-    pub desugar_doc_comments: bool,
     pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
 }
 
@@ -59,20 +56,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
                      interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                      src: Vec<tokenstream::TokenTree>)
                      -> TtReader {
-    new_tt_reader_with_doc_flag(sp_diag, interp, src, false)
-}
-
-/// The extra `desugar_doc_comments` flag enables reading doc comments
-/// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
-///
-/// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
-/// (and should) be None.
-pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
-                                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                                   src: Vec<tokenstream::TokenTree>,
-                                   desugar_doc_comments: bool)
-                                   -> TtReader {
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: SmallVector::one(TtFrame {
@@ -91,7 +74,6 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
         },
         repeat_idx: Vec::new(),
         repeat_len: Vec::new(),
-        desugar_doc_comments: desugar_doc_comments,
         /* dummy values, never read: */
         cur_tok: token::Eof,
         cur_span: DUMMY_SP,
@@ -312,14 +294,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
-            TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
-                r.stack.push(TtFrame {
-                    forest: TokenTree::Token(sp, DocComment(name)),
-                    idx: 0,
-                    dotdotdoted: false,
-                    sep: None
-                });
-            }
             TokenTree::Token(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
@@ -22,7 +22,7 @@ use std::char;
 use std::mem::replace;
 use std::rc::Rc;
 
-pub use ext::tt::transcribe::{TtReader, new_tt_reader, new_tt_reader_with_doc_flag};
+pub use ext::tt::transcribe::{TtReader, new_tt_reader};
 
 pub mod comments;
 mod unicode_chars;
@@ -211,6 +211,7 @@ pub struct Parser<'a> {
     pub root_module_name: Option<String>,
     pub expected_tokens: Vec<TokenType>,
     pub tts: Vec<(TokenTree, usize)>,
+    pub desugar_doc_comments: bool,
 }
 
 #[derive(PartialEq, Eq, Clone)]
@@ -275,6 +276,11 @@ impl From<P<Expr>> for LhsExpr {
 
 impl<'a> Parser<'a> {
     pub fn new(sess: &'a ParseSess, rdr: Box<Reader+'a>) -> Self {
+        Parser::new_with_doc_flag(sess, rdr, false)
+    }
+
+    pub fn new_with_doc_flag(sess: &'a ParseSess, rdr: Box<Reader+'a>, desugar_doc_comments: bool)
+                             -> Self {
         let mut parser = Parser {
             reader: rdr,
             sess: sess,
@@ -294,6 +300,7 @@ impl<'a> Parser<'a> {
             root_module_name: None,
             expected_tokens: Vec::new(),
             tts: Vec::new(),
+            desugar_doc_comments: desugar_doc_comments,
         };
 
         let tok = parser.next_tok();
@@ -326,6 +333,10 @@ impl<'a> Parser<'a> {
         loop {
             let nt = match tok.tok {
                 token::Interpolated(ref nt) => nt.clone(),
+                token::DocComment(name) if self.desugar_doc_comments => {
+                    self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
+                    continue 'outer
+                }
                 _ => return tok,
             };
             match *nt {
@@ -214,11 +214,9 @@ impl TokenTree {
                  mtch: &[TokenTree],
                  tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
+        let diag = &cx.parse_sess().span_diagnostic;
         // `None` is because we're not interpolating
-        let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
-                                                         None,
-                                                         tts.iter().cloned().collect(),
-                                                         true);
+        let arg_rdr = lexer::new_tt_reader(diag, None, tts.iter().cloned().collect());
         macro_parser::parse(cx.parse_sess(), arg_rdr, mtch)
     }
 