Rollup merge of #114081 - nnethercote:desugar_doc_comments-cleanups, r=petrochenkov

`desugar_doc_comments` cleanups

r? `@petrochenkov`
Matthias Krüger authored 2023-07-27 06:04:14 +02:00, committed by GitHub
commit a926c73b5b
2 changed files with 27 additions and 21 deletions


@@ -107,7 +107,7 @@ fn to_attr_token_stream(&self) -> AttrTokenStream {
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
                 .chain((0..self.num_calls).map(|_| {
-                    let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments);
+                    let token = cursor_snapshot.next();
                     (FlatToken::Token(token.0), token.1)
                 }))
                 .take(self.num_calls);
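The replay above leans on a small iterator idiom: emit a recorded start token first, then keep pulling from a cursor snapshot, capped at `num_calls` items. A minimal standalone sketch of the same `once` + `chain` + `take` pattern in plain Rust (the names below are illustrative stand-ins, not rustc types):

```rust
fn main() {
    // Stand-ins for the recorded start token and the cursor snapshot
    // (illustrative only; the real code works on rustc tokens).
    let start_token = 0;
    let mut cursor_snapshot = 1..;
    let num_calls = 4;

    // Emit the start token first, then keep pulling from the snapshot,
    // and cap the whole sequence at `num_calls` items.
    let tokens: Vec<i32> = std::iter::once(start_token)
        .chain(std::iter::from_fn(|| cursor_snapshot.next()))
        .take(num_calls)
        .collect();

    assert_eq!(tokens, vec![0, 1, 2, 3]);
}
```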


@@ -138,7 +138,6 @@ pub struct Parser<'a> {
     // Important: This must only be advanced from `bump` to ensure that
     // `token_cursor.num_next_calls` is updated properly.
     token_cursor: TokenCursor,
-    desugar_doc_comments: bool,
     /// This field is used to keep track of how many left angle brackets we have seen. This is
     /// required in order to detect extra leading left angle brackets (`<` characters) and error
     /// appropriately.
@@ -225,6 +224,9 @@ struct TokenCursor {
     // because it's the outermost token stream which never has delimiters.
     stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
 
+    // We need to desugar doc comments from `/// foo` form into `#[doc =
+    // r"foo"]` form when parsing declarative macro inputs in `parse_tt`,
+    // because some declarative macros look for `doc` attributes.
     desugar_doc_comments: bool,
 
     // Counts the number of calls to `{,inlined_}next`.
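The comment added above names the constraint this cleanup preserves: declarative macro matchers only ever see doc comments in their desugared `#[doc = ...]` form. A small self-contained illustration of that behavior, using ordinary `macro_rules!` rather than anything rustc-internal (`first_doc_line` is just an illustrative name):

```rust
// A declarative macro that looks for a `doc` attribute, like the macros the
// comment above refers to.
macro_rules! first_doc_line {
    (#[doc = $text:literal] $item:item) => {
        $text
    };
}

fn main() {
    // The matcher never sees `///` itself; by the time `parse_tt` runs, the
    // comment has been desugared to `#[doc = " Adds one."]`.
    let doc = first_doc_line! {
        /// Adds one.
        fn add_one(x: u32) -> u32 { x + 1 }
    };
    assert_eq!(doc, " Adds one.");
}
```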
@@ -255,33 +257,38 @@ struct TokenCursor {
 }
 
 impl TokenCursor {
-    fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
-        self.inlined_next(desugar_doc_comments)
+    fn next(&mut self) -> (Token, Spacing) {
+        self.inlined_next()
     }
 
     /// This always-inlined version should only be used on hot code paths.
     #[inline(always)]
-    fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
+    fn inlined_next(&mut self) -> (Token, Spacing) {
         loop {
             // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
             // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
             // removed.
             if let Some(tree) = self.tree_cursor.next_ref() {
                 match tree {
-                    &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
-                        (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
-                            let desugared = self.desugar(attr_style, data, span);
-                            self.tree_cursor.replace_prev_and_rewind(desugared);
-                            // Continue to get the first token of the desugared doc comment.
-                        }
-                        _ => {
-                            debug_assert!(!matches!(
-                                token.kind,
-                                token::OpenDelim(_) | token::CloseDelim(_)
-                            ));
-                            return (token.clone(), spacing);
-                        }
-                    },
+                    &TokenTree::Token(ref token, spacing) => {
+                        match (self.desugar_doc_comments, token) {
+                            (
+                                true,
+                                &Token { kind: token::DocComment(_, attr_style, data), span },
+                            ) => {
+                                let desugared = self.desugar(attr_style, data, span);
+                                self.tree_cursor.replace_prev_and_rewind(desugared);
+                                // Continue to get the first token of the desugared doc comment.
+                            }
+                            _ => {
+                                debug_assert!(!matches!(
+                                    token.kind,
+                                    token::OpenDelim(_) | token::CloseDelim(_)
+                                ));
+                                return (token.clone(), spacing);
+                            }
+                        }
+                    }
                     &TokenTree::Delimited(sp, delim, ref tts) => {
                         let trees = tts.clone().into_trees();
                         self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
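For context on what `self.desugar(attr_style, data, span)` produces in the hunk above: the doc comment becomes the token stream of a `doc` attribute, outer (`#[...]`) or inner (`#![...]`) depending on the comment style. A simplified sketch rendered as source text rather than rustc token trees (the real method also chooses enough `#` marks for the raw string to safely contain the body; the names below are stand-ins, not rustc types):

```rust
/// Comment style: `///` and `/** */` are outer, `//!` and `/*! */` are inner.
#[derive(Clone, Copy)]
enum AttrStyle {
    Outer,
    Inner,
}

/// Roughly what desugaring does, expressed as source text: the comment body
/// becomes the value of a `doc` attribute carried as a raw string literal.
fn desugar_doc_comment(style: AttrStyle, body: &str) -> String {
    match style {
        AttrStyle::Outer => format!("#[doc = r\"{body}\"]"),
        AttrStyle::Inner => format!("#![doc = r\"{body}\"]"),
    }
}

fn main() {
    // `/// foo` carries the body " foo" (the space after `///` is kept).
    assert_eq!(desugar_doc_comment(AttrStyle::Outer, " foo"), "#[doc = r\" foo\"]");
    assert_eq!(desugar_doc_comment(AttrStyle::Inner, " bar"), "#![doc = r\" bar\"]");
}
```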
@@ -463,7 +470,6 @@ pub fn new(
                 desugar_doc_comments,
                 break_last_token: false,
             },
-            desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
             max_angle_bracket_count: 0,
             last_unexpected_token_span: None,
@@ -1107,7 +1113,7 @@ fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
     pub fn bump(&mut self) {
         // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
-        let mut next = self.token_cursor.inlined_next(self.desugar_doc_comments);
+        let mut next = self.token_cursor.inlined_next();
         self.token_cursor.num_next_calls += 1;
         // We've retrieved an token from the underlying
         // cursor, so we no longer need to worry about
@@ -1157,7 +1163,7 @@ pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R
             let mut i = 0;
             let mut token = Token::dummy();
             while i < dist {
-                token = cursor.next(/* desugar_doc_comments */ false).0;
+                token = cursor.next().0;
                 if matches!(
                     token.kind,
                     token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)