rustc_ast: Stop using "string typing" for doc comment tokens

Explicitly store their kind and style, retrieved during lexing, in the token.
Vadim Petrochenkov 2020-07-21 22:16:19 +03:00
parent c15bae53b5
commit 46f48d31fe
20 changed files with 173 additions and 160 deletions
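
For orientation, a minimal sketch of the representation change, with local stand-ins for the `rustc_ast` items edited below (a plain `String` stands in for the interned `Symbol`; only the shape is meant to match the diff):

// Local stand-ins, illustrative only -- not the real rustc_ast definitions.
#[derive(Clone, Copy, Debug)]
enum CommentKind { Line, Block }
#[derive(Clone, Copy, Debug)]
enum AttrStyle { Outer, Inner }

// Before: "string typing" -- the token carried the whole comment text, and the
// comment kind and attribute style were re-derived from that string later on.
#[derive(Debug)]
enum OldTokenKind { DocComment(String) }

// After: kind and style are recorded once during lexing, and the string holds
// only the comment body, without its "quotes" (`///`, `/**`, ...).
#[derive(Debug)]
enum NewTokenKind { DocComment(CommentKind, AttrStyle, String) }

fn main() {
    let old = OldTokenKind::DocComment("/// doc comment".to_string());
    let new = NewTokenKind::DocComment(CommentKind::Line, AttrStyle::Outer, " doc comment".to_string());
    println!("old: {:?}\nnew: {:?}", old, new);
}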


@ -23,7 +23,7 @@ pub use GenericArgs::*;
pub use UnsafeSource::*;
use crate::ptr::P;
use crate::token::{self, DelimToken};
use crate::token::{self, CommentKind, DelimToken};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@ -2365,7 +2365,7 @@ pub enum AttrKind {
/// A doc comment (e.g. `/// ...`, `//! ...`, `/** ... */`, `/*! ... */`).
/// Doc attributes (e.g. `#[doc="..."]`) are represented with the `Normal`
/// variant (which is much less compact and thus more expensive).
DocComment(Symbol),
DocComment(CommentKind, Symbol),
}
/// `TraitRef`s appear in impls.


@ -7,7 +7,7 @@ use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Path, PathSegment};
use crate::mut_visit::visit_clobber;
use crate::ptr::P;
use crate::token::{self, Token};
use crate::token::{self, CommentKind, Token};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use rustc_data_structures::sync::Lock;
@ -169,7 +169,7 @@ impl Attribute {
pub fn has_name(&self, name: Symbol) -> bool {
match self.kind {
AttrKind::Normal(ref item) => item.path == name,
AttrKind::DocComment(_) => false,
AttrKind::DocComment(..) => false,
}
}
@ -198,7 +198,7 @@ impl Attribute {
None
}
}
AttrKind::DocComment(_) => None,
AttrKind::DocComment(..) => None,
}
}
pub fn name_or_empty(&self) -> Symbol {
@ -218,7 +218,7 @@ impl Attribute {
Some(MetaItem { kind: MetaItemKind::List(list), .. }) => Some(list),
_ => None,
},
AttrKind::DocComment(_) => None,
AttrKind::DocComment(..) => None,
}
}
@ -314,13 +314,13 @@ impl Attribute {
pub fn is_doc_comment(&self) -> bool {
match self.kind {
AttrKind::Normal(_) => false,
AttrKind::DocComment(_) => true,
AttrKind::DocComment(..) => true,
}
}
pub fn doc_str(&self) -> Option<Symbol> {
match self.kind {
AttrKind::DocComment(symbol) => Some(symbol),
AttrKind::DocComment(.., data) => Some(data),
AttrKind::Normal(ref item) if item.path == sym::doc => {
item.meta(self.span).and_then(|meta| meta.value_str())
}
@ -331,14 +331,14 @@ impl Attribute {
pub fn get_normal_item(&self) -> &AttrItem {
match self.kind {
AttrKind::Normal(ref item) => item,
AttrKind::DocComment(_) => panic!("unexpected doc comment"),
AttrKind::DocComment(..) => panic!("unexpected doc comment"),
}
}
pub fn unwrap_normal_item(self) -> AttrItem {
match self.kind {
AttrKind::Normal(item) => item,
AttrKind::DocComment(_) => panic!("unexpected doc comment"),
AttrKind::DocComment(..) => panic!("unexpected doc comment"),
}
}
@ -405,8 +405,13 @@ pub fn mk_attr_outer(item: MetaItem) -> Attribute {
mk_attr(AttrStyle::Outer, item.path, item.kind.mac_args(item.span), item.span)
}
pub fn mk_doc_comment(style: AttrStyle, comment: Symbol, span: Span) -> Attribute {
Attribute { kind: AttrKind::DocComment(comment), id: mk_attr_id(), style, span }
pub fn mk_doc_comment(
comment_kind: CommentKind,
style: AttrStyle,
data: Symbol,
span: Span,
) -> Attribute {
Attribute { kind: AttrKind::DocComment(comment_kind, data), id: mk_attr_id(), style, span }
}
pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {


@ -582,7 +582,7 @@ pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
vis.visit_path(path);
visit_mac_args(args, vis);
}
AttrKind::DocComment(_) => {}
AttrKind::DocComment(..) => {}
}
vis.visit_span(span);
}


@ -17,6 +17,12 @@ use rustc_span::{self, Span, DUMMY_SP};
use std::borrow::Cow;
use std::{fmt, mem};
#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum CommentKind {
Line,
Block,
}
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
#[derive(HashStable_Generic)]
pub enum BinOpToken {
@ -238,9 +244,10 @@ pub enum TokenKind {
Interpolated(Lrc<Nonterminal>),
// Can be expanded into several tokens.
/// A doc comment.
DocComment(Symbol),
/// A doc comment token.
/// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
/// similarly to symbols in string literal tokens.
DocComment(CommentKind, ast::AttrStyle, Symbol),
// Junk. These carry no data because we don't really care about the data
// they *would* carry, and don't really want to allocate a new ident for


@ -1,11 +1,10 @@
pub use CommentStyle::*;
use crate::ast;
use crate::ast::AttrStyle;
use crate::token::CommentKind;
use rustc_span::source_map::SourceMap;
use rustc_span::{BytePos, CharPos, FileName, Pos, Symbol};
use log::debug;
#[cfg(test)]
mod tests;
@ -28,43 +27,46 @@ pub struct Comment {
pub pos: BytePos,
}
pub fn is_line_doc_comment(s: &str) -> bool {
let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/')
|| s.starts_with("//!");
debug!("is {:?} a doc comment? {}", s, res);
res
}
pub fn is_block_doc_comment(s: &str) -> bool {
// Prevent `/**/` from being parsed as a doc comment
let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*')
|| s.starts_with("/*!"))
&& s.len() >= 5;
debug!("is {:?} a doc comment? {}", s, res);
res
}
// FIXME(#64197): Try to privatize this again.
pub fn is_doc_comment(s: &str) -> bool {
(s.starts_with("///") && is_line_doc_comment(s))
|| s.starts_with("//!")
|| (s.starts_with("/**") && is_block_doc_comment(s))
|| s.starts_with("/*!")
}
pub fn doc_comment_style(comment: Symbol) -> ast::AttrStyle {
let comment = &comment.as_str();
assert!(is_doc_comment(comment));
if comment.starts_with("//!") || comment.starts_with("/*!") {
ast::AttrStyle::Inner
} else {
ast::AttrStyle::Outer
/// For a full line comment string returns its doc comment style if it's a doc comment
/// and returns `None` if it's a regular comment.
pub fn line_doc_comment_style(line_comment: &str) -> Option<AttrStyle> {
let line_comment = line_comment.as_bytes();
assert!(line_comment.starts_with(b"//"));
match line_comment.get(2) {
// `//!` is an inner line doc comment.
Some(b'!') => Some(AttrStyle::Inner),
Some(b'/') => match line_comment.get(3) {
// `////` (more than 3 slashes) is not considered a doc comment.
Some(b'/') => None,
// Otherwise `///` is an outer line doc comment.
_ => Some(AttrStyle::Outer),
},
_ => None,
}
}
pub fn strip_doc_comment_decoration(comment: Symbol) -> String {
let comment = &comment.as_str();
/// For a full block comment string returns its doc comment style if it's a doc comment
/// and returns `None` if it's a regular comment.
pub fn block_doc_comment_style(block_comment: &str, terminated: bool) -> Option<AttrStyle> {
let block_comment = block_comment.as_bytes();
assert!(block_comment.starts_with(b"/*"));
assert!(!terminated || block_comment.ends_with(b"*/"));
match block_comment.get(2) {
// `/*!` is an inner block doc comment.
Some(b'!') => Some(AttrStyle::Inner),
Some(b'*') => match block_comment.get(3) {
// `/***` (more than 2 stars) is not considered a doc comment.
Some(b'*') => None,
// `/**/` is not considered a doc comment.
Some(b'/') if block_comment.len() == 4 => None,
// Otherwise `/**` is an outer block doc comment.
_ => Some(AttrStyle::Outer),
},
_ => None,
}
}
pub fn strip_doc_comment_decoration(data: Symbol, comment_kind: CommentKind) -> String {
/// remove whitespace-only lines from the start/end of lines
fn vertical_trim(lines: Vec<String>) -> Vec<String> {
let mut i = 0;
@ -126,26 +128,19 @@ pub fn strip_doc_comment_decoration(comment: Symbol) -> String {
}
}
// one-line comments lose their prefix
const ONELINERS: &[&str] = &["///!", "///", "//!", "//"];
for prefix in ONELINERS {
if comment.starts_with(*prefix) {
return (&comment[prefix.len()..]).to_string();
match comment_kind {
CommentKind::Line => {
let data = data.as_str();
let prefix_len = if data.starts_with('!') { 1 } else { 0 };
data[prefix_len..].to_string()
}
CommentKind::Block => {
let lines = data.as_str().lines().map(|s| s.to_string()).collect::<Vec<String>>();
let lines = vertical_trim(lines);
let lines = horizontal_trim(lines);
lines.join("\n")
}
}
if comment.starts_with("/*") {
let lines =
comment[3..comment.len() - 2].lines().map(|s| s.to_string()).collect::<Vec<String>>();
let lines = vertical_trim(lines);
let lines = horizontal_trim(lines);
return lines.join("\n");
}
panic!("not a doc-comment: {}", comment);
}
/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
@ -226,8 +221,8 @@ pub fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comme
}
}
}
rustc_lexer::TokenKind::BlockComment { terminated: _ } => {
if !is_block_doc_comment(token_text) {
rustc_lexer::TokenKind::BlockComment { terminated } => {
if block_doc_comment_style(token_text, terminated).is_none() {
let code_to_the_right = match text[pos + token.len..].chars().next() {
Some('\r' | '\n') => false,
_ => true,
@ -249,7 +244,7 @@ pub fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comme
}
}
rustc_lexer::TokenKind::LineComment => {
if !is_doc_comment(token_text) {
if line_doc_comment_style(token_text).is_none() {
comments.push(Comment {
style: if code_to_the_left { Trailing } else { Isolated },
lines: vec![token_text.to_string()],


@ -1,11 +1,18 @@
use super::*;
use crate::with_default_session_globals;
#[test]
fn line_doc_comments() {
assert!(line_doc_comment_style("///").is_some());
assert!(line_doc_comment_style("/// blah").is_some());
assert!(line_doc_comment_style("////").is_none());
}
#[test]
fn test_block_doc_comment_1() {
with_default_session_globals(|| {
let comment = "/**\n * Test \n ** Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(Symbol::intern(comment));
let comment = "\n * Test \n ** Test\n * Test\n";
let stripped = strip_doc_comment_decoration(Symbol::intern(comment), CommentKind::Block);
assert_eq!(stripped, " Test \n* Test\n Test");
})
}
@ -13,8 +20,8 @@ fn test_block_doc_comment_1() {
#[test]
fn test_block_doc_comment_2() {
with_default_session_globals(|| {
let comment = "/**\n * Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(Symbol::intern(comment));
let comment = "\n * Test\n * Test\n";
let stripped = strip_doc_comment_decoration(Symbol::intern(comment), CommentKind::Block);
assert_eq!(stripped, " Test\n Test");
})
}
@ -22,37 +29,22 @@ fn test_block_doc_comment_2() {
#[test]
fn test_block_doc_comment_3() {
with_default_session_globals(|| {
let comment = "/**\n let a: *i32;\n *a = 5;\n*/";
let stripped = strip_doc_comment_decoration(Symbol::intern(comment));
let comment = "\n let a: *i32;\n *a = 5;\n";
let stripped = strip_doc_comment_decoration(Symbol::intern(comment), CommentKind::Block);
assert_eq!(stripped, " let a: *i32;\n *a = 5;");
})
}
#[test]
fn test_block_doc_comment_4() {
with_default_session_globals(|| {
let comment = "/*******************\n test\n *********************/";
let stripped = strip_doc_comment_decoration(Symbol::intern(comment));
assert_eq!(stripped, " test");
})
}
#[test]
fn test_line_doc_comment() {
with_default_session_globals(|| {
let stripped = strip_doc_comment_decoration(Symbol::intern("/// test"));
let stripped = strip_doc_comment_decoration(Symbol::intern(" test"), CommentKind::Line);
assert_eq!(stripped, " test");
let stripped = strip_doc_comment_decoration(Symbol::intern("///! test"));
let stripped = strip_doc_comment_decoration(Symbol::intern("! test"), CommentKind::Line);
assert_eq!(stripped, " test");
let stripped = strip_doc_comment_decoration(Symbol::intern("// test"));
assert_eq!(stripped, " test");
let stripped = strip_doc_comment_decoration(Symbol::intern("// test"));
assert_eq!(stripped, " test");
let stripped = strip_doc_comment_decoration(Symbol::intern("///test"));
let stripped = strip_doc_comment_decoration(Symbol::intern("test"), CommentKind::Line);
assert_eq!(stripped, "test");
let stripped = strip_doc_comment_decoration(Symbol::intern("///!test"));
assert_eq!(stripped, "test");
let stripped = strip_doc_comment_decoration(Symbol::intern("//test"));
let stripped = strip_doc_comment_decoration(Symbol::intern("!test"), CommentKind::Line);
assert_eq!(stripped, "test");
})
}
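
The tests above only cover `line_doc_comment_style` and decoration stripping. Going by the `block_doc_comment_style` implementation earlier in this diff, a hypothetical companion test in the same `use super::*;` style would be expected to pass (a sketch, not part of the commit):

#[test]
fn block_doc_comments() {
    // `/**` and `/*!` open doc comments; `/***` and the empty `/**/` do not.
    assert_eq!(block_doc_comment_style("/** blah */", true), Some(AttrStyle::Outer));
    assert_eq!(block_doc_comment_style("/*! blah */", true), Some(AttrStyle::Inner));
    assert_eq!(block_doc_comment_style("/*** blah */", true), None);
    assert_eq!(block_doc_comment_style("/**/", true), None);
    // An unterminated block doc comment is still classified by its opening.
    assert_eq!(block_doc_comment_style("/** blah", false), Some(AttrStyle::Outer));
}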


@ -880,7 +880,7 @@ pub fn walk_vis<'a, V: Visitor<'a>>(visitor: &mut V, vis: &'a Visibility) {
pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) {
match attr.kind {
AttrKind::Normal(ref item) => walk_mac_args(visitor, &item.args),
AttrKind::DocComment(_) => {}
AttrKind::DocComment(..) => {}
}
}


@ -981,7 +981,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
path: item.path.clone(),
args: self.lower_mac_args(&item.args),
}),
AttrKind::DocComment(comment) => AttrKind::DocComment(comment),
AttrKind::DocComment(comment_kind, data) => AttrKind::DocComment(comment_kind, data),
};
Attribute { kind, id: attr.id, style: attr.style, span: attr.span }


@ -8,7 +8,7 @@ use rustc_ast::ast::{InlineAsmOperand, InlineAsmRegOrRegClass};
use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, BinOpToken, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::token::{self, BinOpToken, CommentKind, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::util::parser::{self, AssocOp, Fixity};
use rustc_ast::util::{classify, comments};
@ -152,8 +152,8 @@ pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
// and also addresses some specific regressions described in #63896 and #73345.
fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
if let TokenTree::Token(token) = prev {
if let token::DocComment(s) = token.kind {
return !s.as_str().starts_with("//");
if let token::DocComment(comment_kind, ..) = token.kind {
return comment_kind != CommentKind::Line;
}
}
match tt {
@ -194,6 +194,19 @@ fn binop_to_string(op: BinOpToken) -> &'static str {
}
}
fn doc_comment_to_string(
comment_kind: CommentKind,
attr_style: ast::AttrStyle,
data: Symbol,
) -> String {
match (comment_kind, attr_style) {
(CommentKind::Line, ast::AttrStyle::Outer) => format!("///{}", data),
(CommentKind::Line, ast::AttrStyle::Inner) => format!("//!{}", data),
(CommentKind::Block, ast::AttrStyle::Outer) => format!("/**{}*/", data),
(CommentKind::Block, ast::AttrStyle::Inner) => format!("/*!{}*/", data),
}
}
pub fn literal_to_string(lit: token::Lit) -> String {
let token::Lit { kind, symbol, suffix } = lit;
let mut out = match kind {
@ -271,7 +284,9 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
token::Lifetime(s) => s.to_string(),
/* Other */
token::DocComment(s) => s.to_string(),
token::DocComment(comment_kind, attr_style, data) => {
doc_comment_to_string(comment_kind, attr_style, data)
}
token::Eof => "<eof>".to_string(),
token::Whitespace => " ".to_string(),
token::Comment => "/* */".to_string(),
@ -599,8 +614,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.print_attr_item(&item, attr.span);
self.word("]");
}
ast::AttrKind::DocComment(comment) => {
self.word(comment.to_string());
ast::AttrKind::DocComment(comment_kind, data) => {
self.word(doc_comment_to_string(comment_kind, attr.style, data));
self.hardbreak()
}
}
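
`doc_comment_to_string` re-attaches the "quotes" that lexing strips off, so pretty-printing a doc comment token reproduces the original comment text. A couple of illustrative values (plain string literals stand in for the interned `Symbol` argument):

// doc_comment_to_string(CommentKind::Line,  AttrStyle::Outer, " doc")  == "/// doc"
// doc_comment_to_string(CommentKind::Block, AttrStyle::Inner, " doc ") == "/*! doc */"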


@ -1,5 +1,5 @@
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::util::comments::is_doc_comment;
use rustc_ast::ast::AttrStyle;
use rustc_ast::token::{self, CommentKind, Token, TokenKind};
use rustc_ast::with_default_session_globals;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{emitter::EmitterWriter, Handler};
@ -223,13 +223,6 @@ fn literal_suffixes() {
})
}
#[test]
fn line_doc_comments() {
assert!(is_doc_comment("///"));
assert!(is_doc_comment("/// blah"));
assert!(!is_doc_comment("////"));
}
#[test]
fn nested_block_comments() {
with_default_session_globals(|| {
@ -251,6 +244,9 @@ fn crlf_comments() {
assert_eq!(comment.kind, token::Comment);
assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token(), token::Whitespace);
assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
assert_eq!(
lexer.next_token(),
token::DocComment(CommentKind::Line, AttrStyle::Outer, Symbol::intern(" test"))
);
})
}


@ -244,20 +244,20 @@ fn crlf_doc_comments() {
let source = "/// doc comment\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name_1, source, &sess).unwrap().unwrap();
let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
assert_eq!(doc.as_str(), "/// doc comment");
assert_eq!(doc.as_str(), " doc comment");
let name_2 = FileName::Custom("crlf_source_2".to_string());
let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name_2, source, &sess).unwrap().unwrap();
let docs = item.attrs.iter().filter_map(|at| at.doc_str()).collect::<Vec<_>>();
let b: &[_] = &[Symbol::intern("/// doc comment"), Symbol::intern("/// line 2")];
let b: &[_] = &[Symbol::intern(" doc comment"), Symbol::intern(" line 2")];
assert_eq!(&docs[..], b);
let name_3 = FileName::Custom("clrf_source_3".to_string());
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
assert_eq!(doc.as_str(), "/** doc comment\n * with CRLF */");
assert_eq!(doc.as_str(), " doc comment\n * with CRLF ");
});
}


@ -148,9 +148,8 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
tt!(Punct::new('\'', true))
}
Literal(lit) => tt!(Literal { lit }),
DocComment(c) => {
let style = comments::doc_comment_style(c);
let stripped = comments::strip_doc_comment_decoration(c);
DocComment(comment_kind, attr_style, data) => {
let stripped = comments::strip_doc_comment_decoration(data, comment_kind);
let mut escaped = String::new();
for ch in stripped.chars() {
escaped.extend(ch.escape_debug());
@ -169,7 +168,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
span: DelimSpan::from_single(span),
flatten: false,
}));
if style == ast::AttrStyle::Inner {
if attr_style == ast::AttrStyle::Inner {
stack.push(tt!(Punct::new('!', false)));
}
tt!(Punct::new('#', false))
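
For context, the conversion above means a procedural macro never receives a doc comment token as such: it sees the equivalent `doc` attribute built from the stripped and escaped comment body, with an extra `!` punct for inner comments. Roughly (an illustrative sketch of the resulting token stream, ignoring exact spacing and spans):

// /// Example   ==>   # [ doc = " Example" ]
// //! Example   ==>   # ! [ doc = " Example" ]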


@ -1,4 +1,4 @@
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::token::{self, CommentKind, Token, TokenKind};
use rustc_ast::util::comments;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError};
@ -170,22 +170,19 @@ impl<'a> StringReader<'a> {
match token {
rustc_lexer::TokenKind::LineComment => {
let string = self.str_from(start);
// comments with only more "/"s are not doc comments
if comments::is_line_doc_comment(string) {
if let Some(attr_style) = comments::line_doc_comment_style(string) {
self.forbid_bare_cr(start, string, "bare CR not allowed in doc-comment");
token::DocComment(Symbol::intern(string))
token::DocComment(CommentKind::Line, attr_style, Symbol::intern(&string[3..]))
} else {
token::Comment
}
}
rustc_lexer::TokenKind::BlockComment { terminated } => {
let string = self.str_from(start);
// block comments starting with "/**" or "/*!" are doc-comments
// but comments with only "*"s between two "/"s are not
let is_doc_comment = comments::is_block_doc_comment(string);
let attr_style = comments::block_doc_comment_style(string, terminated);
if !terminated {
let msg = if is_doc_comment {
let msg = if attr_style.is_some() {
"unterminated block doc-comment"
} else {
"unterminated block comment"
@ -202,9 +199,13 @@ impl<'a> StringReader<'a> {
FatalError.raise();
}
if is_doc_comment {
if let Some(attr_style) = attr_style {
self.forbid_bare_cr(start, string, "bare CR not allowed in block doc-comment");
token::DocComment(Symbol::intern(string))
token::DocComment(
CommentKind::Block,
attr_style,
Symbol::intern(&string[3..string.len() - if terminated { 2 } else { 0 }]),
)
} else {
token::Comment
}
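
To make the stripping in this hunk concrete, a small self-contained sketch that mirrors what the code above does to the comment text (plain slicing on string literals, not the real lexer): line doc comments drop their first three characters, and terminated block doc comments additionally drop the closing `*/`.

fn main() {
    // Mirrors `&string[3..]` for line doc comments above.
    assert_eq!(&"/// test"[3..], " test");
    assert_eq!(&"//! test"[3..], " test");

    // Mirrors `&string[3..string.len() - if terminated { 2 } else { 0 }]`
    // for block doc comments.
    let (block, terminated) = ("/*! test */", true);
    assert_eq!(&block[3..block.len() - if terminated { 2 } else { 0 }], " test ");
}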


@ -486,7 +486,9 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
(&OpenDelim(a), &OpenDelim(b)) | (&CloseDelim(a), &CloseDelim(b)) => a == b,
(&DocComment(a), &DocComment(b)) | (&Shebang(a), &Shebang(b)) => a == b,
(&DocComment(a1, a2, a3), &DocComment(b1, b2, b3)) => a1 == b1 && a2 == b2 && a3 == b3,
(&Shebang(a), &Shebang(b)) => a == b,
(&Literal(a), &Literal(b)) => a == b,
@ -524,7 +526,7 @@ fn prepend_attrs(
let item = match attr.kind {
ast::AttrKind::Normal(ref item) => item,
ast::AttrKind::DocComment(_) => {
ast::AttrKind::DocComment(..) => {
let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
builder.push(stream);
continue;


@ -2,10 +2,9 @@ use super::{Parser, PathStyle};
use rustc_ast::ast;
use rustc_ast::attr;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast::util::comments;
use rustc_ast_pretty::pprust;
use rustc_errors::{error_code, PResult};
use rustc_span::{Span, Symbol};
use rustc_span::Span;
use log::debug;
@ -47,8 +46,8 @@ impl<'a> Parser<'a> {
let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
attrs.push(attr);
just_parsed_doc_comment = false;
} else if let token::DocComment(s) = self.token.kind {
let attr = self.mk_doc_comment(s);
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
if attr.style != ast::AttrStyle::Outer {
self.sess
.span_diagnostic
@ -73,10 +72,6 @@ impl<'a> Parser<'a> {
Ok(attrs)
}
fn mk_doc_comment(&self, s: Symbol) -> ast::Attribute {
attr::mk_doc_comment(comments::doc_comment_style(s), s, self.token.span)
}
/// Matches `attribute = # ! [ meta_item ]`.
///
/// If `permit_inner` is `true`, then a leading `!` indicates an inner
@ -184,9 +179,9 @@ impl<'a> Parser<'a> {
let attr = self.parse_attribute(true)?;
assert_eq!(attr.style, ast::AttrStyle::Inner);
attrs.push(attr);
} else if let token::DocComment(s) = self.token.kind {
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
// We need to get the position of this token before we bump.
let attr = self.mk_doc_comment(s);
let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
if attr.style == ast::AttrStyle::Inner {
attrs.push(attr);
self.bump();


@ -1419,7 +1419,7 @@ impl<'a> Parser<'a> {
}
pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
if let token::DocComment(_) = self.token.kind {
if let token::DocComment(..) = self.token.kind {
self.struct_span_err(
self.token.span,
"documentation comments cannot be applied to a function parameter's type",


@ -610,7 +610,7 @@ impl<'a> Parser<'a> {
/// Recover on a doc comment before `}`.
fn recover_doc_comment_before_brace(&mut self) -> bool {
if let token::DocComment(_) = self.token.kind {
if let token::DocComment(..) = self.token.kind {
if self.look_ahead(1, |tok| tok == &token::CloseDelim(token::Brace)) {
struct_span_err!(
self.diagnostic(),
@ -1231,7 +1231,7 @@ impl<'a> Parser<'a> {
self.bump();
}
token::CloseDelim(token::Brace) => {}
token::DocComment(_) => {
token::DocComment(..) => {
let previous_span = self.prev_token.span;
let mut err = self.span_fatal_err(self.token.span, Error::UselessDocComment);
self.bump(); // consume the doc comment


@ -22,7 +22,7 @@ use rustc_ast::ast::{
use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use rustc_ast::util::comments::{doc_comment_style, strip_doc_comment_decoration};
use rustc_ast::util::comments::strip_doc_comment_decoration;
use rustc_ast_pretty::pprust;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError, PResult};
use rustc_session::parse::ParseSess;
@ -209,12 +209,14 @@ impl TokenCursor {
}
fn next_desugared(&mut self) -> Token {
let (name, sp) = match self.next() {
Token { kind: token::DocComment(name), span } => (name, span),
let (data, comment_kind, attr_style, sp) = match self.next() {
Token { kind: token::DocComment(comment_kind, attr_style, data), span } => {
(data, comment_kind, attr_style, span)
}
tok => return tok,
};
let stripped = strip_doc_comment_decoration(name);
let stripped = strip_doc_comment_decoration(data, comment_kind);
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
// required to wrap the text.
@ -251,7 +253,7 @@ impl TokenCursor {
TokenCursorFrame::new(
delim_span,
token::NoDelim,
&if doc_comment_style(name) == AttrStyle::Inner {
&if attr_style == AttrStyle::Inner {
[TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
.iter()
.cloned()
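
The desugared `#[doc]` attribute wraps the comment body in a raw string literal, and the `"#` search mentioned in the comment above decides how many `#`s that raw string needs so the body cannot terminate it early. A self-contained sketch of that minimum-hash computation (`min_raw_string_hashes` is a hypothetical stand-in mirroring the described logic, not the parser's code):

fn min_raw_string_hashes(text: &str) -> usize {
    // `r#"..."#` ends at `"` followed by as many `#`s as it was opened with,
    // so we need one more `#` than the longest `"##...` run inside the text.
    let (mut max, mut count) = (0, 0);
    for ch in text.chars() {
        count = match ch {
            '"' => 1,
            '#' if count > 0 => count + 1,
            _ => 0,
        };
        max = max.max(count);
    }
    max
}

fn main() {
    assert_eq!(min_raw_string_hashes(" plain text"), 0);        // r" plain text"
    assert_eq!(min_raw_string_hashes(" has a \" quote"), 1);    // r#" has a " quote"#
    assert_eq!(min_raw_string_hashes(" contains \"# here"), 2); // r##" contains "# here"##
}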


@ -822,8 +822,8 @@ impl<'tcx> SaveContext<'tcx> {
for attr in attrs {
if let Some(val) = attr.doc_str() {
if attr.is_doc_comment() {
result.push_str(&strip_doc_comment_decoration(val));
if let ast::AttrKind::DocComment(comment_kind, _) = attr.kind {
result.push_str(&strip_doc_comment_decoration(val, comment_kind));
} else {
result.push_str(&val.as_str());
}


@ -506,11 +506,15 @@ impl Attributes {
.iter()
.filter_map(|attr| {
if let Some(value) = attr.doc_str() {
let (value, mk_fragment): (_, fn(_, _, _) -> _) = if attr.is_doc_comment() {
(strip_doc_comment_decoration(value), DocFragment::SugaredDoc)
} else {
(value.to_string(), DocFragment::RawDoc)
};
let (value, mk_fragment): (_, fn(_, _, _) -> _) =
if let ast::AttrKind::DocComment(comment_kind, _) = attr.kind {
(
strip_doc_comment_decoration(value, comment_kind),
DocFragment::SugaredDoc,
)
} else {
(value.to_string(), DocFragment::RawDoc)
};
let line = doc_line;
doc_line += value.lines().count();