mbe: fix token conversion for doc comments

Jake Heinz 2023-04-22 02:49:13 +00:00
parent af3b6a0893
commit a497e9a05e
2 changed files with 64 additions and 36 deletions

File 1 of 2

@@ -850,6 +850,32 @@ fn foo() {}
     );
 }
 
+#[test]
+fn goto_through_included_file_struct_with_doc_comment() {
+    check(
+        r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+include!("foo.rs");
+
+fn f() {
+    let x = Foo$0;
+}
+
+mod confuse_index {
+    pub struct Foo;
+}
+
+//- /foo.rs
+/// This is a doc comment
+pub struct Foo;
+         //^^^
+"#,
+    );
+}
+
 #[test]
 fn goto_for_type_param() {
     check(

File 2 of 2

@@ -190,20 +190,13 @@ struct StackEntry {
         let kind = token.kind(conv);
         if kind == COMMENT {
-            if let Some(tokens) = conv.convert_doc_comment(&token) {
-                // FIXME: There has to be a better way to do this
-                // Add the comments token id to the converted doc string
+            // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
+            // figure out which token id to use for the doc comment, if it is converted successfully.
+            let next_id = conv.id_alloc().peek_next_id();
+            if let Some(tokens) = conv.convert_doc_comment(&token, next_id) {
                 let id = conv.id_alloc().alloc(range, synth_id);
-                result.extend(tokens.into_iter().map(|mut tt| {
-                    if let tt::TokenTree::Subtree(sub) = &mut tt {
-                        if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
-                            sub.token_trees.get_mut(2)
-                        {
-                            lit.span = id
-                        }
-                    }
-                    tt
-                }));
+                debug_assert_eq!(id, next_id);
+                result.extend(tokens);
             }
             continue;
         }
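
Note: the new code depends on one property of the id allocator behind `conv.id_alloc()`: `peek_next_id` (added further down in this diff) must return exactly the id that the next `alloc` call hands out, which is what the `debug_assert_eq!(id, next_id)` above checks. A minimal, self-contained sketch of that contract follows; the names are illustrative, and the real `alloc` in the crate also takes a text range and an optional synthetic id, which are omitted here.

// Simplified stand-in for the id allocator; not the crate's actual type.
struct IdAlloc {
    next_id: u32,
}

impl IdAlloc {
    // Report the id the next `alloc` will return, without consuming it.
    fn peek_next_id(&self) -> u32 {
        self.next_id
    }

    fn alloc(&mut self) -> u32 {
        let id = self.next_id;
        self.next_id += 1;
        id
    }
}

fn main() {
    let mut ids = IdAlloc { next_id: 0 };
    // Peek before the fallible conversion so the doc-comment tokens can be
    // built with the right id; the later `alloc` must hand out that same id.
    let next_id = ids.peek_next_id();
    let id = ids.alloc();
    assert_eq!(id, next_id);
}

With this contract in place, the doc-comment tokens can be constructed with the correct span even though the id is only actually allocated once the conversion succeeds.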
@@ -382,49 +375,46 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
     text.into()
 }
 
-fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+fn convert_doc_comment(
+    token: &syntax::SyntaxToken,
+    span: tt::TokenId,
+) -> Option<Vec<tt::TokenTree>> {
     cov_mark::hit!(test_meta_doc_comments);
     let comment = ast::Comment::cast(token.clone())?;
     let doc = comment.kind().doc?;
 
     // Make `doc="\" Comments\""
-    let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
+    let meta_tkns =
+        vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)];
 
     // Make `#![]`
     let mut token_trees = Vec::with_capacity(3);
-    token_trees.push(mk_punct('#'));
+    token_trees.push(mk_punct('#', span));
     if let ast::CommentPlacement::Inner = doc {
-        token_trees.push(mk_punct('!'));
+        token_trees.push(mk_punct('!', span));
     }
     token_trees.push(tt::TokenTree::from(tt::Subtree {
-        delimiter: tt::Delimiter {
-            open: tt::TokenId::UNSPECIFIED,
-            close: tt::TokenId::UNSPECIFIED,
-            kind: tt::DelimiterKind::Bracket,
-        },
+        delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
         token_trees: meta_tkns,
     }));
 
     return Some(token_trees);
 
     // Helper functions
-    fn mk_ident(s: &str) -> tt::TokenTree {
-        tt::TokenTree::from(tt::Leaf::from(tt::Ident {
-            text: s.into(),
-            span: tt::TokenId::unspecified(),
-        }))
+    fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree {
+        tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }))
     }
 
-    fn mk_punct(c: char) -> tt::TokenTree {
+    fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree {
         tt::TokenTree::from(tt::Leaf::from(tt::Punct {
             char: c,
             spacing: tt::Spacing::Alone,
-            span: tt::TokenId::unspecified(),
+            span,
         }))
     }
 
-    fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
-        let lit = tt::Literal { text: doc_comment_text(comment), span: tt::TokenId::unspecified() };
+    fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree {
+        let lit = tt::Literal { text: doc_comment_text(comment), span };
 
         tt::TokenTree::from(tt::Leaf::from(lit))
     }
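
For orientation (not part of the diff): an outer comment such as `/// This is a doc comment` from the new test is converted by `convert_doc_comment` into roughly the tokens of

#[doc = " This is a doc comment"]

and with this change every synthesized token (the `#`, the bracket delimiter, `doc`, `=`, and the string literal) carries the token id allocated for the original comment token rather than `tt::TokenId::unspecified()`. Inner comments (`//!`) get the `#![doc = ...]` form, which is what the `ast::CommentPlacement::Inner` branch handles.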
@@ -480,6 +470,10 @@ fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
             }
         }
     }
+
+    fn peek_next_id(&self) -> tt::TokenId {
+        tt::TokenId(self.next_id)
+    }
 }
 
 /// A raw token (straight from lexer) converter
@@ -502,7 +496,11 @@ trait SrcToken<Ctx>: std::fmt::Debug {
 trait TokenConverter: Sized {
     type Token: SrcToken<Self>;
 
-    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
+    fn convert_doc_comment(
+        &self,
+        token: &Self::Token,
+        span: tt::TokenId,
+    ) -> Option<Vec<tt::TokenTree>>;
 
     fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
@@ -532,9 +530,9 @@ fn synthetic_id(&self, _ctx: &RawConverter<'a>) -> Option<SyntheticTokenId> {
 impl<'a> TokenConverter for RawConverter<'a> {
     type Token = usize;
 
-    fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+    fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
         let text = self.lexed.text(token);
-        convert_doc_comment(&doc_comment(text))
+        convert_doc_comment(&doc_comment(text), span)
     }
 
     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -681,8 +679,12 @@ fn synthetic_id(&self, _ctx: &Converter) -> Option<SyntheticTokenId> {
 impl TokenConverter for Converter {
     type Token = SynToken;
 
-    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
-        convert_doc_comment(token.token()?)
+    fn convert_doc_comment(
+        &self,
+        token: &Self::Token,
+        span: tt::TokenId,
+    ) -> Option<Vec<tt::TokenTree>> {
+        convert_doc_comment(token.token()?, span)
     }
 
     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {