Rework Attribute::get_tokens.

Returning `Vec<TokenTree>` works better for the call sites than
returning `TokenStream`.
Nicholas Nethercote 2024-07-10 14:51:41 +10:00
parent 8a390bae06
commit fee152556f
3 changed files with 16 additions and 20 deletions
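
As context for the hunks below, here is a minimal standalone sketch of the call-site difference the commit message describes. The types are toy stand-ins, not rustc's: with a `TokenStream` return value, callers that are building up a `Vec<TokenTree>` have to reach through the wrapper; with `Vec<TokenTree>` they can simply `extend`.

// Toy types for illustration only; rustc's real TokenTree/TokenStream are richer.
#[derive(Clone, Debug)]
enum TokenTree {
    Token(String),
}

#[derive(Clone, Debug)]
struct TokenStream(Vec<TokenTree>);

struct Attribute {
    trees: Vec<TokenTree>,
}

impl Attribute {
    // Old shape: the trees come back hidden inside a stream wrapper.
    fn get_tokens(&self) -> TokenStream {
        TokenStream(self.trees.clone())
    }

    // New shape: the trees come back directly.
    fn token_trees(&self) -> Vec<TokenTree> {
        self.trees.clone()
    }
}

fn main() {
    let attr = Attribute {
        trees: vec![TokenTree::Token("#".into()), TokenTree::Token("attr".into())],
    };
    let mut res: Vec<TokenTree> = Vec::new();

    // Old: unpack the stream to splice its trees into the Vec being built.
    res.extend(attr.get_tokens().0.iter().cloned());

    // New: extend directly, no wrapper to look through.
    res.extend(attr.token_trees());

    println!("{res:?}");
}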


@@ -202,21 +202,18 @@ pub fn meta_kind(&self) -> Option<MetaItemKind> {
         }
     }
 
-    // Named `get_tokens` to distinguish it from the `<Attribute as HasTokens>::tokens` method.
-    pub fn get_tokens(&self) -> TokenStream {
-        match &self.kind {
-            AttrKind::Normal(normal) => TokenStream::new(
-                normal
-                    .tokens
-                    .as_ref()
-                    .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
-                    .to_attr_token_stream()
-                    .to_token_trees(),
-            ),
-            &AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone(
+    pub fn token_trees(&self) -> Vec<TokenTree> {
+        match self.kind {
+            AttrKind::Normal(ref normal) => normal
+                .tokens
+                .as_ref()
+                .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
+                .to_attr_token_stream()
+                .to_token_trees(),
+            AttrKind::DocComment(comment_kind, data) => vec![TokenTree::token_alone(
                 token::DocComment(comment_kind, self.style, data),
                 self.span,
-            ),
+            )],
         }
     }
 }


@@ -225,11 +225,12 @@ pub fn to_token_trees(&self) -> Vec<TokenTree> {
                             // properly implemented - we always synthesize fake tokens,
                             // so we never reach this code.
-                            let mut stream = TokenStream::default();
+                            let mut tts = vec![];
                             for inner_attr in inner_attrs {
-                                stream.push_stream(inner_attr.get_tokens());
+                                tts.extend(inner_attr.token_trees());
                             }
-                            stream.push_stream(delim_tokens.clone());
+                            tts.extend(delim_tokens.0.iter().cloned());
+                            let stream = TokenStream::new(tts);
                             *tree = TokenTree::Delimited(*span, *spacing, *delim, stream);
                             found = true;
                             break;
@@ -242,7 +243,7 @@ pub fn to_token_trees(&self) -> Vec<TokenTree> {
                     );
                 }
                 for attr in outer_attrs {
-                    res.extend(attr.get_tokens().0.iter().cloned());
+                    res.extend(attr.token_trees());
                 }
                 res.extend(target_tokens);
             }
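
The pattern in the new `to_token_trees` code above is worth calling out: token trees are accumulated in a plain `Vec` and only wrapped in a `TokenStream` at the single point where a stream is actually required. A small sketch of that shape, again with toy types rather than rustc's:

#[derive(Clone, Debug)]
struct TokenTree(String);

#[derive(Debug)]
struct TokenStream(Vec<TokenTree>);

impl TokenStream {
    fn new(tts: Vec<TokenTree>) -> Self {
        TokenStream(tts)
    }
}

fn main() {
    let inner_attr_trees = vec![TokenTree("#".into()), TokenTree("!".into())];
    let delim_trees = vec![TokenTree("{".into()), TokenTree("}".into())];

    // Accumulate plain trees from several sources...
    let mut tts: Vec<TokenTree> = Vec::new();
    tts.extend(inner_attr_trees);
    tts.extend(delim_trees.iter().cloned());

    // ...and build the stream once, right where it is needed.
    let stream = TokenStream::new(tts);
    println!("{stream:?}");
}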


@@ -292,8 +292,6 @@ fn expand_cfg_attr_item(
     attr: &Attribute,
     (item, item_span): (ast::AttrItem, Span),
 ) -> Attribute {
-    let orig_tokens = attr.get_tokens();
-
     // We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
     // and producing an attribute of the form `#[attr]`. We
     // have captured tokens for `attr` itself, but we need to
@@ -302,7 +300,7 @@ fn expand_cfg_attr_item(
     // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
     // for `attr` when we expand it to `#[attr]`
-    let mut orig_trees = orig_tokens.trees();
+    let mut orig_trees = attr.token_trees().into_iter();
     let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _) =
         orig_trees.next().unwrap().clone()
     else {
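
For reference, the source-level transformation `expand_cfg_attr_item` performs, as the comments in the hunk describe: a `#[cfg_attr(pred, attr)]` attribute whose predicate holds is rewritten to plain `#[attr]`, and the synthesized `#` reuses the token (and span) from the original attribute. An illustrative before/after, with a predicate and attribute chosen purely for this example:

// Before expansion, as written in the source:
#[cfg_attr(debug_assertions, allow(dead_code))]
fn helper_before() {}

// After cfg_attr expansion, when `debug_assertions` holds, the compiler behaves
// as if this had been written instead:
#[allow(dead_code)]
fn helper_after() {}

fn main() {}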