Factor out AttrsTarget flattening code.

This commit does the following.
- Pulls the code out of `AttrTokenStream::to_token_trees` into a new
  function `attrs_and_tokens_to_token_trees`.
- Simplifies `TokenStream::from_ast` by calling the new function. This
  is nicer than the old way, which created a temporary
  `AttrTokenStream` containing a single `AttrsTarget` (which required
  some cloning) just to call `to_token_trees` on it. (It is good to
  remove this use of `AttrsTarget` which isn't related to `cfg_attr`
  expansion.)
Nicholas Nethercote 2024-07-10 15:47:02 +10:00
parent d8b6aa6d0d
commit d6ebbbfcb2
2 changed files with 72 additions and 65 deletions
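
As a rough illustration of the "flattening" the commit message describes, here is a small, self-contained sketch of the ordering the new helper produces: outer attribute tokens first, then the target's own tokens, with inner attribute tokens spliced into the start of the target's braced group. The `Style`, `Attr`, and `flatten` names are toy stand-ins invented for this example; rustc's real code works on `Attribute`, `LazyAttrTokenStream`, and `TokenTree` values and rewrites the trailing delimited group rather than searching for a "{" token.

// Toy sketch only: mimics the order produced by attrs_and_tokens_to_token_trees,
// using plain strings instead of rustc's token trees.
#[derive(Clone, Copy, PartialEq)]
enum Style {
    Outer,
    Inner,
}

struct Attr {
    style: Style,
    tokens: Vec<String>,
}

fn flatten(attrs: &[Attr], target_tokens: &[String]) -> Vec<String> {
    // Outer attributes sort before inner ones, mirroring the real code's
    // `partition_point` call.
    let idx = attrs.partition_point(|a| a.style == Style::Outer);
    let (outer, inner) = attrs.split_at(idx);

    // Outer attribute tokens come first.
    let mut res = Vec::new();
    for attr in outer {
        res.extend(attr.tokens.iter().cloned());
    }

    // Then the target's own tokens.
    res.extend(target_tokens.iter().cloned());

    // Finally, splice inner attribute tokens just inside the target's braced
    // group (simplified here to "after the first `{`").
    if !inner.is_empty() {
        if let Some(pos) = res.iter().position(|t| t == "{") {
            let tail = res.split_off(pos + 1);
            res.extend(inner.iter().flat_map(|a| a.tokens.iter().cloned()));
            res.extend(tail);
        }
    }
    res
}

fn main() {
    let attrs = vec![
        Attr { style: Style::Outer, tokens: vec!["#[outer]".to_string()] },
        Attr { style: Style::Inner, tokens: vec!["#![inner]".to_string()] },
    ];
    let target: Vec<String> =
        ["fn", "f", "(", ")", "{", "g", "(", ")", ";", "}"].iter().map(|s| s.to_string()).collect();
    // Prints: #[outer] fn f ( ) { #![inner] g ( ) ; }
    println!("{}", flatten(&attrs, &target).join(" "));
}

The same ordering is what `attrs_and_tokens_to_token_trees` appends to `res` in the diff below, and what `TokenStream::from_ast` now gets by calling it directly instead of building a temporary `AttrsTarget`.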

@@ -16,7 +16,7 @@
 use crate::ast::{AttrStyle, StmtKind};
 use crate::ast_traits::{HasAttrs, HasTokens};
 use crate::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use crate::AttrVec;
+use crate::{AttrVec, Attribute};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::{self, Lrc};
@@ -179,11 +179,10 @@ pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
         AttrTokenStream(Lrc::new(tokens))
     }
 
-    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
-    /// During conversion, `AttrTokenTree::AttrsTarget` get 'flattened'
-    /// back to a `TokenStream` of the form `outer_attr attr_target`.
-    /// If there are inner attributes, they are inserted into the proper
-    /// place in the attribute target tokens.
+    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`. During
+    /// conversion, any `AttrTokenTree::AttrsTarget` gets "flattened" back to a
+    /// `TokenStream`, as described in the comment on
+    /// `attrs_and_tokens_to_token_trees`.
     pub fn to_token_trees(&self) -> Vec<TokenTree> {
         let mut res = Vec::with_capacity(self.0.len());
         for tree in self.0.iter() {
@@ -200,16 +199,43 @@ pub fn to_token_trees(&self) -> Vec<TokenTree> {
                     ))
                 }
                 AttrTokenTree::AttrsTarget(target) => {
-                    let idx = target
-                        .attrs
-                        .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
-                    let (outer_attrs, inner_attrs) = target.attrs.split_at(idx);
-
-                    let mut target_tokens = target.tokens.to_attr_token_stream().to_token_trees();
+                    attrs_and_tokens_to_token_trees(&target.attrs, &target.tokens, &mut res);
+                }
+            }
+        }
+        res
+    }
+}
+
+// Converts multiple attributes and the tokens for a target AST node into token trees, and appends
+// them to `res`.
+//
+// Example: if the AST node is "fn f() { blah(); }", then:
+// - Simple if no attributes are present, e.g. "fn f() { blah(); }"
+// - Simple if only outer attribute are present, e.g. "#[outer1] #[outer2] fn f() { blah(); }"
+// - Trickier if inner attributes are present, because they must be moved within the AST node's
+//   tokens, e.g. "#[outer] fn f() { #![inner] blah() }"
+fn attrs_and_tokens_to_token_trees(
+    attrs: &[Attribute],
+    target_tokens: &LazyAttrTokenStream,
+    res: &mut Vec<TokenTree>,
+) {
+    let idx = attrs.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
+    let (outer_attrs, inner_attrs) = attrs.split_at(idx);
+
+    // Add outer attribute tokens.
+    for attr in outer_attrs {
+        res.extend(attr.token_trees());
+    }
+
+    // Add target AST node tokens.
+    res.extend(target_tokens.to_attr_token_stream().to_token_trees());
+
+    // Insert inner attribute tokens.
     if !inner_attrs.is_empty() {
         let mut found = false;
         // Check the last two trees (to account for a trailing semi)
-                        for tree in target_tokens.iter_mut().rev().take(2) {
+        for tree in res.iter_mut().rev().take(2) {
             if let TokenTree::Delimited(span, spacing, delim, delim_tokens) = tree {
                 // Inner attributes are only supported on extern blocks, functions,
                 // impls, and modules. All of these have their inner attributes
@@ -224,7 +250,6 @@ pub fn to_token_trees(&self) -> Vec<TokenTree> {
                 // Support for custom attributes in this position is not
                 // properly implemented - we always synthesize fake tokens,
                 // so we never reach this code.
-
                 let mut tts = vec![];
                 for inner_attr in inner_attrs {
                     tts.extend(inner_attr.token_trees());
@@ -236,20 +261,7 @@ pub fn to_token_trees(&self) -> Vec<TokenTree> {
                 break;
             }
         }
-                        assert!(
-                            found,
-                            "Failed to find trailing delimited group in: {target_tokens:?}"
-                        );
-                    }
-                    for attr in outer_attrs {
-                        res.extend(attr.token_trees());
-                    }
-                    res.extend(target_tokens);
-                }
-            }
-        }
-        res
+        assert!(found, "Failed to find trailing delimited group in: {res:?}");
     }
 }
@@ -438,18 +450,10 @@ pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
     }
 
     pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
-        let Some(tokens) = node.tokens() else {
-            panic!("missing tokens for node: {:?}", node);
-        };
-        let attrs = node.attrs();
-        let attr_stream = if attrs.is_empty() {
-            tokens.to_attr_token_stream()
-        } else {
-            let target =
-                AttrsTarget { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
-            AttrTokenStream::new(vec![AttrTokenTree::AttrsTarget(target)])
-        };
-        TokenStream::new(attr_stream.to_token_trees())
+        let tokens = node.tokens().unwrap_or_else(|| panic!("missing tokens for node: {:?}", node));
+        let mut tts = vec![];
+        attrs_and_tokens_to_token_trees(node.attrs(), tokens, &mut tts);
+        TokenStream::new(tts)
     }
 
     pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {

@@ -283,7 +283,10 @@ pub(crate) fn expand_cfg_attr(&self, cfg_attr: &Attribute, recursive: bool) -> V
                 .flat_map(|item| self.process_cfg_attr(&self.expand_cfg_attr_item(cfg_attr, item)))
                 .collect()
         } else {
-            expanded_attrs.into_iter().map(|item| self.expand_cfg_attr_item(cfg_attr, item)).collect()
+            expanded_attrs
+                .into_iter()
+                .map(|item| self.expand_cfg_attr_item(cfg_attr, item))
+                .collect()
         }
     }