Rollup merge of #65455 - nnethercote:avoid-unnecessary-TokenTree-to-TokenStream-conversions, r=petrochenkov
Avoid unnecessary `TokenTree` to `TokenStream` conversions

A `TokenStream` contains any number of `TokenTree`s. Therefore, a single `TokenTree` can be promoted to a `TokenStream`, but doing so costs two allocations: one for the single-element `Vec`, and one for the `Lrc`. (An `IsJoint` value must also be added; the default is `NonJoint`.) The current code converts `TokenTree`s to `TokenStream`s unnecessarily in a few places. This PR removes some of these unnecessary conversions, both simplifying the code and speeding it up.

r? @petrochenkov
This commit is contained in commit f5f5c9e993.
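For context (not part of the commit), here is a minimal, self-contained sketch of the shape of the types involved. The names mirror `syntax::tokenstream`, but the definitions are simplified stand-ins rather than the real rustc code; the sketch only illustrates where the two allocations mentioned above come from.

```rust
use std::sync::Arc; // stand-in for rustc's `Lrc`

// Simplified stand-ins: the real variants carry tokens, spans, and delimiters.
enum TokenTree {
    Token,
    Delimited,
}

#[derive(Clone, Copy)]
enum IsJoint {
    NonJoint,
}

// A tree paired with its jointness flag; streams store these pairs.
type TreeAndJoint = (TokenTree, IsJoint);

// A `TokenStream` is a shared, heap-allocated vector of trees.
struct TokenStream(Arc<Vec<TreeAndJoint>>);

impl From<TokenTree> for TokenStream {
    fn from(tree: TokenTree) -> TokenStream {
        // Promoting a single tree costs two allocations: the one-element
        // `Vec` and the `Arc` (rustc's `Lrc`) that wraps it, plus the
        // default `NonJoint` flag mentioned in the commit message.
        TokenStream(Arc::new(vec![(tree, IsJoint::NonJoint)]))
    }
}
```

Under this model, `TokenStream::from(tree)` pays for a fresh `Vec` and a fresh reference-counted allocation even when the caller immediately splices the result into a larger stream, which is the overhead the diff below avoids.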
@@ -22,7 +22,7 @@ use crate::ptr::P;
 use crate::sess::ParseSess;
 use crate::symbol::{sym, Symbol};
 use crate::ThinVec;
-use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 use crate::GLOBALS;
 
 use log::debug;
@@ -463,7 +463,7 @@ pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: Symbol) -> Option
 }
 
 impl MetaItem {
-    fn tokens(&self) -> TokenStream {
+    fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
         let mut idents = vec![];
         let mut last_pos = BytePos(0 as u32);
         for (i, segment) in self.path.segments.iter().enumerate() {
@@ -477,8 +477,8 @@ impl MetaItem {
             idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
-        self.kind.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
-        TokenStream::new(idents)
+        idents.extend(self.kind.token_trees_and_joints(self.span));
+        idents
     }
 
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
@@ -537,13 +537,14 @@ impl MetaItem {
 }
 
 impl MetaItemKind {
-    pub fn tokens(&self, span: Span) -> TokenStream {
+    pub fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> {
         match *self {
-            MetaItemKind::Word => TokenStream::default(),
+            MetaItemKind::Word => vec![],
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::token(token::Eq, span).into()];
-                lit.tokens().append_to_tree_and_joint_vec(&mut vec);
-                TokenStream::new(vec)
+                vec![
+                    TokenTree::token(token::Eq, span).into(),
+                    lit.token_tree().into(),
+                ]
             }
             MetaItemKind::List(ref list) => {
                 let mut tokens = Vec::new();
@@ -551,17 +552,26 @@ impl MetaItemKind {
                     if i > 0 {
                         tokens.push(TokenTree::token(token::Comma, span).into());
                     }
-                    item.tokens().append_to_tree_and_joint_vec(&mut tokens);
+                    tokens.extend(item.token_trees_and_joints())
                 }
-                TokenTree::Delimited(
-                    DelimSpan::from_single(span),
-                    token::Paren,
-                    TokenStream::new(tokens).into(),
-                ).into()
+                vec![
+                    TokenTree::Delimited(
+                        DelimSpan::from_single(span),
+                        token::Paren,
+                        TokenStream::new(tokens).into(),
+                    ).into()
+                ]
             }
         }
     }
 
+    // Premature conversions of `TokenTree`s to `TokenStream`s can hurt
+    // performance. Do not use this function if `token_trees_and_joints()` can
+    // be used instead.
+    pub fn tokens(&self, span: Span) -> TokenStream {
+        TokenStream::new(self.token_trees_and_joints(span))
+    }
+
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
         where I: Iterator<Item = TokenTree>,
     {
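The comment added above states the intent: accumulate plain `(TokenTree, IsJoint)` pairs and wrap them in a `TokenStream` only once, at the end. A hedged sketch of that call pattern, reusing the simplified stand-in types from the earlier example (not rustc's real helpers):

```rust
// Sketch only: allocate the backing Vec and the shared pointer a single
// time for the whole stream, instead of promoting each tree to its own
// one-element TokenStream and concatenating the pieces.
fn collect_into_stream(trees: Vec<TokenTree>) -> TokenStream {
    let mut parts: Vec<TreeAndJoint> = Vec::with_capacity(trees.len());
    for tree in trees {
        parts.push((tree, IsJoint::NonJoint)); // no per-tree TokenStream
    }
    TokenStream(Arc::new(parts))
}
```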
@@ -603,10 +613,10 @@ impl NestedMetaItem {
         }
     }
 
-    fn tokens(&self) -> TokenStream {
+    fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
         match *self {
-            NestedMetaItem::MetaItem(ref item) => item.tokens(),
-            NestedMetaItem::Literal(ref lit) => lit.tokens(),
+            NestedMetaItem::MetaItem(ref item) => item.token_trees_and_joints(),
+            NestedMetaItem::Literal(ref lit) => vec![lit.token_tree().into()],
         }
     }
 
@@ -3,7 +3,7 @@
 use crate::ast::{self, Lit, LitKind};
 use crate::parse::token::{self, Token};
 use crate::symbol::{kw, sym, Symbol};
-use crate::tokenstream::{TokenStream, TokenTree};
+use crate::tokenstream::TokenTree;
 
 use log::debug;
 use rustc_data_structures::sync::Lrc;
@@ -216,13 +216,13 @@ impl Lit {
         Lit { token: kind.to_lit_token(), kind, span }
     }
 
-    /// Losslessly convert an AST literal into a token stream.
-    crate fn tokens(&self) -> TokenStream {
+    /// Losslessly convert an AST literal into a token tree.
+    crate fn token_tree(&self) -> TokenTree {
         let token = match self.token.kind {
             token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::token(token, self.span).into()
+        TokenTree::token(token, self.span)
     }
 }
 
@@ -285,10 +285,10 @@ impl TokenCursor {
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             } else {
                 [TokenTree::token(token::Pound, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             },
         )));
 
@@ -6,7 +6,6 @@ use crate::tokenstream::{TokenStream, TokenTree};
 use crate::source_map::Span;
 
 use log::debug;
-use smallvec::smallvec;
 
 #[derive(Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -193,15 +192,15 @@ impl<'a> Parser<'a> {
                     is_interpolated_expr = true;
                 }
             }
-            let tokens = if is_interpolated_expr {
+            let token_tree = if is_interpolated_expr {
                 // We need to accept arbitrary interpolated expressions to continue
                 // supporting things like `doc = $expr` that work on stable.
                 // Non-literal interpolated expressions are rejected after expansion.
-                self.parse_token_tree().into()
+                self.parse_token_tree()
             } else {
-                self.parse_unsuffixed_lit()?.tokens()
+                self.parse_unsuffixed_lit()?.token_tree()
             };
-            TokenStream::from_streams(smallvec![eq.into(), tokens])
+            TokenStream::new(vec![eq.into(), token_tree.into()])
         } else {
             TokenStream::default()
         };
@@ -202,9 +202,9 @@ impl From<TokenTree> for TreeAndJoint {
     }
 }
 
-impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
+        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndJoint>>())
     }
 }
 
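A hedged note on the hunk above: narrowing the impl to `FromIterator<TokenTree>` means that `collect::<TokenStream>()`, as used in the `TokenCursor` hunk earlier, now fills the backing vector directly instead of building and merging per-item streams. Roughly, in terms of the simplified stand-in types from the first example:

```rust
use std::iter::FromIterator;

impl FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
        // Each tree goes straight into the shared Vec<TreeAndJoint>;
        // no intermediate one-tree TokenStreams are allocated.
        TokenStream(Arc::new(
            iter.into_iter().map(|tt| (tt, IsJoint::NonJoint)).collect(),
        ))
    }
}
```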
@@ -271,10 +271,6 @@ impl TokenStream {
         }
     }
 
-    pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        vec.extend(self.0.iter().cloned());
-    }
-
     pub fn trees(&self) -> Cursor {
         self.clone().into_trees()
     }