From 53379a7b65055fc272db1178f68c9cef9b4aa3bc Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote
Date: Thu, 27 Jul 2023 09:39:28 +1000
Subject: [PATCH 1/8] Simplify the `ttdelim_span` test.

The existing code is a very complex and inefficient way to get the span
of the last token.
---
 compiler/rustc_expand/src/parse/tests.rs | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index 8b37728b60f..f891850963e 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -294,9 +294,7 @@ fn parse_expr_from_source_str(
         .unwrap();
     let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
 
-    let tts: Vec<_> = mac.args.tokens.clone().into_trees().collect();
-
-    let span = tts.iter().rev().next().unwrap().span();
+    let span = mac.args.tokens.trees().last().unwrap().span();
 
     match sess.source_map().span_to_snippet(span) {
         Ok(s) => assert_eq!(&s[..], "{ body }"),

From a9d84592995c18b33f69e85735210c50438596e1 Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote
Date: Thu, 27 Jul 2023 10:40:39 +1000
Subject: [PATCH 2/8] Replace `into_trees` with `trees` in a test.

There's no need for token tree cloning here.
---
 compiler/rustc_expand/src/parse/tests.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index f891850963e..541f2686235 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -63,9 +63,8 @@ fn bad_path_expr_1() {
 #[test]
 fn string_to_tts_macro() {
     create_default_session_globals_then(|| {
-        let tts: Vec<_> =
-            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).into_trees().collect();
-        let tts: &[TokenTree] = &tts[..];
+        let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
+        let tts = &stream.trees().collect::<Vec<_>>()[..];
 
         match tts {
             [

From 103bd4a8208d813a958f3d3653c687be577ec3c2 Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote
Date: Thu, 27 Jul 2023 10:00:04 +1000
Subject: [PATCH 3/8] Use `TokenStream::trees` instead of `into_trees` for attributes.

This avoids cloning some token trees. A couple of `clone` calls were
inserted, but only on some paths, and the next commit will remove them.
---
 compiler/rustc_ast/src/attr/mod.rs | 27 +++++++++++++--------------
 1 file changed, 13 insertions(+), 14 deletions(-)

diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 15fe29580c2..466765f2dbf 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -285,12 +285,12 @@ pub fn value_str(&self) -> Option<Symbol> {
         self.kind.value_str()
     }
 
-    fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
+    fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
     where
-        I: Iterator<Item = TokenTree>,
+        I: Iterator<Item = &'a TokenTree>,
    {
         // FIXME: Share code with `parse_path`.
-        let path = match tokens.next().map(TokenTree::uninterpolate) {
+        let path = match tokens.next().map(|tt| TokenTree::uninterpolate(tt.clone())) {
             Some(TokenTree::Token(
                 Token { kind: kind @ (token::Ident(..)
| token::ModSep), span }, _, @@ -309,7 +309,7 @@ fn from_tokens(tokens: &mut iter::Peekable) -> Option }; loop { if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) = - tokens.next().map(TokenTree::uninterpolate) + tokens.next().map(|tt| TokenTree::uninterpolate(tt.clone())) { segments.push(PathSegment::from_ident(Ident::new(name, span))); } else { @@ -354,7 +354,7 @@ pub fn value_str(&self) -> Option { } fn list_from_tokens(tokens: TokenStream) -> Option> { - let mut tokens = tokens.into_trees().peekable(); + let mut tokens = tokens.trees().peekable(); let mut result = ThinVec::new(); while tokens.peek().is_some() { let item = NestedMetaItem::from_tokens(&mut tokens)?; @@ -367,12 +367,12 @@ fn list_from_tokens(tokens: TokenStream) -> Option> { Some(result) } - fn name_value_from_tokens( - tokens: &mut impl Iterator, + fn name_value_from_tokens<'a>( + tokens: &mut impl Iterator, ) -> Option { match tokens.next() { Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => { - MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees()) + MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees()) } Some(TokenTree::Token(token, _)) => { MetaItemLit::from_token(&token).map(MetaItemKind::NameValue) @@ -381,8 +381,8 @@ fn name_value_from_tokens( } } - fn from_tokens( - tokens: &mut iter::Peekable>, + fn from_tokens<'a>( + tokens: &mut iter::Peekable>, ) -> Option { match tokens.peek() { Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => { @@ -501,9 +501,9 @@ pub fn is_meta_item(&self) -> bool { self.meta_item().is_some() } - fn from_tokens(tokens: &mut iter::Peekable) -> Option + fn from_tokens<'a, I>(tokens: &mut iter::Peekable) -> Option where - I: Iterator, + I: Iterator, { match tokens.peek() { Some(TokenTree::Token(token, _)) @@ -513,9 +513,8 @@ fn from_tokens(tokens: &mut iter::Peekable) -> Option return Some(NestedMetaItem::Lit(lit)); } Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => { - let inner_tokens = inner_tokens.clone(); tokens.next(); - return NestedMetaItem::from_tokens(&mut inner_tokens.into_trees().peekable()); + return NestedMetaItem::from_tokens(&mut inner_tokens.trees().peekable()); } _ => {} } From 55a732461dcd104311b24cc048dcef2ed0c9d57c Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 27 Jul 2023 10:10:32 +1000 Subject: [PATCH 4/8] Make `TokenTree::uninterpolate` take `&self` and return a `Cow`. Making it similar to `Token::uninterpolate`. This avoids some more token tree cloning. --- compiler/rustc_ast/src/attr/mod.rs | 14 +++++++------- compiler/rustc_ast/src/tokenstream.rs | 12 +++++++----- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 466765f2dbf..8b9bb1df5dc 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -290,12 +290,12 @@ fn from_tokens<'a, I>(tokens: &mut iter::Peekable) -> Option I: Iterator, { // FIXME: Share code with `parse_path`. - let path = match tokens.next().map(|tt| TokenTree::uninterpolate(tt.clone())) { - Some(TokenTree::Token( - Token { kind: kind @ (token::Ident(..) | token::ModSep), span }, + let path = match tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref() { + Some(&TokenTree::Token( + Token { kind: ref kind @ (token::Ident(..) 
| token::ModSep), span }, _, )) => 'arm: { - let mut segments = if let token::Ident(name, _) = kind { + let mut segments = if let &token::Ident(name, _) = kind { if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }, _)) = tokens.peek() { @@ -308,8 +308,8 @@ fn from_tokens<'a, I>(tokens: &mut iter::Peekable) -> Option thin_vec![PathSegment::path_root(span)] }; loop { - if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) = - tokens.next().map(|tt| TokenTree::uninterpolate(tt.clone())) + if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) = + tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref() { segments.push(PathSegment::from_ident(Ident::new(name, span))); } else { @@ -326,7 +326,7 @@ fn from_tokens<'a, I>(tokens: &mut iter::Peekable) -> Option let span = span.with_hi(segments.last().unwrap().ident.span.hi()); Path { span, segments, tokens: None } } - Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &*nt { + Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &**nt { token::Nonterminal::NtMeta(item) => return item.meta(item.path.span), token::Nonterminal::NtPath(path) => (**path).clone(), _ => return None, diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index ca4a739abd7..c6f0643147d 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -25,6 +25,7 @@ use rustc_span::{Span, DUMMY_SP}; use smallvec::{smallvec, SmallVec}; +use std::borrow::Cow; use std::{fmt, iter, mem}; /// When the main Rust parser encounters a syntax-extension invocation, it @@ -98,12 +99,13 @@ pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree { TokenTree::Token(Token::new(kind, span), Spacing::Joint) } - pub fn uninterpolate(self) -> TokenTree { + pub fn uninterpolate(&self) -> Cow<'_, TokenTree> { match self { - TokenTree::Token(token, spacing) => { - TokenTree::Token(token.uninterpolate().into_owned(), spacing) - } - tt => tt, + TokenTree::Token(token, spacing) => match token.uninterpolate() { + Cow::Owned(token) => Cow::Owned(TokenTree::Token(token, *spacing)), + Cow::Borrowed(_) => Cow::Borrowed(self), + }, + _ => Cow::Borrowed(self), } } } From 853f453d576ab18e2282b2d8c64cd7e37dd30382 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 27 Jul 2023 10:50:06 +1000 Subject: [PATCH 5/8] Avoid some token cloning in `filter_tokens_from_list`. Now the cloning only happens on some paths, instead of all paths. --- src/librustdoc/clean/mod.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index d14953f1bb7..bf6b1184639 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -2347,19 +2347,19 @@ fn get_all_import_attributes<'hir>( } fn filter_tokens_from_list( - args_tokens: TokenStream, + args_tokens: &TokenStream, should_retain: impl Fn(&TokenTree) -> bool, ) -> Vec { let mut tokens = Vec::with_capacity(args_tokens.len()); let mut skip_next_comma = false; - for token in args_tokens.into_trees() { + for token in args_tokens.trees() { match token { TokenTree::Token(Token { kind: TokenKind::Comma, .. 
}, _) if skip_next_comma => { skip_next_comma = false; } - token if should_retain(&token) => { + token if should_retain(token) => { skip_next_comma = false; - tokens.push(token); + tokens.push(token.clone()); } _ => { skip_next_comma = true; @@ -2417,7 +2417,7 @@ fn add_without_unwanted_attributes<'hir>( match normal.item.args { ast::AttrArgs::Delimited(ref mut args) => { let tokens = - filter_tokens_from_list(args.tokens.clone(), |token| { + filter_tokens_from_list(&args.tokens, |token| { !matches!( token, TokenTree::Token( From d2f7f67921ac718dcd0547e47f03630a30617743 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 27 Jul 2023 11:04:26 +1000 Subject: [PATCH 6/8] Avoid some token tree cloning in decl macro parsing. By changing `into_trees` into `trees`. Some of the subsequent paths require explicit clones, but not all. --- compiler/rustc_expand/src/mbe/macro_rules.rs | 4 +- compiler/rustc_expand/src/mbe/quoted.rs | 42 ++++++++++---------- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index 102bae2a744..7398a124fdb 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -500,7 +500,7 @@ pub fn compile_declarative_macro( .map(|m| { if let MatchedTokenTree(tt) = m { let tt = mbe::quoted::parse( - TokenStream::new(vec![tt.clone()]), + &TokenStream::new(vec![tt.clone()]), true, &sess.parse_sess, def.id, @@ -524,7 +524,7 @@ pub fn compile_declarative_macro( .map(|m| { if let MatchedTokenTree(tt) = m { return mbe::quoted::parse( - TokenStream::new(vec![tt.clone()]), + &TokenStream::new(vec![tt.clone()]), false, &sess.parse_sess, def.id, diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index 40bfa3715be..ac862ae8c4f 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -36,7 +36,7 @@ /// /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`. pub(super) fn parse( - input: tokenstream::TokenStream, + input: &tokenstream::TokenStream, parsing_patterns: bool, sess: &ParseSess, node_id: NodeId, @@ -48,7 +48,7 @@ pub(super) fn parse( // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming // additional trees if need be. - let mut trees = input.into_trees(); + let mut trees = input.trees(); while let Some(tree) = trees.next() { // Given the parsed tree, if there is a metavar and we are expecting matchers, actually // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`). 
@@ -56,7 +56,7 @@ pub(super) fn parse( match tree { TokenTree::MetaVar(start_sp, ident) if parsing_patterns => { let span = match trees.next() { - Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => { + Some(&tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => { match trees.next() { Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() { Some((frag, _)) => { @@ -96,10 +96,10 @@ pub(super) fn parse( } _ => token.span, }, - tree => tree.as_ref().map_or(span, tokenstream::TokenTree::span), + tree => tree.map_or(span, tokenstream::TokenTree::span), } } - tree => tree.as_ref().map_or(start_sp, tokenstream::TokenTree::span), + tree => tree.map_or(start_sp, tokenstream::TokenTree::span), }; result.push(TokenTree::MetaVarDecl(span, ident, None)); @@ -134,9 +134,9 @@ fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &ParseSess, /// - `parsing_patterns`: same as [parse]. /// - `sess`: the parsing session. Any errors will be emitted to this session. /// - `features`: language features so we can do feature gating. -fn parse_tree( - tree: tokenstream::TokenTree, - outer_trees: &mut impl Iterator, +fn parse_tree<'a>( + tree: &'a tokenstream::TokenTree, + outer_trees: &mut impl Iterator, parsing_patterns: bool, sess: &ParseSess, node_id: NodeId, @@ -146,13 +146,13 @@ fn parse_tree( // Depending on what `tree` is, we could be parsing different parts of a macro match tree { // `tree` is a `$` token. Look at the next token in `trees` - tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => { + &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => { // FIXME: Handle `Invisible`-delimited groups in a more systematic way // during parsing. let mut next = outer_trees.next(); - let mut trees: Box>; + let mut trees: Box>; if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next { - trees = Box::new(tts.into_trees()); + trees = Box::new(tts.trees()); next = trees.next(); } else { trees = Box::new(outer_trees); @@ -160,7 +160,7 @@ fn parse_tree( match next { // `tree` is followed by a delimited set of token trees. - Some(tokenstream::TokenTree::Delimited(delim_span, delim, tts)) => { + Some(&tokenstream::TokenTree::Delimited(delim_span, delim, ref tts)) => { if parsing_patterns { if delim != Delimiter::Parenthesis { span_dollar_dollar_or_metavar_in_the_lhs_err( @@ -228,7 +228,7 @@ fn parse_tree( } // `tree` is followed by another `$`. This is an escaped `$`. - Some(tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => { + Some(&tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => { if parsing_patterns { span_dollar_dollar_or_metavar_in_the_lhs_err( sess, @@ -256,11 +256,11 @@ fn parse_tree( } // `tree` is an arbitrary token. Keep it. - tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token), + tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()), // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to // descend into the delimited set and further parse it. 
- tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( + &tokenstream::TokenTree::Delimited(span, delim, ref tts) => TokenTree::Delimited( span, Delimited { delim, @@ -286,16 +286,16 @@ fn kleene_op(token: &Token) -> Option { /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp /// - Err(span) if the next token tree is not a token -fn parse_kleene_op( - input: &mut impl Iterator, +fn parse_kleene_op<'a>( + input: &mut impl Iterator, span: Span, ) -> Result, Span> { match input.next() { Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) { Some(op) => Ok(Ok((op, token.span))), - None => Ok(Err(token)), + None => Ok(Err(token.clone())), }, - tree => Err(tree.as_ref().map_or(span, tokenstream::TokenTree::span)), + tree => Err(tree.map_or(span, tokenstream::TokenTree::span)), } } @@ -311,8 +311,8 @@ fn parse_kleene_op( /// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an /// error with the appropriate span is emitted to `sess` and a dummy value is returned. -fn parse_sep_and_kleene_op( - input: &mut impl Iterator, +fn parse_sep_and_kleene_op<'a>( + input: &mut impl Iterator, span: Span, sess: &ParseSess, ) -> (Option, KleeneToken) { From ee6ed603733c5b61690a920c4eaa4806f1d0f213 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 27 Jul 2023 11:40:36 +1000 Subject: [PATCH 7/8] Avoid `into_trees` usage in rustfmt. Token tree cloning is only needed in one place. --- src/tools/rustfmt/src/macros.rs | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs index e9a298a2769..4f45d0c7402 100644 --- a/src/tools/rustfmt/src/macros.rs +++ b/src/tools/rustfmt/src/macros.rs @@ -13,7 +13,7 @@ use std::panic::{catch_unwind, AssertUnwindSafe}; use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind}; -use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor}; +use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree}; use rustc_ast::{ast, ptr}; use rustc_ast_pretty::pprust; use rustc_span::{ @@ -394,7 +394,7 @@ pub(crate) fn rewrite_macro_def( } let ts = def.body.tokens.clone(); - let mut parser = MacroParser::new(ts.into_trees()); + let mut parser = MacroParser::new(ts.trees()); let parsed_def = match parser.parse() { Some(def) => def, None => return snippet, @@ -736,9 +736,9 @@ fn add_other(&mut self) { self.buf.clear(); } - fn add_meta_variable(&mut self, iter: &mut TokenTreeCursor) -> Option<()> { + fn add_meta_variable(&mut self, iter: &mut RefTokenTreeCursor<'_>) -> Option<()> { match iter.next() { - Some(TokenTree::Token( + Some(&TokenTree::Token( Token { kind: TokenKind::Ident(name, _), .. @@ -768,7 +768,7 @@ fn add_repeat( &mut self, inner: Vec, delim: Delimiter, - iter: &mut TokenTreeCursor, + iter: &mut RefTokenTreeCursor<'_>, ) -> Option<()> { let mut buffer = String::new(); let mut first = true; @@ -868,11 +868,11 @@ fn need_space_prefix(&self) -> bool { /// Returns a collection of parsed macro def's arguments. 
fn parse(mut self, tokens: TokenStream) -> Option> { - let mut iter = tokens.into_trees(); + let mut iter = tokens.trees(); while let Some(tok) = iter.next() { match tok { - TokenTree::Token( + &TokenTree::Token( Token { kind: TokenKind::Dollar, span, @@ -901,7 +901,7 @@ fn parse(mut self, tokens: TokenStream) -> Option> { self.add_meta_variable(&mut iter)?; } TokenTree::Token(ref t, _) => self.update_buffer(t), - TokenTree::Delimited(_delimited_span, delimited, ref tts) => { + &TokenTree::Delimited(_delimited_span, delimited, ref tts) => { if !self.buf.is_empty() { if next_space(&self.last_tok.kind) == SpaceState::Always { self.add_separator(); @@ -1119,12 +1119,12 @@ pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> D // A very simple parser that just parses a macros 2.0 definition into its branches. // Currently we do not attempt to parse any further than that. -struct MacroParser { - toks: TokenTreeCursor, +struct MacroParser<'a> { + toks: RefTokenTreeCursor<'a>, } -impl MacroParser { - const fn new(toks: TokenTreeCursor) -> Self { +impl<'a> MacroParser<'a> { + const fn new(toks: RefTokenTreeCursor<'a>) -> Self { Self { toks } } @@ -1143,9 +1143,9 @@ fn parse_branch(&mut self) -> Option { let tok = self.toks.next()?; let (lo, args_paren_kind) = match tok { TokenTree::Token(..) => return None, - TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d), + &TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d), }; - let args = TokenStream::new(vec![tok]); + let args = TokenStream::new(vec![tok.clone()]); match self.toks.next()? { TokenTree::Token( Token { From 4ebf2be8bb91e41fdf6c7c337482c72317508cef Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 27 Jul 2023 11:48:55 +1000 Subject: [PATCH 8/8] Remove `Iterator` impl for `TokenTreeCursor`. This is surprising, but the new comment explains why. It's a logical conclusion in the drive to avoid `TokenTree` clones. `TokenTreeCursor` is now only used within `Parser`. It's still needed due to `replace_prev_and_rewind`. --- compiler/rustc_ast/src/tokenstream.rs | 21 +++++++------------ compiler/rustc_expand/src/config.rs | 6 +++--- .../rustc_expand/src/proc_macro_server.rs | 4 ++-- 3 files changed, 13 insertions(+), 18 deletions(-) diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index c6f0643147d..348c37c480f 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -597,26 +597,21 @@ fn next(&mut self) -> Option<&'t TokenTree> { } } -/// Owning by-value iterator over a [`TokenStream`], that produces `TokenTree` +/// Owning by-value iterator over a [`TokenStream`], that produces `&TokenTree` /// items. -// FIXME: Many uses of this can be replaced with by-reference iterator to avoid clones. +/// +/// Doesn't impl `Iterator` because Rust doesn't permit an owning iterator to +/// return `&T` from `next`; the need for an explicit lifetime in the `Item` +/// associated type gets in the way. Instead, use `next_ref` (which doesn't +/// involve associated types) for getting individual elements, or +/// `RefTokenTreeCursor` if you really want an `Iterator`, e.g. in a `for` +/// loop. 
 #[derive(Clone)]
 pub struct TokenTreeCursor {
     pub stream: TokenStream,
     index: usize,
 }
 
-impl Iterator for TokenTreeCursor {
-    type Item = TokenTree;
-
-    fn next(&mut self) -> Option<TokenTree> {
-        self.stream.0.get(self.index).map(|tree| {
-            self.index += 1;
-            tree.clone()
-        })
-    }
-}
-
 impl TokenTreeCursor {
     fn new(stream: TokenStream) -> Self {
         TokenTreeCursor { stream, index: 0 }
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 4ec5ac22e90..aeb4f6e861b 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -365,9 +365,9 @@ fn expand_cfg_attr_item(
 
         // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
         // for `attr` when we expand it to `#[attr]`
-        let mut orig_trees = orig_tokens.into_trees();
+        let mut orig_trees = orig_tokens.trees();
         let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _) =
-            orig_trees.next().unwrap()
+            orig_trees.next().unwrap().clone()
         else {
             panic!("Bad tokens for attribute {:?}", attr);
         };
@@ -377,7 +377,7 @@ fn expand_cfg_attr_item(
         if attr.style == AttrStyle::Inner {
             // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
             let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) =
-                orig_trees.next().unwrap()
+                orig_trees.next().unwrap().clone()
             else {
                 panic!("Bad tokens for attribute {:?}", attr);
             };
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index ecd2315112a..ac73b5d72b7 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -94,10 +94,10 @@ fn from_internal((stream, rustc): (TokenStream, &mut Rustc<'_, '_>)) -> Self {
         // Estimate the capacity as `stream.len()` rounded up to the next power
         // of two to limit the number of required reallocations.
         let mut trees = Vec::with_capacity(stream.len().next_power_of_two());
-        let mut cursor = stream.into_trees();
+        let mut cursor = stream.trees();
 
         while let Some(tree) = cursor.next() {
-            let (Token { kind, span }, joint) = match tree {
+            let (Token { kind, span }, joint) = match tree.clone() {
                 tokenstream::TokenTree::Delimited(span, delim, tts) => {
                     let delimiter = pm::Delimiter::from_internal(delim);
                     trees.push(TokenTree::Group(Group {
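
The new doc comment in patch 8 says an owning cursor cannot implement `Iterator`
while handing out `&TokenTree` items, which is why the impl above is removed.
Below is a minimal standalone sketch of that point. It does not use rustc's real
types: `Tok`, `Stream`, `Cursor`, and `next_ref` here are illustrative stand-ins
for `TokenTree`, `TokenStream`, `TokenTreeCursor`, and its lookahead method, not
the compiler's actual API.

// Standalone sketch, not rustc code.
#[derive(Clone, Debug, PartialEq)]
struct Tok(String);

#[derive(Clone, Debug)]
struct Stream(Vec<Tok>);

impl Stream {
    // By-reference iteration (the `trees()` style): no clones, and a plain
    // `Iterator` works because each item borrows from `self`, not from the
    // iterator value itself.
    fn trees(&self) -> std::slice::Iter<'_, Tok> {
        self.0.iter()
    }

    // Owning cursor (the `into_trees()` style), analogous to `TokenTreeCursor`.
    fn into_trees(self) -> Cursor {
        Cursor { stream: self, index: 0 }
    }
}

struct Cursor {
    stream: Stream,
    index: usize,
}

impl Cursor {
    // Returning `&Tok` from an owning cursor works as an inherent method: the
    // reference borrows from `&mut self` for as long as the caller holds it.
    // It cannot be `Iterator::next`, because `Iterator::Item` has no way to
    // name that borrow's lifetime (that would need a "lending iterator").
    fn next_ref(&mut self) -> Option<&Tok> {
        let tok = self.stream.0.get(self.index)?;
        self.index += 1;
        Some(tok)
    }
}

fn main() {
    let stream = Stream(vec![Tok("a".into()), Tok("b".into())]);

    // Borrowing iteration composes with ordinary iterator adapters,
    // e.g. the `.trees().last()` call introduced in patch 1.
    assert_eq!(stream.trees().last(), Some(&Tok("b".into())));

    // The owning cursor instead hands out references one at a time.
    let mut cursor = stream.into_trees();
    while let Some(tok) = cursor.next_ref() {
        println!("{tok:?}");
    }
}

This borrowing-versus-owning split is the design choice running through the whole
series: callers that only inspect tokens switch to `trees()` and drop their clones,
while the owning cursor survives only inside `Parser`, where (per the patch 8
commit message) `replace_prev_and_rewind` still needs it.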