Auto merge of #76696 - Aaron1011:tokenstream-avoid-clone, r=petrochenkov

Avoid cloning the contents of a `TokenStream` in a few places
bors 2020-10-20 05:45:08 +00:00
commit 554633534c
3 changed files with 12 additions and 11 deletions
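All three changes follow the same pattern: hand out borrows of the `TokenTree`s stored behind the `Lrc<Vec<TreeAndSpacing>>` instead of cloning them up front, and leave any clone that is still needed to the caller.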


@@ -125,7 +125,7 @@ fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub Lrc<Vec<TreeAndSpacing>>);
+pub struct TokenStream(pub(crate) Lrc<Vec<TreeAndSpacing>>);
 pub type TreeAndSpacing = (TokenTree, Spacing);
@@ -286,12 +286,12 @@ pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
         t1.next().is_none() && t2.next().is_none()
     }
-    pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
+    pub fn map_enumerated<F: FnMut(usize, &TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
         TokenStream(Lrc::new(
             self.0
                 .iter()
                 .enumerate()
-                .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                .map(|(i, (tree, is_joint))| (f(i, tree), *is_joint))
                 .collect(),
         ))
     }
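A minimal sketch of what the new `map_enumerated` contract means for callers, using made-up stand-in types rather than the compiler's own: the closure now receives a borrowed tree and clones only if it actually needs an owned value.

// Toy analog of the `map_enumerated` change (illustrative types, not rustc's).
// The mapping closure receives `&Tree` and decides for itself whether to clone.
#[derive(Clone, Debug)]
struct Tree(u32);

fn map_enumerated<F: FnMut(usize, &Tree) -> Tree>(trees: Vec<Tree>, mut f: F) -> Vec<Tree> {
    trees.iter().enumerate().map(|(i, t)| f(i, t)).collect()
}

fn main() {
    let trees = vec![Tree(10), Tree(20)];
    // This caller mutates, so it clones explicitly inside the closure.
    let shifted = map_enumerated(trees, |i, t| {
        let mut t = t.clone();
        t.0 += i as u32;
        t
    });
    println!("{:?}", shifted); // [Tree(10), Tree(21)]
}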
@@ -394,8 +394,8 @@ pub fn append(&mut self, new_stream: TokenStream) {
         self.index = index;
     }
-    pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        self.stream.0[self.index..].get(n).map(|(tree, _)| tree.clone())
+    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
+        self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
     }
 }
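With `Cursor::look_ahead` returning `Option<&TokenTree>`, peeking at upcoming trees no longer copies them out of the shared vector; callers that do need ownership can still call `.cloned()` on the result. A standalone sketch of the pattern, assuming illustrative `Stream`/`Item` types that are not the compiler's:

// Illustrative sketch, not compiler code: a borrowing lookahead lets the
// caller choose between a cheap peek and an explicit clone.
#[derive(Clone, Debug)]
struct Item(String);

struct Stream {
    items: Vec<Item>,
    index: usize,
}

impl Stream {
    // Before the change the analogous method returned `Option<Item>`,
    // which forced a clone on every call.
    fn look_ahead(&self, n: usize) -> Option<&Item> {
        self.items[self.index..].get(n)
    }
}

fn main() {
    let s = Stream { items: vec![Item("a".into()), Item("b".into())], index: 0 };
    if let Some(item) = s.look_ahead(1) {
        println!("peeked without cloning: {:?}", item);
    }
    let owned: Option<Item> = s.look_ahead(0).cloned(); // clone only where needed
    println!("{:?}", owned);
}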


@@ -288,7 +288,8 @@ fn generic_extension<'cx>(
                 // Replace all the tokens for the corresponding positions in the macro, to maintain
                 // proper positions in error reporting, while maintaining the macro_backtrace.
                 if rhs_spans.len() == tts.len() {
-                    tts = tts.map_enumerated(|i, mut tt| {
+                    tts = tts.map_enumerated(|i, tt| {
+                        let mut tt = tt.clone();
                         let mut sp = rhs_spans[i];
                         sp = sp.with_ctxt(tt.span().ctxt());
                         tt.set_span(sp);
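This call site genuinely needs an owned, mutable `TokenTree` (it rewrites each tree's span), so the clone now happens explicitly inside the closure rather than being baked into `map_enumerated` for every caller.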


@@ -833,15 +833,15 @@ pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R
         }
         let frame = &self.token_cursor.frame;
-        looker(&match frame.tree_cursor.look_ahead(dist - 1) {
+        match frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
-                TokenTree::Token(token) => token,
+                TokenTree::Token(token) => looker(token),
                 TokenTree::Delimited(dspan, delim, _) => {
-                    Token::new(token::OpenDelim(delim), dspan.open)
+                    looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                 }
             },
-            None => Token::new(token::CloseDelim(frame.delim), frame.span.close),
-        })
+            None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
+        }
     }
     /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
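Because the tree cursor now hands back `&TokenTree`, the parser's `look_ahead` can no longer build a single owned `Token` from the match and borrow it afterwards; instead it calls `looker` inside each arm, borrowing the token straight out of the tree where possible and constructing a temporary `Token` only for the delimiter and end-of-stream cases. A toy analog of that restructuring, with made-up types:

// Toy analog of the parser change (illustrative types, not rustc's).
// Invoking the callback per match arm lets the borrowed arm pass `&Token`
// through directly, while the other arms build a short-lived temporary.
#[derive(Debug)]
struct Token(String);

enum Tree {
    Token(Token),
    Delimited(char),
}

fn with_lookahead<R>(tree: Option<&Tree>, looker: impl FnOnce(&Token) -> R) -> R {
    match tree {
        // Borrow straight out of the tree: no clone, no temporary.
        Some(Tree::Token(token)) => looker(token),
        // Synthesize a token only in the arms that need one.
        Some(Tree::Delimited(open)) => looker(&Token(open.to_string())),
        None => looker(&Token("<end>".to_string())),
    }
}

fn main() {
    let tree = Tree::Token(Token("ident".to_string()));
    let len = with_lookahead(Some(&tree), |tok| tok.0.len());
    println!("{}", len);
}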