refactor: add chunks method to TokenStream to obviate rustdoc clones

Caleb Cartwright 2023-05-12 16:57:29 -05:00
parent 2c41369acc
commit 00c3f7552e
2 changed files with 5 additions and 2 deletions


@@ -551,6 +551,10 @@ pub fn push_stream(&mut self, stream: TokenStream) {
             vec_mut.extend(stream_iter);
         }
     }
+
+    pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
+        self.0.chunks(chunk_size)
+    }
 }
 
 /// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`
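For illustration only, here is a minimal standalone sketch of the pattern this new method enables, not the rustc code itself: `FakeTokenTree`, `FakeTokenStream`, and the sample matcher strings are invented stand-ins. A newtype over a `Vec` exposes `slice::chunks`, so a caller can walk the items in fixed-size groups by reference instead of first cloning them into its own `Vec`.

// Illustration only: invented stand-ins for rustc's TokenTree / TokenStream.
#[derive(Debug)]
struct FakeTokenTree(&'static str);

struct FakeTokenStream(Vec<FakeTokenTree>);

impl FakeTokenStream {
    // Same shape as the method added above: delegate to slice::chunks on the backing storage.
    fn chunks(&self, chunk_size: usize) -> std::slice::Chunks<'_, FakeTokenTree> {
        self.0.chunks(chunk_size)
    }
}

fn main() {
    // The rustdoc change below relies on a macro_rules! body flattening into
    // repeating groups of four token trees: matcher, `=>`, expansion, `;`.
    let stream = FakeTokenStream(vec![
        FakeTokenTree("($a:expr)"),
        FakeTokenTree("=>"),
        FakeTokenTree("{ ... }"),
        FakeTokenTree(";"),
        FakeTokenTree("($a:expr, $b:expr)"),
        FakeTokenTree("=>"),
        FakeTokenTree("{ ... }"),
        FakeTokenTree(";"),
    ]);

    // Borrow the first tree of each arm without cloning the whole stream.
    let matchers: Vec<&FakeTokenTree> = stream.chunks(4).map(|arm| &arm[0]).collect();
    println!("{matchers:?}");
}

Because `slice::Chunks` only borrows, no intermediate allocation is needed, which is what lets rustdoc drop the `clone().into_trees().collect()` call in the next file.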


@@ -594,9 +594,8 @@ pub(super) fn display_macro_source(
     def_id: DefId,
     vis: ty::Visibility<DefId>,
 ) -> String {
-    let tts: Vec<_> = def.body.tokens.clone().into_trees().collect();
     // Extract the spans of all matchers. They represent the "interface" of the macro.
-    let matchers = tts.chunks(4).map(|arm| &arm[0]);
+    let matchers = def.body.tokens.chunks(4).map(|arm| &arm[0]);
 
     if def.macro_rules {
         format!("macro_rules! {} {{\n{}}}", name, render_macro_arms(cx.tcx, matchers, ";"))