Commit 00c3f75

refactor: add chunks method to TokenStream to obviate rustdoc clones
Parent: 2c41369 · Commit: 00c3f75

2 files changed: +5 -2 lines changed

compiler/rustc_ast/src/tokenstream.rs (+4)
@@ -551,6 +551,10 @@ impl TokenStream {
             vec_mut.extend(stream_iter);
         }
     }
+
+    pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
+        self.0.chunks(chunk_size)
+    }
 }
 
 /// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`

src/librustdoc/clean/utils.rs (+1, -2)
@@ -594,9 +594,8 @@ pub(super) fn display_macro_source(
     def_id: DefId,
     vis: ty::Visibility<DefId>,
 ) -> String {
-    let tts: Vec<_> = def.body.tokens.clone().into_trees().collect();
     // Extract the spans of all matchers. They represent the "interface" of the macro.
-    let matchers = tts.chunks(4).map(|arm| &arm[0]);
+    let matchers = def.body.tokens.chunks(4).map(|arm| &arm[0]);
 
     if def.macro_rules {
         format!("macro_rules! {} {{\n{}}}", name, render_macro_arms(cx.tcx, matchers, ";"))
