Rollup merge of #124607 - nnethercote:rustc_expand-cleanups, r=compiler-errors

`rustc_expand` cleanups

Some cleanups I made while looking through this code. Nothing that requires any real domain-specific knowledge about this crate.
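
As one representative example, the `transcribe.rs` change below factors the fields the two `Frame` variants had in common (`tts` and `idx`) out into a struct, leaving only the variant-specific data in a new `FrameKind` enum. A minimal, self-contained sketch of that shape (stand-in `TokenTree`/`Token` types, not the real `mbe` ones):

```rust
// Standalone sketch only: `TokenTree` and `Token` are stand-ins for the
// real `mbe` types.
#![allow(dead_code)]

struct TokenTree;
struct Token;

// Before: both variants carried their own `tts`/`idx` cursor.
enum OldFrame<'a> {
    Delimited { tts: &'a [TokenTree], idx: usize },
    Sequence { tts: &'a [TokenTree], idx: usize, sep: Option<Token> },
}

// After: the shared cursor lives on the struct; only variant-specific
// data stays in the enum.
struct Frame<'a> {
    tts: &'a [TokenTree],
    idx: usize,
    kind: FrameKind,
}

enum FrameKind {
    Delimited,
    Sequence { sep: Option<Token> },
}

impl<'a> Iterator for Frame<'a> {
    type Item = &'a TokenTree;

    // Pulling the shared cursor out of the enum means iteration no longer
    // needs to match on the variant.
    fn next(&mut self) -> Option<&'a TokenTree> {
        let res = self.tts.get(self.idx);
        self.idx += 1;
        res
    }
}
```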

r? @michaelwoerister
Matthias Krüger 2024-05-03 06:04:21 +02:00 committed by GitHub
commit ad0be151af
10 changed files with 133 additions and 166 deletions


@ -21,7 +21,7 @@
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
use rustc_span::edition::Edition;
use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId};
use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId, MacroKind};
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{FileName, Span, DUMMY_SP};
@ -32,8 +32,6 @@
use std::rc::Rc;
use thin_vec::ThinVec;
pub(crate) use rustc_span::hygiene::MacroKind;
// When adding new variants, make sure to
// adjust the `visit_*` / `flat_map_*` calls in `InvocationCollector`
// to use `assign_id!`
@ -573,35 +571,6 @@ pub fn raw_expr(sp: Span, guar: Option<ErrorGuaranteed>) -> P<ast::Expr> {
tokens: None,
})
}
/// A plain dummy pattern.
pub fn raw_pat(sp: Span) -> ast::Pat {
ast::Pat { id: ast::DUMMY_NODE_ID, kind: PatKind::Wild, span: sp, tokens: None }
}
/// A plain dummy type.
pub fn raw_ty(sp: Span) -> P<ast::Ty> {
// FIXME(nnethercote): you might expect `ast::TyKind::Dummy` to be used here, but some
// values produced here end up being lowered to HIR, which `ast::TyKind::Dummy` does not
// support, so we use an empty tuple instead.
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
kind: ast::TyKind::Tup(ThinVec::new()),
span: sp,
tokens: None,
})
}
/// A plain dummy crate.
pub fn raw_crate() -> ast::Crate {
ast::Crate {
attrs: Default::default(),
items: Default::default(),
spans: Default::default(),
id: ast::DUMMY_NODE_ID,
is_placeholder: Default::default(),
}
}
}
impl MacResult for DummyResult {
@ -610,7 +579,12 @@ fn make_expr(self: Box<DummyResult>) -> Option<P<ast::Expr>> {
}
fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
Some(P(DummyResult::raw_pat(self.span)))
Some(P(ast::Pat {
id: ast::DUMMY_NODE_ID,
kind: PatKind::Wild,
span: self.span,
tokens: None,
}))
}
fn make_items(self: Box<DummyResult>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
@ -638,7 +612,15 @@ fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
}
fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> {
Some(DummyResult::raw_ty(self.span))
// FIXME(nnethercote): you might expect `ast::TyKind::Dummy` to be used here, but some
// values produced here end up being lowered to HIR, which `ast::TyKind::Dummy` does not
// support, so we use an empty tuple instead.
Some(P(ast::Ty {
id: ast::DUMMY_NODE_ID,
kind: ast::TyKind::Tup(ThinVec::new()),
span: self.span,
tokens: None,
}))
}
fn make_arms(self: Box<DummyResult>) -> Option<SmallVec<[ast::Arm; 1]>> {
@ -670,7 +652,13 @@ fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> {
}
fn make_crate(self: Box<DummyResult>) -> Option<ast::Crate> {
Some(DummyResult::raw_crate())
Some(ast::Crate {
attrs: Default::default(),
items: Default::default(),
spans: Default::default(),
id: ast::DUMMY_NODE_ID,
is_placeholder: Default::default(),
})
}
}


@ -175,20 +175,6 @@ pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) }
}
pub fn stmt_let_pat(&self, sp: Span, pat: P<ast::Pat>, ex: P<ast::Expr>) -> ast::Stmt {
let local = P(ast::Local {
pat,
ty: None,
id: ast::DUMMY_NODE_ID,
kind: LocalKind::Init(ex),
span: sp,
colon_sp: None,
attrs: AttrVec::new(),
tokens: None,
});
self.stmt_local(local, sp)
}
pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt {
self.stmt_let_ty(sp, mutbl, ident, None, ex)
}
@ -278,10 +264,6 @@ pub fn expr_self(&self, span: Span) -> P<ast::Expr> {
self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower))
}
pub fn expr_field(&self, span: Span, expr: P<Expr>, field: Ident) -> P<ast::Expr> {
self.expr(span, ast::ExprKind::Field(expr, field))
}
pub fn expr_macro_call(&self, span: Span, call: P<ast::MacCall>) -> P<ast::Expr> {
self.expr(span, ast::ExprKind::MacCall(call))
}
@ -394,11 +376,6 @@ pub fn expr_str(&self, span: Span, s: Symbol) -> P<ast::Expr> {
self.expr(span, ast::ExprKind::Lit(lit))
}
pub fn expr_char(&self, span: Span, ch: char) -> P<ast::Expr> {
let lit = token::Lit::new(token::Char, literal::escape_char_symbol(ch), None);
self.expr(span, ast::ExprKind::Lit(lit))
}
pub fn expr_byte_str(&self, span: Span, bytes: Vec<u8>) -> P<ast::Expr> {
let lit = token::Lit::new(token::ByteStr, literal::escape_byte_str_symbol(&bytes), None);
self.expr(span, ast::ExprKind::Lit(lit))
@ -414,10 +391,6 @@ pub fn expr_array_ref(&self, sp: Span, exprs: ThinVec<P<ast::Expr>>) -> P<ast::E
self.expr_addr_of(sp, self.expr_array(sp, exprs))
}
pub fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> {
self.expr(sp, ast::ExprKind::Cast(expr, ty))
}
pub fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
self.expr_call_global(sp, some, thin_vec![expr])


@ -99,10 +99,11 @@ fn feature_list(attr: &Attribute) -> ThinVec<ast::NestedMetaItem> {
// If the declared feature is unstable, record it.
if let Some(f) = UNSTABLE_FEATURES.iter().find(|f| name == f.feature.name) {
(f.set_enabled)(&mut features);
// When the ICE comes from core, alloc or std (approximation of the standard library), there's a chance
// that the person hitting the ICE may be using -Zbuild-std or similar with an untested target.
// The bug is probably in the standard library and not the compiler in that case, but that doesn't
// really matter - we want a bug report.
// When the ICE comes from core, alloc or std (approximation of the standard
// library), there's a chance that the person hitting the ICE may be using
// -Zbuild-std or similar with an untested target. The bug is probably in the
// standard library and not the compiler in that case, but that doesn't really
// matter - we want a bug report.
if features.internal(name)
&& ![sym::core, sym::alloc, sym::std].contains(&crate_name)
{


@ -4,7 +4,6 @@
IncompleteParse, RecursionLimitReached, RemoveExprNotSupported, RemoveNodeNotSupported,
UnsupportedKeyValue, WrongFragmentKind,
};
use crate::hygiene::SyntaxContext;
use crate::mbe::diagnostics::annotate_err_with_kind;
use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod};
use crate::placeholders::{placeholder, PlaceholderExpander};
@ -32,6 +31,7 @@
use rustc_session::lint::BuiltinLintDiag;
use rustc_session::parse::feature_err;
use rustc_session::{Limit, Session};
use rustc_span::hygiene::SyntaxContext;
use rustc_span::symbol::{sym, Ident};
use rustc_span::{ErrorGuaranteed, FileName, LocalExpnId, Span};
@ -87,7 +87,7 @@ fn make_from<'a>(self, result: Box<dyn MacResult + 'a>) -> Option<AstFragment> {
}
impl AstFragment {
pub fn add_placeholders(&mut self, placeholders: &[NodeId]) {
fn add_placeholders(&mut self, placeholders: &[NodeId]) {
if placeholders.is_empty() {
return;
}
@ -100,14 +100,14 @@ pub fn add_placeholders(&mut self, placeholders: &[NodeId]) {
}
}
pub fn make_opt_expr(self) -> Option<P<ast::Expr>> {
pub(crate) fn make_opt_expr(self) -> Option<P<ast::Expr>> {
match self {
AstFragment::OptExpr(expr) => expr,
_ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
}
}
pub fn make_method_receiver_expr(self) -> P<ast::Expr> {
pub(crate) fn make_method_receiver_expr(self) -> P<ast::Expr> {
match self {
AstFragment::MethodReceiverExpr(expr) => expr,
_ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
@ -125,7 +125,7 @@ fn make_ast<T: InvocationCollectorNode>(self) -> T::OutputTy {
T::fragment_to_output(self)
}
pub fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
pub(crate) fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
match self {
AstFragment::OptExpr(opt_expr) => {
visit_clobber(opt_expr, |opt_expr| {
@ -372,6 +372,14 @@ pub fn span(&self) -> Span {
InvocationKind::Derive { path, .. } => path.span,
}
}
fn span_mut(&mut self) -> &mut Span {
match &mut self.kind {
InvocationKind::Bang { span, .. } => span,
InvocationKind::Attr { attr, .. } => &mut attr.span,
InvocationKind::Derive { path, .. } => &mut path.span,
}
}
}
pub struct MacroExpander<'a, 'b> {
@ -432,7 +440,8 @@ pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragm
break;
}
invocations = mem::take(&mut undetermined_invocations);
force = !mem::replace(&mut progress, false);
force = !progress;
progress = false;
if force && self.monotonic {
self.cx.dcx().span_delayed_bug(
invocations.last().unwrap().0.span(),
@ -471,7 +480,7 @@ pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragm
self.cx.force_mode = force;
let fragment_kind = invoc.fragment_kind;
let (expanded_fragment, new_invocations) = match self.expand_invoc(invoc, &ext.kind) {
match self.expand_invoc(invoc, &ext.kind) {
ExpandResult::Ready(fragment) => {
let mut derive_invocations = Vec::new();
let derive_placeholders = self
@ -503,12 +512,19 @@ pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragm
})
.unwrap_or_default();
let (fragment, collected_invocations) =
let (expanded_fragment, collected_invocations) =
self.collect_invocations(fragment, &derive_placeholders);
// We choose to expand any derive invocations associated with this macro invocation
// *before* any macro invocations collected from the output fragment
// We choose to expand any derive invocations associated with this macro
// invocation *before* any macro invocations collected from the output
// fragment.
derive_invocations.extend(collected_invocations);
(fragment, derive_invocations)
progress = true;
if expanded_fragments.len() < depth {
expanded_fragments.push(Vec::new());
}
expanded_fragments[depth - 1].push((expn_id, expanded_fragment));
invocations.extend(derive_invocations.into_iter().rev());
}
ExpandResult::Retry(invoc) => {
if force {
@ -519,17 +535,9 @@ pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragm
} else {
// Cannot expand, will retry this invocation later.
undetermined_invocations.push((invoc, Some(ext)));
continue;
}
}
};
progress = true;
if expanded_fragments.len() < depth {
expanded_fragments.push(Vec::new());
}
expanded_fragments[depth - 1].push((expn_id, expanded_fragment));
invocations.extend(new_invocations.into_iter().rev());
}
self.cx.current_expansion = orig_expansion_data;
@ -590,11 +598,7 @@ fn collect_invocations(
for (invoc, _) in invocations.iter_mut() {
let expn_id = invoc.expansion_data.id;
let parent_def = self.cx.resolver.invocation_parent(expn_id);
let span = match &mut invoc.kind {
InvocationKind::Bang { span, .. } => span,
InvocationKind::Attr { attr, .. } => &mut attr.span,
InvocationKind::Derive { path, .. } => &mut path.span,
};
let span = invoc.span_mut();
*span = span.with_parent(Some(parent_def));
}
}
@ -957,7 +961,7 @@ pub fn parse_ast_fragment<'a>(
})
}
pub fn ensure_complete_parse<'a>(
pub(crate) fn ensure_complete_parse<'a>(
parser: &Parser<'a>,
macro_path: &ast::Path,
kind_name: &str,


@ -1,43 +1,39 @@
// tidy-alphabetical-start
#![allow(internal_features)]
#![allow(rustc::diagnostic_outside_of_impl)]
#![doc(rust_logo)]
#![feature(rustdoc_internals)]
#![feature(array_windows)]
#![feature(associated_type_defaults)]
#![feature(if_let_guard)]
#![feature(let_chains)]
#![feature(lint_reasons)]
#![feature(macro_metavar_expr)]
#![feature(map_try_insert)]
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_internals)]
#![feature(proc_macro_span)]
#![feature(rustdoc_internals)]
#![feature(try_blocks)]
#![feature(yeet_expr)]
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(internal_features)]
// tidy-alphabetical-end
extern crate proc_macro as pm;
mod build;
mod errors;
// FIXME(Nilstrieb) Translate macro_rules diagnostics
#[allow(rustc::untranslatable_diagnostic)]
mod mbe;
mod placeholders;
mod proc_macro_server;
pub use mbe::macro_rules::compile_declarative_macro;
pub(crate) use rustc_span::hygiene;
pub mod base;
pub mod build;
#[macro_use]
pub mod config;
pub mod errors;
pub mod expand;
pub mod module;
// FIXME(Nilstrieb) Translate proc_macro diagnostics
#[allow(rustc::untranslatable_diagnostic)]
pub mod proc_macro;
// FIXME(Nilstrieb) Translate macro_rules diagnostics
#[allow(rustc::untranslatable_diagnostic)]
pub(crate) mod mbe;
// HACK(Centril, #64197): These shouldn't really be here.
// Rather, they should be with their respective modules which are defined in other crates.
// However, since for now constructing a `ParseSess` sorta requires `config` from this crate,


@ -4,12 +4,13 @@
//! official terminology: "declarative macros".
pub(crate) mod diagnostics;
pub(crate) mod macro_check;
pub(crate) mod macro_parser;
pub(crate) mod macro_rules;
pub(crate) mod metavar_expr;
pub(crate) mod quoted;
pub(crate) mod transcribe;
mod macro_check;
mod macro_parser;
mod metavar_expr;
mod quoted;
mod transcribe;
use metavar_expr::MetaVarExpr;
use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind};


@ -28,7 +28,8 @@ pub(super) fn failed_to_match_macro<'cx>(
) -> Box<dyn MacResult + 'cx> {
let psess = &cx.sess.psess;
// An error occurred, try the expansion again, tracking the expansion closely for better diagnostics.
// An error occurred, try the expansion again, tracking the expansion closely for better
// diagnostics.
let mut tracker = CollectTrackerAndEmitter::new(cx, sp);
let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker);


@ -157,8 +157,8 @@ pub(super) trait Tracker<'matcher> {
/// This is called before trying to match next MatcherLoc on the current token.
fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}
/// This is called after an arm has been parsed, either successfully or unsuccessfully. When this is called,
/// `before_match_loc` was called at least once (with a `MatcherLoc::Eof`).
/// This is called after an arm has been parsed, either successfully or unsuccessfully. When
/// this is called, `before_match_loc` was called at least once (with a `MatcherLoc::Eof`).
fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {}
/// For tracing.
@ -169,7 +169,8 @@ fn recovery() -> Recovery {
}
}
/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to monomorphization.
/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to
/// monomorphization.
pub(super) struct NoopTracker;
impl<'matcher> Tracker<'matcher> for NoopTracker {
@ -492,7 +493,7 @@ pub fn compile_declarative_macro(
.pop()
.unwrap();
// We don't handle errors here, the driver will abort
// after parsing/expansion. we can report every error in every macro this way.
// after parsing/expansion. We can report every error in every macro this way.
check_emission(check_lhs_nt_follows(sess, def, &tt));
return tt;
}
@ -528,7 +529,7 @@ pub fn compile_declarative_macro(
check_emission(check_rhs(sess, rhs));
}
// don't abort iteration early, so that errors for multiple lhses can be reported
// Don't abort iteration early, so that errors for multiple lhses can be reported.
for lhs in &lhses {
check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(lhs)));
}


@ -39,26 +39,32 @@ fn visit_span(&mut self, span: &mut Span) {
}
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame<'a> {
Delimited {
tts: &'a [mbe::TokenTree],
idx: usize,
delim: Delimiter,
span: DelimSpan,
spacing: DelimSpacing,
},
Sequence {
tts: &'a [mbe::TokenTree],
idx: usize,
sep: Option<Token>,
kleene_op: KleeneOp,
},
struct Frame<'a> {
tts: &'a [mbe::TokenTree],
idx: usize,
kind: FrameKind,
}
enum FrameKind {
Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}
impl<'a> Frame<'a> {
/// Construct a new frame around the delimited set of tokens.
fn new(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span, spacing }
fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
Frame {
tts: &src.tts,
idx: 0,
kind: FrameKind::Delimited { delim: src.delim, span, spacing },
}
}
fn new_sequence(
src: &'a mbe::SequenceRepetition,
sep: Option<Token>,
kleene_op: KleeneOp,
) -> Frame<'a> {
Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
}
}
@ -66,13 +72,9 @@ impl<'a> Iterator for Frame<'a> {
type Item = &'a mbe::TokenTree;
fn next(&mut self) -> Option<&'a mbe::TokenTree> {
match self {
Frame::Delimited { tts, idx, .. } | Frame::Sequence { tts, idx, .. } => {
let res = tts.get(*idx);
*idx += 1;
res
}
}
let res = self.tts.get(self.idx);
self.idx += 1;
res
}
}
@ -111,13 +113,16 @@ pub(super) fn transcribe<'a>(
// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
// choice of spacing values doesn't matter.
let mut stack: SmallVec<[Frame<'_>; 1]> =
smallvec![Frame::new(src, src_span, DelimSpacing::new(Spacing::Alone, Spacing::Alone))];
let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited(
src,
src_span,
DelimSpacing::new(Spacing::Alone, Spacing::Alone)
)];
// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
// `repeats` keeps track of where we are in matching at each level, with the last element being
// the most deeply nested sequence. This is used as a stack.
let mut repeats = Vec::new();
let mut repeats: Vec<(usize, usize)> = Vec::new();
// `result` contains resulting token stream from the TokenTree we just finished processing. At
// the end, this will contain the full result of transcription, but at arbitrary points during
@ -142,11 +147,12 @@ pub(super) fn transcribe<'a>(
// Otherwise, if we have just reached the end of a sequence and we can keep repeating,
// go back to the beginning of the sequence.
if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
let frame = stack.last_mut().unwrap();
if let FrameKind::Sequence { sep, .. } = &frame.kind {
let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
*repeat_idx += 1;
if repeat_idx < repeat_len {
*idx = 0;
frame.idx = 0;
if let Some(sep) = sep {
result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
}
@ -157,16 +163,16 @@ pub(super) fn transcribe<'a>(
// We are done with the top of the stack. Pop it. Depending on what it was, we do
// different things. Note that the outermost item must be the delimited, wrapped RHS
// that was passed in originally to `transcribe`.
match stack.pop().unwrap() {
match stack.pop().unwrap().kind {
// Done with a sequence. Pop from repeats.
Frame::Sequence { .. } => {
FrameKind::Sequence { .. } => {
repeats.pop();
}
// We are done processing a Delimited. If this is the top-level delimited, we are
// done. Otherwise, we unwind the result_stack to append what we have produced to
// any previous results.
Frame::Delimited { delim, span, mut spacing, .. } => {
FrameKind::Delimited { delim, span, mut spacing, .. } => {
// Hack to force-insert a space after `]` in certain case.
// See discussion of the `hex-literal` crate in #114571.
if delim == Delimiter::Bracket {
@ -192,7 +198,7 @@ pub(super) fn transcribe<'a>(
// We are descending into a sequence. We first make sure that the matchers in the RHS
// and the matches in `interp` have the same shape. Otherwise, either the caller or the
// macro writer has made a mistake.
seq @ mbe::TokenTree::Sequence(_, delimited) => {
seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
match lockstep_iter_size(seq, interp, &repeats) {
LockstepIterSize::Unconstrained => {
return Err(cx
@ -233,12 +239,11 @@ pub(super) fn transcribe<'a>(
// The first time we encounter the sequence we push it to the stack. It
// then gets reused (see the beginning of the loop) until we are done
// repeating.
stack.push(Frame::Sequence {
idx: 0,
sep: seq.separator.clone(),
tts: &delimited.tts,
kleene_op: seq.kleene.op,
});
stack.push(Frame::new_sequence(
seq_rep,
seq.separator.clone(),
seq.kleene.op,
));
}
}
}
@ -294,13 +299,7 @@ pub(super) fn transcribe<'a>(
// the previous results (from outside the Delimited).
mbe::TokenTree::Delimited(mut span, spacing, delimited) => {
mut_visit::visit_delim_span(&mut span, &mut marker);
stack.push(Frame::Delimited {
tts: &delimited.tts,
delim: delimited.delim,
idx: 0,
span,
spacing: *spacing,
});
stack.push(Frame::new_delimited(delimited, span, *spacing));
result_stack.push(mem::take(&mut result));
}
@ -358,10 +357,13 @@ fn maybe_use_metavar_location(
) -> TokenTree {
let undelimited_seq = matches!(
stack.last(),
Some(Frame::Sequence {
Some(Frame {
tts: [_],
sep: None,
kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
kind: FrameKind::Sequence {
sep: None,
kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
..
},
..
})
);


@ -9,7 +9,7 @@
use smallvec::{smallvec, SmallVec};
use thin_vec::ThinVec;
pub fn placeholder(
pub(crate) fn placeholder(
kind: AstFragmentKind,
id: ast::NodeId,
vis: Option<ast::Visibility>,