Remove explicit delimiter token trees from Delimited.

They were introduced by the final commit in #95159 and gave a
performance win. But since the introduction of `MatcherLoc` they are no
longer needed. This commit reverts that change, making the code a bit
simpler.
Nicholas Nethercote 2022-04-08 17:38:28 +10:00
parent c2afaba465
commit 7450c4e3e8
6 changed files with 58 additions and 96 deletions

@@ -17,48 +17,24 @@
use rustc_span::symbol::Ident;
use rustc_span::Span;
/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiter itself
/// might be `NoDelim`.
/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiters
/// might be `NoDelim`, but they are not represented explicitly.
#[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
struct Delimited {
delim: token::DelimToken,
/// Note: This contains the opening and closing delimiters tokens (e.g. `(` and `)`). Note that
/// these could be `NoDelim`. These token kinds must match `delim`, and the methods below
/// debug_assert this.
all_tts: Vec<TokenTree>,
/// FIXME: #67062 has details about why this is sub-optimal.
tts: Vec<TokenTree>,
}
impl Delimited {
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter. Panics if
/// the delimiter is `NoDelim`.
fn open_tt(&self) -> &TokenTree {
let tt = self.all_tts.first().unwrap();
debug_assert!(matches!(
tt,
&TokenTree::Token(token::Token { kind: token::OpenDelim(d), .. }) if d == self.delim
));
tt
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
fn open_tt(&self, span: DelimSpan) -> TokenTree {
TokenTree::token(token::OpenDelim(self.delim), span.open)
}
/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. Panics if
/// the delimiter is `NoDelim`.
fn close_tt(&self) -> &TokenTree {
let tt = self.all_tts.last().unwrap();
debug_assert!(matches!(
tt,
&TokenTree::Token(token::Token { kind: token::CloseDelim(d), .. }) if d == self.delim
));
tt
}
/// Returns the tts excluding the outer delimiters.
///
/// FIXME: #67062 has details about why this is sub-optimal.
fn inner_tts(&self) -> &[TokenTree] {
// These functions are called for the assertions within them.
let _open_tt = self.open_tt();
let _close_tt = self.close_tt();
&self.all_tts[1..self.all_tts.len() - 1]
/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
fn close_tt(&self, span: DelimSpan) -> TokenTree {
TokenTree::token(token::CloseDelim(self.delim), span.close)
}
}
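
For orientation, the new shape can be modelled in a few lines of stand-alone Rust. This is a simplified sketch with stand-in types, not the real rustc definitions: the point is that `Delimited` keeps only the inner token trees, and the opening/closing delimiter tokens are rebuilt on demand from the `DelimSpan` that every `TokenTree::Delimited` node already carries.

    // Simplified stand-ins for the rustc types (Span, DelimSpan, DelimToken, ...).
    #[derive(Clone, Copy, Debug)]
    struct Span(u32);

    #[derive(Clone, Copy, Debug)]
    struct DelimSpan { open: Span, close: Span }

    #[derive(Clone, Copy, Debug)]
    enum Delim { Paren, NoDelim }

    #[derive(Clone, Debug)]
    enum TokenTree {
        Token(String, Span),
        Delimited(DelimSpan, Delimited),
    }

    #[derive(Clone, Debug)]
    struct Delimited {
        delim: Delim,
        // Only the *inner* token trees are stored; no open/close tokens here.
        tts: Vec<TokenTree>,
    }

    impl Delimited {
        // Rebuild the delimiter tokens on demand from the span, as the new
        // open_tt/close_tt above do, instead of reading stored tokens.
        fn open_tt(&self, span: DelimSpan) -> TokenTree {
            TokenTree::Token(format!("open-{:?}", self.delim), span.open)
        }
        fn close_tt(&self, span: DelimSpan) -> TokenTree {
            TokenTree::Token(format!("close-{:?}", self.delim), span.close)
        }
    }

    fn main() {
        let span = DelimSpan { open: Span(0), close: Span(3) };
        let del = Delimited {
            delim: Delim::Paren,
            tts: vec![
                TokenTree::Token("a".into(), Span(1)),
                TokenTree::Token("b".into(), Span(2)),
            ],
        };
        // The span lives on the enclosing node, not inside `Delimited`.
        let _node = TokenTree::Delimited(span, del.clone());
        // A consumer that needs the full `( a b )` sequence synthesizes the ends:
        let full: Vec<TokenTree> = std::iter::once(del.open_tt(span))
            .chain(del.tts.iter().cloned())
            .chain(std::iter::once(del.close_tt(span)))
            .collect();
        assert_eq!(full.len(), del.tts.len() + 2);
    }

The `debug_assert!` checks in the old `open_tt`/`close_tt` also become unnecessary: there is no stored token left that could disagree with `delim`.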

@@ -282,7 +282,7 @@ fn check_binders(
// `MetaVarExpr` can not appear in the LHS of a macro arm
TokenTree::MetaVarExpr(..) => {}
TokenTree::Delimited(_, ref del) => {
for tt in del.inner_tts() {
for tt in &del.tts {
check_binders(sess, node_id, tt, macros, binders, ops, valid);
}
}
@@ -345,7 +345,7 @@ fn check_occurrences(
check_ops_is_prefix(sess, node_id, macros, binders, ops, dl.entire(), name);
}
TokenTree::Delimited(_, ref del) => {
check_nested_occurrences(sess, node_id, del.inner_tts(), macros, binders, ops, valid);
check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
}
TokenTree::Sequence(_, ref seq) => {
let ops = ops.push(seq.kleene);
@@ -432,20 +432,14 @@ fn check_nested_occurrences(
{
let macro_rules = state == NestedMacroState::MacroRulesNotName;
state = NestedMacroState::Empty;
let rest = check_nested_macro(
sess,
node_id,
macro_rules,
del.inner_tts(),
&nested_macros,
valid,
);
let rest =
check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, valid);
// If we did not check the whole macro definition, then check the rest as if outside
// the macro definition.
check_nested_occurrences(
sess,
node_id,
&del.inner_tts()[rest..],
&del.tts[rest..],
macros,
binders,
ops,
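
Because `tts` now holds only the inner trees, recursive passes like `check_binders` and `check_nested_occurrences` can walk `&del.tts` directly; there are no stored delimiter tokens to slice off first. A minimal stand-alone sketch of that traversal shape (toy types, not the rustc ones):

    // Toy token-tree type: delimiters are implicit, only inner trees are stored.
    enum Tt {
        Token(char),
        Delimited(Vec<Tt>),
    }

    // Recurse straight into the stored inner trees; nothing to skip.
    fn count_tokens(tt: &Tt) -> usize {
        match tt {
            Tt::Token(_) => 1,
            Tt::Delimited(inner) => inner.iter().map(count_tokens).sum(),
        }
    }

    fn main() {
        let tree = Tt::Delimited(vec![Tt::Token('a'), Tt::Delimited(vec![Tt::Token('b')])]);
        assert_eq!(count_tokens(&tree), 2);
    }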

@@ -151,9 +151,11 @@ fn inner(
TokenTree::Token(token) => {
locs.push(MatcherLoc::Token { token: token.clone() });
}
TokenTree::Delimited(_, delimited) => {
TokenTree::Delimited(span, delimited) => {
locs.push(MatcherLoc::Delimited);
inner(sess, &delimited.all_tts, locs, next_metavar, seq_depth);
inner(sess, &[delimited.open_tt(*span)], locs, next_metavar, seq_depth);
inner(sess, &delimited.tts, locs, next_metavar, seq_depth);
inner(sess, &[delimited.close_tt(*span)], locs, next_metavar, seq_depth);
}
TokenTree::Sequence(_, seq) => {
// We can't determine `idx_first_after` and construct the final
@@ -293,7 +295,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
.map(|tt| match tt {
TokenTree::MetaVarDecl(..) => 1,
TokenTree::Sequence(_, seq) => seq.num_captures,
TokenTree::Delimited(_, delim) => count_metavar_decls(delim.inner_tts()),
TokenTree::Delimited(_, delim) => count_metavar_decls(&delim.tts),
TokenTree::Token(..) => 0,
TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
})
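
The flattened matcher that the macro parser runs over still contains explicit open/close entries; `compute_locs` now synthesizes them from the node's `DelimSpan` (via `open_tt(*span)` / `close_tt(*span)`) instead of finding them stored in `all_tts`. A rough stand-alone sketch of that flattening order, with simplified types standing in for `MatcherLoc`:

    #[derive(Debug, PartialEq)]
    enum Loc {
        Delimited,     // marks the start of a delimited group
        Token(String), // includes the synthesized open/close delimiter tokens
    }

    enum Tt {
        Token(String),
        Delimited(Vec<Tt>), // only the inner trees are stored
    }

    fn flatten(tts: &[Tt], locs: &mut Vec<Loc>) {
        for tt in tts {
            match tt {
                Tt::Token(t) => locs.push(Loc::Token(t.clone())),
                Tt::Delimited(inner) => {
                    locs.push(Loc::Delimited);
                    // Synthesize the delimiter tokens on the fly, then recurse.
                    locs.push(Loc::Token("(".into()));
                    flatten(inner, locs);
                    locs.push(Loc::Token(")".into()));
                }
            }
        }
    }

    fn main() {
        let matcher = [Tt::Delimited(vec![Tt::Token("a".into())])];
        let mut locs = Vec::new();
        flatten(&matcher, &mut locs);
        assert_eq!(locs.len(), 4); // Delimited, "(", "a", ")"
    }

The matcher sequence the parser sees is therefore unchanged; only where the delimiter tokens come from differs.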

@@ -263,9 +263,7 @@ fn generic_extension<'cx, 'tt>(
// Ignore the delimiters on the RHS.
let rhs = match &rhses[i] {
mbe::TokenTree::Delimited(_, delimited) => {
delimited.inner_tts().to_vec().clone()
}
mbe::TokenTree::Delimited(_, delimited) => delimited.tts.to_vec(),
_ => cx.span_bug(sp, "malformed macro rhs"),
};
let arm_span = rhses[i].span();
@@ -470,17 +468,16 @@ pub fn compile_declarative_macro(
.iter()
.map(|m| {
if let MatchedTokenTree(ref tt) = *m {
let mut tts = vec![];
mbe::quoted::parse(
let tt = mbe::quoted::parse(
tt.clone().into(),
true,
&sess.parse_sess,
def.id,
features,
edition,
&mut tts,
);
let tt = tts.pop().unwrap();
)
.pop()
.unwrap();
valid &= check_lhs_nt_follows(&sess.parse_sess, features, &def, &tt);
return tt;
}
@@ -495,17 +492,16 @@ pub fn compile_declarative_macro(
.iter()
.map(|m| {
if let MatchedTokenTree(ref tt) = *m {
let mut tts = vec![];
mbe::quoted::parse(
return mbe::quoted::parse(
tt.clone().into(),
false,
&sess.parse_sess,
def.id,
features,
edition,
&mut tts,
);
return tts.pop().unwrap();
)
.pop()
.unwrap();
}
sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
})
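
Both call sites above rely on `mbe::quoted::parse` now returning its result instead of filling a caller-supplied `Vec` (see the `quoted::parse` diff further down); each macro arm is a single top-level token tree, so `.pop().unwrap()` retrieves it. A toy illustration of that out-parameter-to-return-value shape, using hypothetical names rather than the real signatures:

    // Before: the caller owned the buffer and passed it in.
    fn parse_into(input: &str, out: &mut Vec<String>) {
        out.extend(input.split_whitespace().map(|s| s.to_string()));
    }

    // After: the function builds and returns the buffer itself.
    fn parse(input: &str) -> Vec<String> {
        input.split_whitespace().map(|s| s.to_string()).collect()
    }

    fn main() {
        // Old call-site shape: declare a vector, pass it by &mut, then pop.
        let mut trees = Vec::new();
        parse_into("lhs", &mut trees);
        let tt_old = trees.pop().unwrap();

        // New call-site shape, as in compile_declarative_macro above.
        let tt_new = parse("lhs").pop().unwrap();
        assert_eq!(tt_old, tt_new);
    }
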
@@ -544,7 +540,7 @@ pub fn compile_declarative_macro(
// Ignore the delimiters around the matcher.
match lhs {
mbe::TokenTree::Delimited(_, delimited) => {
mbe::macro_parser::compute_locs(&sess.parse_sess, delimited.inner_tts())
mbe::macro_parser::compute_locs(&sess.parse_sess, &delimited.tts)
}
_ => sess.parse_sess.span_diagnostic.span_bug(def.span, "malformed macro lhs"),
}
@@ -576,7 +572,7 @@ fn check_lhs_nt_follows(
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
if let mbe::TokenTree::Delimited(_, delimited) = lhs {
check_matcher(sess, features, def, delimited.inner_tts())
check_matcher(sess, features, def, &delimited.tts)
} else {
let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
sess.span_diagnostic.span_err(lhs.span(), msg);
@@ -597,7 +593,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarExpr(..) => (),
TokenTree::Delimited(_, ref del) => {
if !check_lhs_no_empty_seq(sess, del.inner_tts()) {
if !check_lhs_no_empty_seq(sess, &del.tts) {
return false;
}
}
@@ -692,9 +688,9 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
| TokenTree::MetaVarExpr(..) => {
first.replace_with(tt.clone());
}
TokenTree::Delimited(_span, ref delimited) => {
build_recur(sets, delimited.inner_tts());
first.replace_with(delimited.open_tt().clone());
TokenTree::Delimited(span, ref delimited) => {
build_recur(sets, &delimited.tts);
first.replace_with(delimited.open_tt(span));
}
TokenTree::Sequence(sp, ref seq_rep) => {
let subfirst = build_recur(sets, &seq_rep.tts);
@@ -758,8 +754,8 @@ fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
first.add_one(tt.clone());
return first;
}
TokenTree::Delimited(_span, ref delimited) => {
first.add_one(delimited.open_tt().clone());
TokenTree::Delimited(span, ref delimited) => {
first.add_one(delimited.open_tt(span));
return first;
}
TokenTree::Sequence(sp, ref seq_rep) => {
@@ -945,9 +941,9 @@ fn check_matcher_core(
suffix_first = build_suffix_first();
}
}
TokenTree::Delimited(_span, ref d) => {
let my_suffix = TokenSet::singleton(d.close_tt().clone());
check_matcher_core(sess, features, def, first_sets, d.inner_tts(), &my_suffix);
TokenTree::Delimited(span, ref d) => {
let my_suffix = TokenSet::singleton(d.close_tt(span));
check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
// don't track non NT tokens
last.replace_with_irrelevant();
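
In `build_recur` and `first` above, the FIRST set of a delimited matcher is just its opening delimiter, now produced on demand via `open_tt(span)` rather than cloned out of storage; `check_matcher_core` likewise builds the inner matcher's follow set from a synthesized `close_tt(span)`. A compact stand-alone sketch of the FIRST-set case (toy types):

    // Toy FIRST computation: the first token of a delimited matcher is always
    // its open delimiter, synthesized from the delimiter kind; the inner
    // trees never need to be inspected.
    enum Tt {
        Token(char),
        Delimited(Delim, Vec<Tt>), // delimiter kind + inner trees only
    }

    #[derive(Clone, Copy, Debug)]
    enum Delim { Paren, Bracket }

    impl Delim {
        fn open(self) -> char {
            match self { Delim::Paren => '(', Delim::Bracket => '[' }
        }
    }

    fn first(tt: &Tt) -> char {
        match tt {
            Tt::Token(c) => *c,
            Tt::Delimited(delim, _inner) => delim.open(),
        }
    }

    fn main() {
        assert_eq!(first(&Tt::Delimited(Delim::Paren, vec![Tt::Token('a')])), '(');
        assert_eq!(first(&Tt::Delimited(Delim::Bracket, vec![])), '[');
    }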

@@ -45,8 +45,10 @@ pub(super) fn parse(
node_id: NodeId,
features: &Features,
edition: Edition,
result: &mut Vec<TokenTree>,
) {
) -> Vec<TokenTree> {
// Will contain the final collection of `self::TokenTree`
let mut result = Vec::new();
// For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
// additional trees if need be.
let mut trees = input.trees();
@@ -113,6 +115,7 @@ pub(super) fn parse(
_ => result.push(tree),
}
}
result
}
/// Asks for the `macro_metavar_expr` feature if it is not already declared
@@ -205,8 +208,7 @@ fn parse_tree(
// If we didn't find a metavar expression above, then we must have a
// repetition sequence in the macro (e.g. `$(pat)*`). Parse the
// contents of the sequence itself
let mut sequence = vec![];
parse(tts, parsing_patterns, sess, node_id, features, edition, &mut sequence);
let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
// Get the Kleene operator and optional separator
let (separator, kleene) =
parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess);
@@ -269,15 +271,13 @@ fn parse_tree(
// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
// descend into the delimited set and further parse it.
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let mut all_tts = vec![];
// Add the explicit open and close delimiters, which
// `tokenstream::TokenTree::Delimited` lacks.
all_tts.push(TokenTree::token(token::OpenDelim(delim), span.open));
parse(tts, parsing_patterns, sess, node_id, features, edition, &mut all_tts);
all_tts.push(TokenTree::token(token::CloseDelim(delim), span.close));
TokenTree::Delimited(span, Lrc::new(Delimited { delim, all_tts }))
}
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
Lrc::new(Delimited {
delim,
tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
}),
),
}
}

@@ -10,7 +10,7 @@
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
use rustc_span::{Span, DUMMY_SP};
use rustc_span::Span;
use smallvec::{smallvec, SmallVec};
use std::mem;
@@ -34,14 +34,8 @@ enum Frame {
impl Frame {
/// Construct a new frame around the delimited set of tokens.
fn new(mut tts: Vec<mbe::TokenTree>) -> Frame {
// Need to add empty delimiters.
let open_tt = mbe::TokenTree::token(token::OpenDelim(token::NoDelim), DUMMY_SP);
let close_tt = mbe::TokenTree::token(token::CloseDelim(token::NoDelim), DUMMY_SP);
tts.insert(0, open_tt);
tts.push(close_tt);
let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, all_tts: tts });
fn new(tts: Vec<mbe::TokenTree>) -> Frame {
let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
}
}
@@ -52,7 +46,7 @@ impl Iterator for Frame {
fn next(&mut self) -> Option<mbe::TokenTree> {
match *self {
Frame::Delimited { ref forest, ref mut idx, .. } => {
let res = forest.inner_tts().get(*idx).cloned();
let res = forest.tts.get(*idx).cloned();
*idx += 1;
res
}
@@ -388,7 +382,7 @@ fn lockstep_iter_size(
use mbe::TokenTree;
match *tree {
TokenTree::Delimited(_, ref delimited) => {
delimited.inner_tts().iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats))
})
}
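
Finally, the transcriber's `Frame::new` no longer wraps the rule body in dummy `NoDelim` open/close tokens (built with `DUMMY_SP`) just so `inner_tts()` could slice them off again: the body is stored as-is and iteration walks `forest.tts` directly. A small stand-alone model of that frame cursor (toy types, not the rustc ones):

    // Toy model of the transcriber's Frame: a plain cursor over the stored
    // inner trees, with no synthetic delimiter tokens at either end.
    struct Frame {
        tts: Vec<&'static str>,
        idx: usize,
    }

    impl Frame {
        fn new(tts: Vec<&'static str>) -> Frame {
            Frame { tts, idx: 0 }
        }
    }

    impl Iterator for Frame {
        type Item = &'static str;
        fn next(&mut self) -> Option<&'static str> {
            let res = self.tts.get(self.idx).copied();
            self.idx += 1;
            res
        }
    }

    fn main() {
        let frame = Frame::new(vec!["a", "b", "c"]);
        assert_eq!(frame.collect::<Vec<_>>(), ["a", "b", "c"]);
    }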