Auto merge of #128725 - nnethercote:fix-assoc-expr-collect-problems, r=petrochenkov

Fix problems with assoc expr token collection

There are several cases involving assoc exprs and attributes where the current code does the wrong thing. This PR adds some tests that demonstrate the problems and then fixes them.
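
For concreteness, here is a minimal nightly reproduction of the kind of code that was mishandled, adapted from the `cfg_eval` test added in this PR (the `bb` payload name is just a placeholder from that test):

```rust
#![feature(cfg_eval)]
#![feature(rustc_attrs)]
#![feature(stmt_expr_attributes)]
#![allow(internal_features)]

// An attribute on an associative expression (`1 + 2`) inside a `#[cfg_eval]`
// target: the attribute attaches to the `1` sub-expression, but the collected
// tokens for the whole expression must still include it.
#[cfg_eval]
struct Foo(
    [bool; {
        let _x = 30;
        #[cfg_attr(unix, rustc_dummy(bb))] 1 + 2
    }]
);

fn main() {}
```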

r? `@petrochenkov`
bors 2024-08-16 01:44:09 +00:00
commit be0ea0c99a
12 changed files with 485 additions and 300 deletions

View File

@ -8,7 +8,10 @@ use rustc_span::{sym, BytePos, Span};
use thin_vec::ThinVec;
use tracing::debug;
use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle};
use super::{
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
UsePreAttrPos,
};
use crate::{errors, fluent_generated as fluent, maybe_whole};
// Public for rustfmt usage
@ -257,7 +260,8 @@ impl<'a> Parser<'a> {
pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
maybe_whole!(self, NtMeta, |attr| attr.into_inner());
let do_parse = |this: &mut Self, _empty_attrs| {
// Attr items don't have attributes.
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
let is_unsafe = this.eat_keyword(kw::Unsafe);
let unsafety = if is_unsafe {
let unsafe_span = this.prev_token.span;
@ -273,10 +277,12 @@ impl<'a> Parser<'a> {
if is_unsafe {
this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
}
Ok((ast::AttrItem { unsafety, path, args, tokens: None }, false))
};
// Attr items don't have attributes.
self.collect_tokens_trailing_token(AttrWrapper::empty(), force_collect, do_parse)
Ok((
ast::AttrItem { unsafety, path, args, tokens: None },
Trailing::No,
UsePreAttrPos::No,
))
})
}
/// Parses attributes that appear after the opening of an item. These should
@ -309,8 +315,8 @@ impl<'a> Parser<'a> {
};
if let Some(attr) = attr {
// If we are currently capturing tokens (i.e. we are within a call to
// `Parser::collect_tokens_trailing_tokens`) record the token positions of this
// inner attribute, for possible later processing in a `LazyAttrTokenStream`.
// `Parser::collect_tokens`) record the token positions of this inner attribute,
// for possible later processing in a `LazyAttrTokenStream`.
if let Capturing::Yes = self.capture_state.capturing {
let end_pos = self.num_bump_calls;
let parser_range = ParserRange(start_pos..end_pos);

View File

@ -12,9 +12,23 @@ use rustc_span::{sym, Span, DUMMY_SP};
use super::{
Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange,
TokenCursor,
TokenCursor, Trailing,
};
// When collecting tokens, this fully captures the start point. Usually it's
// just after outer attributes, but occasionally it's before.
#[derive(Clone, Debug)]
pub(super) struct CollectPos {
start_token: (Token, Spacing),
cursor_snapshot: TokenCursor,
start_pos: u32,
}
pub(super) enum UsePreAttrPos {
No,
Yes,
}
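// For example, in `#[attr] 1 + 2` the outer attribute ends up attached to the
// `1` sub-expression, but the tokens collected for the whole binary expression
// must start at the pre-attribute position so that `#[attr]` is captured; the
// callback passed to `collect_tokens` requests this by returning
// `UsePreAttrPos::Yes`.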
/// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens
/// for the attribute target. This allows us to perform cfg-expansion on
@ -22,30 +36,32 @@ use super::{
///
/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
/// Parsing code can only get access to the underlying attributes
/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
/// by passing an `AttrWrapper` to `collect_tokens`.
/// This makes it difficult to accidentally construct an AST node
/// (which stores an `ast::AttrVec`) without first collecting tokens.
///
/// This struct has its own module, to ensure that the parser code
/// cannot directly access the `attrs` field.
#[derive(Debug, Clone)]
pub struct AttrWrapper {
pub(super) struct AttrWrapper {
attrs: AttrVec,
// The start of the outer attributes in the parser's token stream.
// This lets us create a `NodeReplacement` for the entire attribute
// target, including outer attributes.
start_pos: u32,
// target, including outer attributes. `None` if there are no outer
// attributes.
start_pos: Option<u32>,
}
impl AttrWrapper {
pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
AttrWrapper { attrs, start_pos }
}
pub fn empty() -> AttrWrapper {
AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
AttrWrapper { attrs, start_pos: Some(start_pos) }
}
pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
pub(super) fn empty() -> AttrWrapper {
AttrWrapper { attrs: AttrVec::new(), start_pos: None }
}
pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
psess.dcx().span_delayed_bug(
self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
"AttrVec is taken for recovery but no error is produced",
@ -56,12 +72,12 @@ impl AttrWrapper {
/// Prepend `self.attrs` to `attrs`.
// FIXME: require passing an NT to prevent misuse of this method
pub(crate) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) {
pub(super) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) {
mem::swap(attrs, &mut self.attrs);
attrs.extend(self.attrs);
}
pub fn is_empty(&self) -> bool {
pub(super) fn is_empty(&self) -> bool {
self.attrs.is_empty()
}
}
@ -77,7 +93,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
}
// From a value of this type we can reconstruct the `TokenStream` seen by the
// `f` callback passed to a call to `Parser::collect_tokens_trailing_token`, by
// `f` callback passed to a call to `Parser::collect_tokens`, by
// replaying the getting of the tokens. This saves us producing a `TokenStream`
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
// passed to a proc macro. In practice, token stream creation happens rarely
@ -166,16 +182,30 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
}
impl<'a> Parser<'a> {
pub(super) fn collect_pos(&self) -> CollectPos {
CollectPos {
start_token: (self.token.clone(), self.token_spacing),
cursor_snapshot: self.token_cursor.clone(),
start_pos: self.num_bump_calls,
}
}
/// Parses code with `f`. If appropriate, it records the tokens (in
/// `LazyAttrTokenStream` form) that were parsed in the result, accessible
/// via the `HasTokens` trait. The second (bool) part of the callback's
/// via the `HasTokens` trait. The `Trailing` part of the callback's
/// result indicates if an extra token should be captured, e.g. a comma or
/// semicolon.
/// semicolon. The `UsePreAttrPos` part of the callback's result indicates
/// if we should use `pre_attr_pos` as the collection start position (only
/// required in a few cases).
///
/// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
/// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
/// details.
///
/// `pre_attr_pos` is the position before the outer attributes (or the node
/// itself, if no outer attributes are present). It is only needed if `f`
/// can return `UsePreAttrPos::Yes`.
///
/// Note: If your callback consumes an opening delimiter (including the
/// case where `self.token` is an opening delimiter on entry to this
/// function), you must also consume the corresponding closing delimiter.
@ -197,11 +227,12 @@ impl<'a> Parser<'a> {
/// } // 32..33
/// } // 33..34
/// ```
pub fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
&mut self,
pre_attr_pos: Option<CollectPos>,
attrs: AttrWrapper,
force_collect: ForceCollect,
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, bool)>,
f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
) -> PResult<'a, R> {
// We must collect if anything could observe the collected tokens, i.e.
// if any of the following conditions hold.
@ -220,23 +251,20 @@ impl<'a> Parser<'a> {
return Ok(f(self, attrs.attrs)?.0);
}
let start_token = (self.token.clone(), self.token_spacing);
let cursor_snapshot = self.token_cursor.clone();
let start_pos = self.num_bump_calls;
let mut collect_pos = self.collect_pos();
let has_outer_attrs = !attrs.attrs.is_empty();
let parser_replacements_start = self.capture_state.parser_replacements.len();
// We set and restore `Capturing::Yes` on either side of the call to
// `f`, so we can distinguish the outermost call to
// `collect_tokens_trailing_token` (e.g. parsing `m` in the example
// above) from any inner (indirectly recursive) calls (e.g. parsing `g`
// in the example above). This distinction is used below and in
// `Parser::parse_inner_attributes`.
let (mut ret, capture_trailing) = {
// `f`, so we can distinguish the outermost call to `collect_tokens`
// (e.g. parsing `m` in the example above) from any inner (indirectly
// recursive) calls (e.g. parsing `g` in the example above). This
// distinction is used below and in `Parser::parse_inner_attributes`.
let (mut ret, capture_trailing, use_pre_attr_pos) = {
let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
let ret_and_trailing = f(self, attrs.attrs);
let res = f(self, attrs.attrs);
self.capture_state.capturing = prev_capturing;
ret_and_trailing?
res?
};
// When we're not in `capture_cfg` mode, then skip collecting and
@ -279,10 +307,18 @@ impl<'a> Parser<'a> {
return Ok(ret);
}
// Replace the post-attribute collection start position with the
// pre-attribute position supplied, if `f` indicated it is necessary.
// (The caller is responsible for providing a non-`None` `pre_attr_pos`
// if this is a possibility.)
if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
collect_pos = pre_attr_pos.unwrap();
}
let parser_replacements_end = self.capture_state.parser_replacements.len();
assert!(
!(self.break_last_token && capture_trailing),
!(self.break_last_token && matches!(capture_trailing, Trailing::Yes)),
"Cannot set break_last_token and have trailing token"
);
@ -294,7 +330,7 @@ impl<'a> Parser<'a> {
// `AttrTokenStream`, we will create the proper token.
+ self.break_last_token as u32;
let num_calls = end_pos - start_pos;
let num_calls = end_pos - collect_pos.start_pos;
// Take the captured `ParserRange`s for any inner attributes that we parsed in
// `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
@ -328,7 +364,9 @@ impl<'a> Parser<'a> {
.iter()
.cloned()
.chain(inner_attr_parser_replacements.iter().cloned())
.map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data))
.map(|(parser_range, data)| {
(NodeRange::new(parser_range, collect_pos.start_pos), data)
})
.collect()
};
@ -355,9 +393,9 @@ impl<'a> Parser<'a> {
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
start_token,
start_token: collect_pos.start_token,
cursor_snapshot: collect_pos.cursor_snapshot,
num_calls,
cursor_snapshot,
break_last_token: self.break_last_token,
node_replacements,
});
@ -368,9 +406,9 @@ impl<'a> Parser<'a> {
}
// If `capture_cfg` is set and we're inside a recursive call to
// `collect_tokens_trailing_token`, then we need to register a replace range
// if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion
// on the captured token stream.
// `collect_tokens`, then we need to register a replace range if we
// have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager
// cfg-expansion on the captured token stream.
if self.capture_cfg
&& matches!(self.capture_state.capturing, Capturing::Yes)
&& has_cfg_or_cfg_attr(ret.attrs())
@ -389,7 +427,8 @@ impl<'a> Parser<'a> {
// Set things up so that the entire AST node that we just parsed, including attributes,
// will be replaced with `target` in the lazy token stream. This will allow us to
// cfg-expand this AST node.
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
let start_pos =
if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
self.capture_state
.parser_replacements
@ -490,7 +529,6 @@ mod size_asserts {
use super::*;
// tidy-alphabetical-start
static_assert_size!(AttrWrapper, 16);
static_assert_size!(LazyAttrTokenStreamImpl, 96);
// tidy-alphabetical-end
}

View File

@ -2487,7 +2487,8 @@ impl<'a> Parser<'a> {
pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
let start = self.token.span;
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
let (expr, _) =
self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
err.span_label(
start.shrink_to_lo(),
"while parsing a const generic argument starting here",
@ -2613,7 +2614,7 @@ impl<'a> Parser<'a> {
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
})() {
Ok(expr) => {
Ok((expr, _)) => {
// Find a mistake like `MyTrait<Assoc == S::Assoc>`.
if snapshot.token == token::EqEq {
err.span_suggestion(
@ -2671,7 +2672,7 @@ impl<'a> Parser<'a> {
})() {
// Since we don't know the exact reason why we failed to parse the type or the
// expression, employ a simple heuristic to weed out some pathological cases.
Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => {
self.restore_snapshot(snapshot);
Some(expr)
}

View File

@ -36,7 +36,7 @@ use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenType, Trailing,
SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
};
use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
@ -59,15 +59,30 @@ impl<'a> Parser<'a> {
self.current_closure.take();
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::empty(), attrs)
self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0)
}
/// Parses an expression, forcing tokens to be collected.
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
self.current_closure.take();
// If the expression is associative (e.g. `1 + 2`), then any preceding
// outer attribute actually belongs to the first inner sub-expression, in
// which case we must use the pre-attr pos to include the attribute in the
// collected tokens for the outer expression.
let pre_attr_pos = self.collect_pos();
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs))
self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
ForceCollect::Yes,
|this, _empty_attrs| {
let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?;
let use_pre_attr_pos =
if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No };
Ok((expr, Trailing::No, use_pre_attr_pos))
},
)
}
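A user-level illustration of why forced collection may need to reach back before the attribute — a standalone nightly sketch restating the `stringify` test cases added later in this PR (`aa` is a placeholder attribute from that test, `capture_expr` a hypothetical macro name):

#![feature(stmt_expr_attributes)]

// Capturing `#[aa] 1 + 2` as an `expr` fragment goes through
// `parse_expr_force_collect`; the captured tokens must still contain `#[aa]`,
// even though the attribute itself sits on the `1` sub-expression in the AST.
macro_rules! capture_expr {
    ($e:expr) => { stringify!($e) };
}

fn main() {
    assert_eq!(capture_expr!(#[aa] 1 + 2), "#[aa] 1 + 2");
}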
pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
@ -77,7 +92,7 @@ impl<'a> Parser<'a> {
fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(restrictions, attrs) {
Ok(expr) => Ok(expr),
Ok((expr, _)) => Ok(expr),
Err(err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
@ -104,18 +119,20 @@ impl<'a> Parser<'a> {
&mut self,
r: Restrictions,
attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs))
}
/// Parses an associative expression with operators of at least `min_prec` precedence.
/// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator
/// followed by a subexpression (e.g. `1 + 2`).
pub(super) fn parse_expr_assoc_with(
&mut self,
min_prec: usize,
attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
let lhs = if self.token.is_range_separator() {
return self.parse_expr_prefix_range(attrs);
return self.parse_expr_prefix_range(attrs).map(|res| (res, false));
} else {
self.parse_expr_prefix(attrs)?
};
@ -123,15 +140,17 @@ impl<'a> Parser<'a> {
}
/// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
/// of at least `min_prec` precedence.
/// of at least `min_prec` precedence. The `bool` in the return value indicates if something
/// was actually parsed.
pub(super) fn parse_expr_assoc_rest_with(
&mut self,
min_prec: usize,
starts_stmt: bool,
mut lhs: P<Expr>,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
let mut parsed_something = false;
if !self.should_continue_as_assoc_expr(&lhs) {
return Ok(lhs);
return Ok((lhs, parsed_something));
}
self.expected_tokens.push(TokenType::Operator);
@ -156,10 +175,11 @@ impl<'a> Parser<'a> {
self.err_larrow_operator(self.token.span);
}
parsed_something = true;
self.bump();
if op.node.is_comparison() {
if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
return Ok(expr);
return Ok((expr, parsed_something));
}
}
@ -263,7 +283,7 @@ impl<'a> Parser<'a> {
// the special cases. The code is here only for future convenience.
Fixity::None => 1,
};
let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with(prec + prec_adjustment, attrs)
})?;
@ -319,7 +339,7 @@ impl<'a> Parser<'a> {
}
}
Ok(lhs)
Ok((lhs, parsed_something))
}
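// Concretely: given the lhs `1` and no following operator, this returns the
// lhs unchanged together with `false`; given `1 + 2` it consumes `+ 2` and
// returns the binary expression together with `true`. That flag is what lets
// `parse_expr_force_collect` detect assoc exprs and switch to the
// pre-attribute collection position.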
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
@ -441,7 +461,8 @@ impl<'a> Parser<'a> {
let attrs = self.parse_outer_attributes()?;
Some(
self.parse_expr_assoc_with(prec + 1, attrs)
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?
.0,
)
} else {
None
@ -498,7 +519,7 @@ impl<'a> Parser<'a> {
// RHS must be parsed with more associativity than the dots.
let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs)
.map(|x| (lo.to(x.span), Some(x)))
.map(|(x, _)| (lo.to(x.span), Some(x)))
.map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
} else {
(lo, None)
@ -2335,7 +2356,7 @@ impl<'a> Parser<'a> {
let token = self.token.clone();
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(restrictions, attrs) {
Ok(expr) => expr,
Ok((expr, _)) => expr,
Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
}
}
@ -2445,7 +2466,7 @@ impl<'a> Parser<'a> {
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
let ty = if this.eat(&token::Colon) {
this.parse_ty()?
@ -2462,7 +2483,8 @@ impl<'a> Parser<'a> {
id: DUMMY_NODE_ID,
is_placeholder: false,
},
this.token == token::Comma,
Trailing::from(this.token == token::Comma),
UsePreAttrPos::No,
))
})
}
@ -2583,7 +2605,7 @@ impl<'a> Parser<'a> {
/// Parses the condition of a `if` or `while` expression.
fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
let mut cond =
let (mut cond, _) =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
CondChecker::new(self).visit_expr(&mut cond);
@ -2632,7 +2654,7 @@ impl<'a> Parser<'a> {
self.expect(&token::Eq)?;
}
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
let (expr, _) = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
let span = lo.to(expr.span);
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
}
@ -2766,7 +2788,7 @@ impl<'a> Parser<'a> {
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
// happen right before the return of this method.
let attrs = self.parse_outer_attributes()?;
let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
Ok(expr) => expr,
Err(expr_err) => {
// We don't know what followed the `in`, so cancel and bubble up the
@ -2801,7 +2823,7 @@ impl<'a> Parser<'a> {
}
self.check_for_for_in_in_typo(self.prev_token.span);
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
Ok((pat, expr))
}
@ -2921,7 +2943,7 @@ impl<'a> Parser<'a> {
fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
let match_span = self.prev_token.span;
let attrs = self.parse_outer_attributes()?;
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
let (scrutinee, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
}
@ -3069,7 +3091,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
@ -3126,7 +3148,7 @@ impl<'a> Parser<'a> {
let arm_start_span = this.token.span;
let attrs = this.parse_outer_attributes()?;
let expr =
let (expr, _) =
this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
err.span_label(arrow_span, "while parsing the `match` arm starting here");
err
@ -3243,7 +3265,8 @@ impl<'a> Parser<'a> {
id: DUMMY_NODE_ID,
is_placeholder: false,
},
false,
Trailing::No,
UsePreAttrPos::No,
))
})
}
@ -3334,8 +3357,9 @@ impl<'a> Parser<'a> {
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs).map_err(
|mut err| {
match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) {
Ok((expr, _)) => Ok(expr),
Err(mut err) => {
if self.prev_token == token::OpenDelim(Delimiter::Brace) {
let sugg_sp = self.prev_token.span.shrink_to_lo();
// Consume everything within the braces, let's avoid further parse
@ -3355,9 +3379,9 @@ impl<'a> Parser<'a> {
err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
}
}
err
},
)
Err(err)
}
}
}
pub(crate) fn is_builtin(&self) -> bool {
@ -3708,7 +3732,7 @@ impl<'a> Parser<'a> {
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
// Check if a colon exists one ahead. This means we're parsing a fieldname.
@ -3752,7 +3776,8 @@ impl<'a> Parser<'a> {
id: DUMMY_NODE_ID,
is_placeholder: false,
},
this.token == token::Comma,
Trailing::from(this.token == token::Comma),
UsePreAttrPos::No,
))
})
}
@ -3846,15 +3871,17 @@ impl<'a> Parser<'a> {
attrs: AttrWrapper,
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
) -> PResult<'a, P<Expr>> {
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let res = f(this, attrs)?;
let trailing = (this.restrictions.contains(Restrictions::STMT_EXPR)
&& this.token == token::Semi)
let trailing = Trailing::from(
this.restrictions.contains(Restrictions::STMT_EXPR)
&& this.token == token::Semi
// FIXME: pass an additional condition through from the place
// where we know we need a comma, rather than assuming that
// `#[attr] expr,` always captures a trailing comma.
|| this.token == token::Comma;
Ok((res, trailing))
|| this.token == token::Comma,
);
Ok((res, trailing, UsePreAttrPos::No))
})
}
}

View File

@ -7,7 +7,7 @@ use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
use thin_vec::ThinVec;
use super::{ForceCollect, Parser};
use super::{ForceCollect, Parser, Trailing, UsePreAttrPos};
use crate::errors::{
self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
@ -169,14 +169,12 @@ impl<'a> Parser<'a> {
let mut done = false;
while !done {
let attrs = self.parse_outer_attributes()?;
let param =
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let param = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
if this.eat_keyword_noexpect(kw::SelfUpper) {
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
// as if `Self` never existed.
this.dcx().emit_err(UnexpectedSelfInGenericParameters {
span: this.prev_token.span,
});
this.dcx()
.emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });
// Eat a trailing comma, if it exists.
let _ = this.eat(&token::Comma);
@ -191,17 +189,15 @@ impl<'a> Parser<'a> {
(None, Vec::new())
};
if this.check_noexpect(&token::Eq)
&& this.look_ahead(1, |t| t.is_lifetime())
{
if this.check_noexpect(&token::Eq) && this.look_ahead(1, |t| t.is_lifetime()) {
let lo = this.token.span;
// Parse `= 'lifetime`.
this.bump(); // `=`
this.bump(); // `'lifetime`
let span = lo.to(this.prev_token.span);
this.dcx().emit_err(
UnexpectedDefaultValueForLifetimeInGenericParameters { span },
);
this.dcx().emit_err(UnexpectedDefaultValueForLifetimeInGenericParameters {
span,
});
}
Some(ast::GenericParam {
@ -228,34 +224,32 @@ impl<'a> Parser<'a> {
span: where_predicate.span(),
});
// FIXME - try to continue parsing other generics?
return Ok((None, false));
}
Err(err) => {
err.cancel();
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
this.restore_snapshot(snapshot);
return Ok((None, false));
}
}
return Ok((None, Trailing::No, UsePreAttrPos::No));
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
this.dcx()
.emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
this.dcx().emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
} else {
this.dcx()
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
}
}
return Ok((None, false));
return Ok((None, Trailing::No, UsePreAttrPos::No));
};
if !this.eat(&token::Comma) {
done = true;
}
// We just ate the comma, so no need to capture the trailing token.
Ok((param, false))
Ok((param, Trailing::No, UsePreAttrPos::No))
})?;
if let Some(param) = param {

View File

@ -20,7 +20,9 @@ use tracing::debug;
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing};
use super::{
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::{fluent_generated as fluent, maybe_whole};
@ -127,7 +129,7 @@ impl<'a> Parser<'a> {
Some(item.into_inner())
});
self.collect_tokens_trailing_token(attrs, force_collect, |this, mut attrs| {
self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
let mut def = this.parse_defaultness();
@ -145,7 +147,7 @@ impl<'a> Parser<'a> {
let span = lo.to(this.prev_token.span);
let id = DUMMY_NODE_ID;
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
return Ok((Some(item), false));
return Ok((Some(item), Trailing::No, UsePreAttrPos::No));
}
// At this point, we have failed to parse an item.
@ -160,7 +162,7 @@ impl<'a> Parser<'a> {
if !attrs_allowed {
this.recover_attrs_no_item(&attrs)?;
}
Ok((None, false))
Ok((None, Trailing::No, UsePreAttrPos::No))
})
}
@ -1546,15 +1548,12 @@ impl<'a> Parser<'a> {
self.recover_vcs_conflict_marker();
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
self.collect_tokens_trailing_token(
variant_attrs,
ForceCollect::No,
|this, variant_attrs| {
self.collect_tokens(None, variant_attrs, ForceCollect::No, |this, variant_attrs| {
let vlo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
if !this.recover_nested_adt_item(kw::Enum)? {
return Ok((None, false));
return Ok((None, Trailing::No, UsePreAttrPos::No));
}
let ident = this.parse_field_ident("enum", vlo)?;
@ -1566,7 +1565,7 @@ impl<'a> Parser<'a> {
this.bump();
this.parse_delim_args()?;
return Ok((None, this.token == token::Comma));
return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
}
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
@ -1623,9 +1622,8 @@ impl<'a> Parser<'a> {
is_placeholder: false,
};
Ok((Some(vr), this.token == token::Comma))
},
)
Ok((Some(vr), Trailing::from(this.token == token::Comma), UsePreAttrPos::No))
})
.map_err(|mut err| {
err.help(help);
err
@ -1777,7 +1775,7 @@ impl<'a> Parser<'a> {
// Unit-like structs are handled in the `parse_item_struct` function.
self.parse_paren_comma_seq(|p| {
let attrs = p.parse_outer_attributes()?;
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
let mut snapshot = None;
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
// Account for `<<<<<<<` diff markers. We can't proactively error here because
@ -1815,7 +1813,8 @@ impl<'a> Parser<'a> {
attrs,
is_placeholder: false,
},
p.token == token::Comma,
Trailing::from(p.token == token::Comma),
UsePreAttrPos::No,
))
})
})
@ -1827,10 +1826,11 @@ impl<'a> Parser<'a> {
self.recover_vcs_conflict_marker();
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
this.parse_single_struct_field(adt_ty, lo, vis, attrs).map(|field| (field, false))
this.parse_single_struct_field(adt_ty, lo, vis, attrs)
.map(|field| (field, Trailing::No, UsePreAttrPos::No))
})
}
@ -2805,12 +2805,12 @@ impl<'a> Parser<'a> {
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
if let Some(mut param) = this.parse_self_param()? {
param.attrs = attrs;
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
return Ok((res?, false));
return Ok((res?, Trailing::No, UsePreAttrPos::No));
}
let is_name_required = match this.token.kind {
@ -2826,7 +2826,7 @@ impl<'a> Parser<'a> {
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
{
let guar = err.emit();
Ok((dummy_arg(ident, guar), false))
Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No))
} else {
Err(err)
};
@ -2869,7 +2869,8 @@ impl<'a> Parser<'a> {
Ok((
Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
false,
Trailing::No,
UsePreAttrPos::No,
))
})
}

View File

@ -14,7 +14,7 @@ use std::assert_matches::debug_assert_matches;
use std::ops::Range;
use std::{fmt, mem, slice};
use attr_wrapper::AttrWrapper;
use attr_wrapper::{AttrWrapper, UsePreAttrPos};
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
pub(crate) use item::FnParseMode;
@ -238,6 +238,7 @@ impl NodeRange {
// is the position of the function's start token. This gives
// `NodeRange(10..15)`.
fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
assert!(parser_range.start >= start_pos && parser_range.end >= start_pos);
NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
}
}
@ -253,7 +254,7 @@ enum Capturing {
Yes,
}
// This state is used by `Parser::collect_tokens_trailing_token`.
// This state is used by `Parser::collect_tokens`.
#[derive(Clone, Debug)]
struct CaptureState {
capturing: Capturing,
@ -388,6 +389,12 @@ enum Trailing {
Yes,
}
impl From<bool> for Trailing {
fn from(b: bool) -> Trailing {
if b { Trailing::Yes } else { Trailing::No }
}
}
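A self-contained sketch of the conversion above (a toy re-declaration of the type, not the compiler's own): call sites in this PR now write `Trailing::from(this.token == token::Comma)` instead of returning a bare `bool` from the collection callback.

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Trailing { No, Yes }

impl From<bool> for Trailing {
    fn from(b: bool) -> Trailing {
        if b { Trailing::Yes } else { Trailing::No }
    }
}

fn main() {
    // A collection callback would return e.g. `Trailing::from(saw_comma)`.
    let saw_comma = true;
    assert_eq!(Trailing::from(saw_comma), Trailing::Yes);
    assert_eq!(Trailing::from(false), Trailing::No);
}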
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
ReservedIdentifier,
@ -459,8 +466,8 @@ impl<'a> Parser<'a> {
parser.bump();
// Change this from 1 back to 0 after the bump. This eases debugging of
// `Parser::collect_tokens_trailing_token` nicer because it makes the
// token positions 0-indexed which is nicer than 1-indexed.
// `Parser::collect_tokens` because 0-indexed token positions are nicer
// than 1-indexed token positions.
parser.num_bump_calls = 0;
parser
@ -1546,11 +1553,9 @@ impl<'a> Parser<'a> {
) -> PResult<'a, R> {
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
// `ForceCollect::Yes`
self.collect_tokens_trailing_token(
AttrWrapper::empty(),
ForceCollect::Yes,
|this, _attrs| Ok((f(this)?, false)),
)
self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
})
}
/// `::{` or `::*`

View File

@ -13,7 +13,7 @@ use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{BytePos, ErrorGuaranteed, Span};
use thin_vec::{thin_vec, ThinVec};
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing};
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos};
use crate::errors::{
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
// Parse an associative expression such as `+ expr`, `% expr`, ...
// Assignments, ranges, and `|` are disabled by [`Restrictions::IS_PAT`].
if let Ok(expr) =
if let Ok((expr, _)) =
snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel())
{
// We got a valid expression.
@ -1302,8 +1302,7 @@ impl<'a> Parser<'a> {
}
}
let field =
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let field = match this.parse_pat_field(lo, attrs) {
Ok(field) => Ok(field),
Err(err) => {
@ -1318,7 +1317,7 @@ impl<'a> Parser<'a> {
last_non_comma_dotdot_span = Some(this.prev_token.span);
// We just ate a comma, so there's no need to capture a trailing token.
Ok((field, false))
Ok((field, Trailing::No, UsePreAttrPos::No))
})?;
fields.push(field)

View File

@ -913,7 +913,7 @@ impl<'a> Parser<'a> {
let snapshot = self.create_snapshot_for_diagnostic();
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
Ok(expr) => {
Ok((expr, _)) => {
return Ok(Some(self.dummy_const_arg_needs_braces(
self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
expr.span,

View File

@ -21,6 +21,7 @@ use super::pat::{PatternLocation, RecoverComma};
use super::path::PathStyle;
use super::{
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
Trailing, UsePreAttrPos,
};
use crate::errors::MalformedLoopLabel;
use crate::{errors, maybe_whole};
@ -45,6 +46,7 @@ impl<'a> Parser<'a> {
capture_semi: bool,
force_collect: ForceCollect,
) -> PResult<'a, Option<Stmt>> {
let pre_attr_pos = self.collect_pos();
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
@ -65,11 +67,15 @@ impl<'a> Parser<'a> {
}
Ok(Some(if self.token.is_keyword(kw::Let) {
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
let local = this.parse_local(attrs)?;
let trailing = capture_semi && this.token == token::Semi;
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
trailing,
UsePreAttrPos::No,
))
})?
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
self.recover_stmt_local_after_let(
@ -103,10 +109,18 @@ impl<'a> Parser<'a> {
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
// that starts like a path (1 token), but is in fact not a path.
// Also, we avoid stealing syntax from `parse_item_`.
let stmt = self.collect_tokens_trailing_token(
//
// `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the
// expression, not the statement. (But the statement attributes/tokens are obtained
// from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to
// the things within `StmtKind`.)
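// For example, in `#[attr] x + y;` the attribute ends up on the `x`
// sub-expression, but the statement's tokens (delegated to that expression)
// must begin before `#[attr]`, which is why the pre-attr position is used.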
let stmt = self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
force_collect,
|this, _empty_attrs| Ok((this.parse_stmt_path_start(lo, attrs)?, false)),
|this, _empty_attrs| {
Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes))
},
);
match stmt {
Ok(stmt) => stmt,
@ -128,12 +142,15 @@ impl<'a> Parser<'a> {
self.error_outer_attrs(attrs);
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts.
let e = self.collect_tokens_trailing_token(
// Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
// above.
let e = self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
force_collect,
|this, _empty_attrs| {
Ok((this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?, false))
let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?;
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
},
)?;
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
@ -150,12 +167,16 @@ impl<'a> Parser<'a> {
}
fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
let stmt = self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;
if this.eat(&token::Not) {
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
return Ok((stmt_mac, this.token == token::Semi));
return Ok((
stmt_mac,
Trailing::from(this.token == token::Semi),
UsePreAttrPos::No,
));
}
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
@ -169,13 +190,17 @@ impl<'a> Parser<'a> {
this.parse_expr_dot_or_call_with(attrs, expr, lo)
})?;
// `DUMMY_SP` will get overwritten later in this function
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), false))
Ok((
this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)),
Trailing::No,
UsePreAttrPos::No,
))
})?;
if let StmtKind::Expr(expr) = stmt.kind {
// Perform this outside of the `collect_tokens_trailing_token` closure,
// since our outer attributes do not apply to this part of the expression
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
// Perform this outside of the `collect_tokens` closure, since our
// outer attributes do not apply to this part of the expression.
let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| {
this.parse_expr_assoc_rest_with(0, true, expr)
})?;
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
@ -209,7 +234,7 @@ impl<'a> Parser<'a> {
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
let e = self.maybe_recover_from_bad_qpath(e)?;
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
let e = self.parse_expr_assoc_rest_with(0, false, e)?;
let (e, _) = self.parse_expr_assoc_rest_with(0, false, e)?;
StmtKind::Expr(e)
};
Ok(self.mk_stmt(lo.to(hi), kind))
@ -239,10 +264,14 @@ impl<'a> Parser<'a> {
subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
force_collect: ForceCollect,
) -> PResult<'a, Stmt> {
let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
let local = this.parse_local(attrs)?;
// FIXME - maybe capture semicolon in recovery?
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), false))
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
Trailing::No,
UsePreAttrPos::No,
))
})?;
self.dcx()
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });

View File

@ -0,0 +1,42 @@
//@ check-pass
// This test triggered an assertion failure in token collection due to
// mishandling of attributes on associative expressions.
#![feature(cfg_eval)]
#![feature(rustc_attrs)]
#![feature(stmt_expr_attributes)]
#![allow(internal_features)]
fn main() {}
#[cfg_eval]
struct Foo1(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(aa))] 1
} ]
);
#[cfg_eval]
struct Foo12(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(bb))] 1 + 2
} ]
);
#[cfg_eval]
struct Foox(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(cc))] _x
} ]
);
#[cfg_eval]
struct Foox2(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(dd))] _x + 2
} ]
);
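// Of these, the associative cases (`1 + 2` in `Foo12`, `_x + 2` in `Foox2`)
// are the ones that exercised the assertion failure described above; `Foo1`
// and `Foox` are the corresponding non-associative forms, included for
// contrast.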

View File

@ -33,8 +33,6 @@ macro_rules! stmt { ($stmt:stmt) => { stringify!($stmt) }; }
macro_rules! ty { ($ty:ty) => { stringify!($ty) }; }
macro_rules! vis { ($vis:vis) => { stringify!($vis) }; }
// Use this when AST pretty-printing and TokenStream pretty-printing give
// the same result (which is preferable.)
macro_rules! c1 {
($frag:ident, [$($tt:tt)*], $s:literal) => {
// Prior to #125174:
@ -66,6 +64,8 @@ fn test_block() {
} ],
"{ let _; true }"
);
// Attributes are not allowed on vanilla blocks.
}
#[test]
@ -332,6 +332,20 @@ fn test_expr() {
// ExprKind::FormatArgs: untestable because this test works pre-expansion.
// ExprKind::Err: untestable.
// Ones involving attributes.
c1!(expr, [ #[aa] 1 ], "#[aa] 1");
c1!(expr, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
c1!(expr, [ #[aa] 1 + 2 ], "#[aa] 1 + 2");
c1!(expr, [ #[aa] x + 2 ], "#[aa] x + 2");
c1!(expr, [ #[aa] 1 / #[bb] 2 ], "#[aa] 1 / #[bb] 2");
c1!(expr, [ #[aa] x / #[bb] 2 ], "#[aa] x / #[bb] 2");
c1!(expr, [ 1 << #[bb] 2 ], "1 << #[bb] 2");
c1!(expr, [ x << #[bb] 2 ], "x << #[bb] 2");
c1!(expr, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
c1!(expr, [ #[aa] #[bb] (x + 2) ], "#[aa] #[bb] (x + 2)");
c1!(expr, [ #[aa] x[0].p ], "#[aa] x[0].p");
c1!(expr, [ #[aa] { #![bb] 0 } ], "#[aa] { #![bb] 0 }");
}
#[test]
@ -484,6 +498,11 @@ fn test_item() {
"macro_rules! stringify { () => {}; }"
);
c1!(item, [ pub macro stringify() {} ], "pub macro stringify() {}");
// Ones involving attributes.
c1!(item, [ #[aa] mod m; ], "#[aa] mod m;");
c1!(item, [ mod m { #![bb] } ], "mod m { #![bb] }");
c1!(item, [ #[aa] mod m { #![bb] } ], "#[aa] mod m { #![bb] }");
}
#[test]
@ -492,6 +511,8 @@ fn test_meta() {
c1!(meta, [ k = "v" ], "k = \"v\"");
c1!(meta, [ list(k1, k2 = "v") ], "list(k1, k2 = \"v\")");
c1!(meta, [ serde::k ], "serde::k");
// Attributes are not allowed on metas.
}
#[test]
@ -580,6 +601,8 @@ fn test_pat() {
c1!(pat, [ mac!(...) ], "mac!(...)");
c1!(pat, [ mac![...] ], "mac![...]");
c1!(pat, [ mac! { ... } ], "mac! { ... }");
// Attributes are not allowed on patterns.
}
#[test]
@ -593,6 +616,8 @@ fn test_path() {
c1!(path, [ Self::<'static> ], "Self::<'static>");
c1!(path, [ Self() ], "Self()");
c1!(path, [ Self() -> () ], "Self() -> ()");
// Attributes are not allowed on paths.
}
#[test]
@ -622,6 +647,20 @@ fn test_stmt() {
c1!(stmt, [ mac!(...) ], "mac!(...)");
c1!(stmt, [ mac![...] ], "mac![...]");
c1!(stmt, [ mac! { ... } ], "mac! { ... }");
// Ones involving attributes.
c1!(stmt, [ #[aa] 1 ], "#[aa] 1");
c1!(stmt, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
c1!(stmt, [ #[aa] 1 as u32 ], "#[aa] 1 as u32");
c1!(stmt, [ #[aa] x as u32 ], "#[aa] x as u32");
c1!(stmt, [ #[aa] 1 .. #[bb] 2 ], "#[aa] 1 .. #[bb] 2");
c1!(stmt, [ #[aa] x .. #[bb] 2 ], "#[aa] x .. #[bb] 2");
c1!(stmt, [ 1 || #[bb] 2 ], "1 || #[bb] 2");
c1!(stmt, [ x || #[bb] 2 ], "x || #[bb] 2");
c1!(stmt, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
c1!(stmt, [ #[aa] #[bb] (x + 2) ], "#[aa] #[bb] (x + 2)");
c1!(stmt, [ #[aa] x[0].p ], "#[aa] x[0].p");
c1!(stmt, [ #[aa] { #![bb] 0 } ], "#[aa] { #![bb] 0 }");
}
#[test]
@ -708,6 +747,8 @@ fn test_ty() {
// TyKind::CVarArgs
// FIXME: todo
// Attributes are not allowed on types.
}
#[test]
@ -732,6 +773,8 @@ fn test_vis() {
macro_rules! inherited_vis { ($vis:vis struct) => { vis!($vis) }; }
assert_eq!(inherited_vis!(struct), "");
assert_eq!(stringify!(), "");
// Attributes are not allowed on visibilities.
}
macro_rules! p {