From 9d31f86f0d1482b4e5084579238009cc5d98e49f Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote
Date: Tue, 6 Aug 2024 17:16:40 +1000
Subject: [PATCH] Overhaul token collection.

This commit does the following.

- Renames `collect_tokens_trailing_token` as `collect_tokens`, because
  (a) it's annoyingly long, and (b) the `_trailing_token` bit is less
  accurate now that its types have changed.

- In `collect_tokens`, adds an `Option<CollectPos>` argument and a
  `UsePreAttrPos` in the return type of `f`. These are used in
  `parse_expr_force_collect` (for vanilla expressions) and in
  `parse_stmt_without_recovery` (for two different cases of expression
  statements). Together these are enough to fix all the problems with
  token collection and assoc expressions. The changes to the
  `stringify.rs` test demonstrate some of these.

- Adds a new test. The code in this test was causing an assertion
  failure prior to this commit, due to an invalid `NodeRange`.

The extra complexity is annoying, but necessary to fix the existing
problems.
---
 compiler/rustc_parse/src/parser/attr.rs         |  18 +-
 .../rustc_parse/src/parser/attr_wrapper.rs      |  95 ++++++++---
 .../rustc_parse/src/parser/diagnostics.rs       |  19 ++-
 compiler/rustc_parse/src/parser/expr.rs         |  89 ++++++----
 compiler/rustc_parse/src/parser/generics.rs     | 160 +++++++++----------
 compiler/rustc_parse/src/parser/item.rs         | 146 ++++++++--------
 compiler/rustc_parse/src/parser/mod.rs          |  16 +-
 compiler/rustc_parse/src/parser/pat.rs          |  33 ++--
 compiler/rustc_parse/src/parser/path.rs         |   2 +-
 compiler/rustc_parse/src/parser/stmt.rs         |  62 +++++--
 tests/ui/attributes/assoc-expr.rs               |  42 +++++
 tests/ui/macros/stringify.rs                    |  24 +--
 12 files changed, 414 insertions(+), 292 deletions(-)
 create mode 100644 tests/ui/attributes/assoc-expr.rs

diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 1ae26309e44..3d06017fcf3 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -10,6 +10,7 @@
 use super::{
     AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
+    UsePreAttrPos,
 };
 use crate::{errors, fluent_generated as fluent, maybe_whole};
@@ -259,7 +260,8 @@ pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerA
     pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
         maybe_whole!(self, NtMeta, |attr| attr.into_inner());

-        let do_parse = |this: &mut Self, _empty_attrs| {
+        // Attr items don't have attributes.
+        self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
             let is_unsafe = this.eat_keyword(kw::Unsafe);
             let unsafety = if is_unsafe {
                 let unsafe_span = this.prev_token.span;
@@ -275,10 +277,12 @@ pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, as
             if is_unsafe {
                 this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
             }
-            Ok((ast::AttrItem { unsafety, path, args, tokens: None }, Trailing::No))
-        };
-        // Attr items don't have attributes.
-        self.collect_tokens_trailing_token(AttrWrapper::empty(), force_collect, do_parse)
+            Ok((
+                ast::AttrItem { unsafety, path, args, tokens: None },
+                Trailing::No,
+                UsePreAttrPos::No,
+            ))
+        })
     }

     /// Parses attributes that appear after the opening of an item. These should
@@ -311,8 +315,8 @@ pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
         };
         if let Some(attr) = attr {
             // If we are currently capturing tokens (i.e. we are within a call to
-            // `Parser::collect_tokens_trailing_tokens`) record the token positions of this
-            // inner attribute, for possible later processing in a `LazyAttrTokenStream`.
+            // `Parser::collect_tokens`) record the token positions of this inner attribute,
+            // for possible later processing in a `LazyAttrTokenStream`.
             if let Capturing::Yes = self.capture_state.capturing {
                 let end_pos = self.num_bump_calls;
                 let parser_range = ParserRange(start_pos..end_pos);
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 65722f2b7e3..49df2811d52 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -15,6 +15,20 @@
     TokenCursor, Trailing,
 };

+// When collecting tokens, this fully captures the start point. Usually it's
+// just after outer attributes, but occasionally it's before.
+#[derive(Clone, Debug)]
+pub(super) struct CollectPos {
+    start_token: (Token, Spacing),
+    cursor_snapshot: TokenCursor,
+    start_pos: u32,
+}
+
+pub(super) enum UsePreAttrPos {
+    No,
+    Yes,
+}
+
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
 /// When we parse outer attributes, we need to ensure that we capture tokens
 /// for the attribute target. This allows us to perform cfg-expansion on
@@ -22,7 +36,7 @@
 ///
 /// This wrapper prevents direct access to the underlying `ast::AttrVec`.
 /// Parsing code can only get access to the underlying attributes
-/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
+/// by passing an `AttrWrapper` to `collect_tokens`.
 /// This makes it difficult to accidentally construct an AST node
 /// (which stores an `ast::AttrVec`) without first collecting tokens.
 ///
@@ -33,16 +47,18 @@ pub(super) struct AttrWrapper {
     attrs: AttrVec,
     // The start of the outer attributes in the parser's token stream.
     // This lets us create a `NodeReplacement` for the entire attribute
-    // target, including outer attributes.
-    start_pos: u32,
+    // target, including outer attributes. `None` if there are no outer
+    // attributes.
+    start_pos: Option<u32>,
 }

 impl AttrWrapper {
     pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
-        AttrWrapper { attrs, start_pos }
+        AttrWrapper { attrs, start_pos: Some(start_pos) }
     }
+
     pub(super) fn empty() -> AttrWrapper {
-        AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
+        AttrWrapper { attrs: AttrVec::new(), start_pos: None }
     }

     pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
@@ -77,7 +93,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 }

 // From a value of this type we can reconstruct the `TokenStream` seen by the
-// `f` callback passed to a call to `Parser::collect_tokens_trailing_token`, by
+// `f` callback passed to a call to `Parser::collect_tokens`, by
 // replaying the getting of the tokens. This saves us producing a `TokenStream`
 // if it is never needed, e.g. a captured `macro_rules!` argument that is never
 // passed to a proc macro. In practice, token stream creation happens rarely
@@ -166,16 +182,30 @@ fn to_attr_token_stream(&self) -> AttrTokenStream {
 }

 impl<'a> Parser<'a> {
+    pub(super) fn collect_pos(&self) -> CollectPos {
+        CollectPos {
+            start_token: (self.token.clone(), self.token_spacing),
+            cursor_snapshot: self.token_cursor.clone(),
+            start_pos: self.num_bump_calls,
+        }
+    }
+
     /// Parses code with `f`. If appropriate, it records the tokens (in
     /// `LazyAttrTokenStream` form) that were parsed in the result, accessible
     /// via the `HasTokens` trait. The `Trailing` part of the callback's
     /// result indicates if an extra token should be captured, e.g. a comma or
-    /// semicolon.
+    /// semicolon. The `UsePreAttrPos` part of the callback's result indicates
+    /// if we should use `pre_attr_pos` as the collection start position (only
+    /// required in a few cases).
     ///
     /// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
     /// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
     /// details.
     ///
+    /// `pre_attr_pos` is the position before the outer attributes (or the node
+    /// itself, if no outer attributes are present). It is only needed if `f`
+    /// can return `UsePreAttrPos::Yes`.
+    ///
     /// Note: If your callback consumes an opening delimiter (including the
     /// case where `self.token` is an opening delimiter on entry to this
     /// function), you must also consume the corresponding closing delimiter.
@@ -197,11 +227,12 @@ impl<'a> Parser<'a> {
     ///  }        // 32..33
     /// }         // 33..34
     /// ```
-    pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
+    pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
         &mut self,
+        pre_attr_pos: Option<CollectPos>,
         attrs: AttrWrapper,
         force_collect: ForceCollect,
-        f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, Trailing)>,
+        f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
     ) -> PResult<'a, R> {
         // We must collect if anything could observe the collected tokens, i.e.
         // if any of the following conditions hold.
@@ -220,23 +251,20 @@ pub(super) fn collect_tokens_trailing_token(
             return Ok(f(self, attrs.attrs)?.0);
         }

-        let start_token = (self.token.clone(), self.token_spacing);
-        let cursor_snapshot = self.token_cursor.clone();
-        let start_pos = self.num_bump_calls;
+        let mut collect_pos = self.collect_pos();
         let has_outer_attrs = !attrs.attrs.is_empty();
         let parser_replacements_start = self.capture_state.parser_replacements.len();

         // We set and restore `Capturing::Yes` on either side of the call to
-        // `f`, so we can distinguish the outermost call to
-        // `collect_tokens_trailing_token` (e.g. parsing `m` in the example
-        // above) from any inner (indirectly recursive) calls (e.g. parsing `g`
-        // in the example above). This distinction is used below and in
-        // `Parser::parse_inner_attributes`.
-        let (mut ret, capture_trailing) = {
+        // `f`, so we can distinguish the outermost call to `collect_tokens`
+        // (e.g. parsing `m` in the example above) from any inner (indirectly
+        // recursive) calls (e.g. parsing `g` in the example above). This
+        // distinction is used below and in `Parser::parse_inner_attributes`.
+        let (mut ret, capture_trailing, use_pre_attr_pos) = {
             let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
-            let f_res = f(self, attrs.attrs);
+            let res = f(self, attrs.attrs);
             self.capture_state.capturing = prev_capturing;
-            f_res?
+            res?
         };

         // When we're not in `capture_cfg` mode, then skip collecting and
@@ -279,6 +307,14 @@ pub(super) fn collect_tokens_trailing_token(
             return Ok(ret);
         }

+        // Replace the post-attribute collection start position with the
+        // pre-attribute position supplied, if `f` indicated it is necessary.
+        // (The caller is responsible for providing a non-`None` `pre_attr_pos`
+        // if this is a possibility.)
+ if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) { + collect_pos = pre_attr_pos.unwrap(); + } + let parser_replacements_end = self.capture_state.parser_replacements.len(); assert!( @@ -294,7 +330,7 @@ pub(super) fn collect_tokens_trailing_token( // `AttrTokenStream`, we will create the proper token. + self.break_last_token as u32; - let num_calls = end_pos - start_pos; + let num_calls = end_pos - collect_pos.start_pos; // Take the captured `ParserRange`s for any inner attributes that we parsed in // `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`, @@ -328,7 +364,9 @@ pub(super) fn collect_tokens_trailing_token( .iter() .cloned() .chain(inner_attr_parser_replacements.iter().cloned()) - .map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data)) + .map(|(parser_range, data)| { + (NodeRange::new(parser_range, collect_pos.start_pos), data) + }) .collect() }; @@ -355,9 +393,9 @@ pub(super) fn collect_tokens_trailing_token( // - `tokens`: lazy tokens for `g` (with its inner attr deleted). let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl { - start_token, + start_token: collect_pos.start_token, + cursor_snapshot: collect_pos.cursor_snapshot, num_calls, - cursor_snapshot, break_last_token: self.break_last_token, node_replacements, }); @@ -368,9 +406,9 @@ pub(super) fn collect_tokens_trailing_token( } // If `capture_cfg` is set and we're inside a recursive call to - // `collect_tokens_trailing_token`, then we need to register a replace range - // if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion - // on the captured token stream. + // `collect_tokens`, then we need to register a replace range if we + // have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager + // cfg-expansion on the captured token stream. if self.capture_cfg && matches!(self.capture_state.capturing, Capturing::Yes) && has_cfg_or_cfg_attr(ret.attrs()) @@ -389,7 +427,8 @@ pub(super) fn collect_tokens_trailing_token( // Set things up so that the entire AST node that we just parsed, including attributes, // will be replaced with `target` in the lazy token stream. This will allow us to // cfg-expand this AST node. 
- let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos }; + let start_pos = + if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos }; let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens }; self.capture_state .parser_replacements diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index ba4e222a41a..ef1387c50fa 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -2487,13 +2487,14 @@ pub(super) fn handle_ambiguous_unbraced_const_arg( pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P> { let start = self.token.span; let attrs = self.parse_outer_attributes()?; - let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| { - err.span_label( - start.shrink_to_lo(), - "while parsing a const generic argument starting here", - ); - err - })?; + let (expr, _) = + self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| { + err.span_label( + start.shrink_to_lo(), + "while parsing a const generic argument starting here", + ); + err + })?; if !self.expr_is_valid_const_arg(&expr) { self.dcx().emit_err(ConstGenericWithoutBraces { span: expr.span, @@ -2613,7 +2614,7 @@ pub(super) fn recover_const_arg( let attrs = self.parse_outer_attributes()?; self.parse_expr_res(Restrictions::CONST_EXPR, attrs) })() { - Ok(expr) => { + Ok((expr, _)) => { // Find a mistake like `MyTrait`. if snapshot.token == token::EqEq { err.span_suggestion( @@ -2671,7 +2672,7 @@ pub(crate) fn recover_unbraced_const_arg_that_can_begin_ty( })() { // Since we don't know the exact reason why we failed to parse the type or the // expression, employ a simple heuristic to weed out some pathological cases. - Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => { + Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => { self.restore_snapshot(snapshot); Some(expr) } diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index e284c7a11bc..e0917ba43e4 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -36,7 +36,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, - SemiColonMode, SeqSep, TokenType, Trailing, + SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos, }; use crate::{errors, maybe_recover_from_interpolated_ty_qpath}; @@ -59,15 +59,30 @@ pub fn parse_expr(&mut self) -> PResult<'a, P> { self.current_closure.take(); let attrs = self.parse_outer_attributes()?; - self.parse_expr_res(Restrictions::empty(), attrs) + self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0) } /// Parses an expression, forcing tokens to be collected. pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P> { self.current_closure.take(); + // If the expression is associative (e.g. `1 + 2`), then any preceding + // outer attribute actually belongs to the first inner sub-expression. + // In which case we must use the pre-attr pos to include the attribute + // in the collected tokens for the outer expression. 
+ let pre_attr_pos = self.collect_pos(); let attrs = self.parse_outer_attributes()?; - self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs)) + self.collect_tokens( + Some(pre_attr_pos), + AttrWrapper::empty(), + ForceCollect::Yes, + |this, _empty_attrs| { + let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?; + let use_pre_attr_pos = + if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No }; + Ok((expr, Trailing::No, use_pre_attr_pos)) + }, + ) } pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> { @@ -77,7 +92,7 @@ pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> { fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P> { let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { - Ok(expr) => Ok(expr), + Ok((expr, _)) => Ok(expr), Err(err) => match self.token.ident() { Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) => @@ -104,18 +119,20 @@ pub(super) fn parse_expr_res( &mut self, r: Restrictions, attrs: AttrWrapper, - ) -> PResult<'a, P> { + ) -> PResult<'a, (P, bool)> { self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs)) } /// Parses an associative expression with operators of at least `min_prec` precedence. + /// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator + /// followed by a subexpression (e.g. `1 + 2`). pub(super) fn parse_expr_assoc_with( &mut self, min_prec: usize, attrs: AttrWrapper, - ) -> PResult<'a, P> { + ) -> PResult<'a, (P, bool)> { let lhs = if self.token.is_range_separator() { - return self.parse_expr_prefix_range(attrs); + return self.parse_expr_prefix_range(attrs).map(|res| (res, false)); } else { self.parse_expr_prefix(attrs)? }; @@ -123,15 +140,17 @@ pub(super) fn parse_expr_assoc_with( } /// Parses the rest of an associative expression (i.e. the part after the lhs) with operators - /// of at least `min_prec` precedence. + /// of at least `min_prec` precedence. The `bool` in the return value indicates if something + /// was actually parsed. pub(super) fn parse_expr_assoc_rest_with( &mut self, min_prec: usize, starts_stmt: bool, mut lhs: P, - ) -> PResult<'a, P> { + ) -> PResult<'a, (P, bool)> { + let mut parsed_something = false; if !self.should_continue_as_assoc_expr(&lhs) { - return Ok(lhs); + return Ok((lhs, parsed_something)); } self.expected_tokens.push(TokenType::Operator); @@ -156,10 +175,11 @@ pub(super) fn parse_expr_assoc_rest_with( self.err_larrow_operator(self.token.span); } + parsed_something = true; self.bump(); if op.node.is_comparison() { if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? { - return Ok(expr); + return Ok((expr, parsed_something)); } } @@ -263,7 +283,7 @@ pub(super) fn parse_expr_assoc_rest_with( // the special cases. The code is here only for future convenience. 
Fixity::None => 1, }; - let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { + let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { let attrs = this.parse_outer_attributes()?; this.parse_expr_assoc_with(prec + prec_adjustment, attrs) })?; @@ -319,7 +339,7 @@ pub(super) fn parse_expr_assoc_rest_with( } } - Ok(lhs) + Ok((lhs, parsed_something)) } fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool { @@ -441,7 +461,8 @@ fn parse_expr_range( let attrs = self.parse_outer_attributes()?; Some( self.parse_expr_assoc_with(prec + 1, attrs) - .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?, + .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))? + .0, ) } else { None @@ -498,7 +519,7 @@ fn parse_expr_prefix_range(&mut self, attrs: AttrWrapper) -> PResult<'a, P // RHS must be parsed with more associativity than the dots. let attrs = this.parse_outer_attributes()?; this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs) - .map(|x| (lo.to(x.span), Some(x))) + .map(|(x, _)| (lo.to(x.span), Some(x))) .map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))? } else { (lo, None) @@ -2335,7 +2356,7 @@ fn parse_expr_closure(&mut self) -> PResult<'a, P> { let token = self.token.clone(); let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { - Ok(expr) => expr, + Ok((expr, _)) => expr, Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?, } } @@ -2445,7 +2466,7 @@ fn parse_fn_block_decl(&mut self) -> PResult<'a, (P, Span)> { fn parse_fn_block_param(&mut self) -> PResult<'a, Param> { let lo = self.token.span; let attrs = self.parse_outer_attributes()?; - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?; let ty = if this.eat(&token::Colon) { this.parse_ty()? @@ -2463,6 +2484,7 @@ fn parse_fn_block_param(&mut self) -> PResult<'a, Param> { is_placeholder: false, }, Trailing::from(this.token == token::Comma), + UsePreAttrPos::No, )) }) } @@ -2583,7 +2605,7 @@ fn parse_if_after_cond(&mut self, lo: Span, mut cond: P) -> PResult<'a, P< /// Parses the condition of a `if` or `while` expression. fn parse_expr_cond(&mut self) -> PResult<'a, P> { let attrs = self.parse_outer_attributes()?; - let mut cond = + let (mut cond, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?; CondChecker::new(self).visit_expr(&mut cond); @@ -2632,7 +2654,7 @@ fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P> self.expect(&token::Eq)?; } let attrs = self.parse_outer_attributes()?; - let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?; + let (expr, _) = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?; let span = lo.to(expr.span); Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered))) } @@ -2766,7 +2788,7 @@ fn parse_for_head(&mut self) -> PResult<'a, (P, P)> { // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would // happen right before the return of this method. 
         let attrs = self.parse_outer_attributes()?;
-        let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
+        let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
             Ok(expr) => expr,
             Err(expr_err) => {
                 // We don't know what followed the `in`, so cancel and bubble up the
@@ -2801,7 +2823,7 @@ fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
         }
         self.check_for_for_in_in_typo(self.prev_token.span);
         let attrs = self.parse_outer_attributes()?;
-        let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
+        let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
         Ok((pat, expr))
     }

@@ -2921,7 +2943,7 @@ pub(crate) fn eat_label(&mut self) -> Option