Change how parse_expr_force_collect works.

It now parses outer attributes before collecting tokens. This avoids the
problem where the outer attribute tokens were being stored twice -- once for
the attribute tokens, and again for the expression tokens.

Fixes #86055.
This commit is contained in:
Nicholas Nethercote 2024-06-19 17:33:46 +10:00
parent 8170acb197
commit 64c2e9ed3b
2 changed files with 6 additions and 28 deletions

View File

@ -98,9 +98,12 @@ pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
self.parse_expr_res(Restrictions::empty(), attrs)
}
/// Parses an expression, forcing tokens to be collected
/// Parses an expression, forcing tokens to be collected.
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
self.collect_tokens_no_attrs(|this| this.parse_expr())
self.current_closure.take();
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs))
}
pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {

View File

@ -1,4 +1,4 @@
PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = #[allow(warnings)] #[allow(warnings)] 0; 0 }, }
PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = #[allow(warnings)] 0; 0 }, }
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "enum",
@ -39,31 +39,6 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Group {
delimiter: None,
stream: TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: #0 bytes(543..544),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: #0 bytes(545..550),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "warnings",
span: #0 bytes(551..559),
},
],
span: #0 bytes(550..560),
},
],
span: #0 bytes(544..561),
},
Punct {
ch: '#',
spacing: Alone,