Overhaul token collection.
This commit does the following. - Renames `collect_tokens_trailing_token` as `collect_tokens`, because (a) it's annoyingly long, and (b) the `_trailing_token` bit is less accurate now that its types have changed. - In `collect_tokens`, adds an `Option<CollectPos>` argument and a `UsePreAttrPos` in the return type of `f`. These are used in `parse_expr_force_collect` (for vanilla expressions) and in `parse_stmt_without_recovery` (for two different cases of expression statements). Together these are enough to fix all the problems with token collection and assoc expressions. The changes to the `stringify.rs` test demonstrate some of these. - Adds a new test. The code in this test was causing an assertion failure prior to this commit, due to an invalid `NodeRange`. The extra complexity is annoying, but necessary to fix the existing problems.
This commit is contained in:
parent
fe460ac28b
commit
9d31f86f0d
@ -10,6 +10,7 @@
|
||||
|
||||
use super::{
|
||||
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
|
||||
UsePreAttrPos,
|
||||
};
|
||||
use crate::{errors, fluent_generated as fluent, maybe_whole};
|
||||
|
||||
@ -259,7 +260,8 @@ pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerA
|
||||
pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
|
||||
maybe_whole!(self, NtMeta, |attr| attr.into_inner());
|
||||
|
||||
let do_parse = |this: &mut Self, _empty_attrs| {
|
||||
// Attr items don't have attributes.
|
||||
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
|
||||
let is_unsafe = this.eat_keyword(kw::Unsafe);
|
||||
let unsafety = if is_unsafe {
|
||||
let unsafe_span = this.prev_token.span;
|
||||
@ -275,10 +277,12 @@ pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, as
|
||||
if is_unsafe {
|
||||
this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
|
||||
}
|
||||
Ok((ast::AttrItem { unsafety, path, args, tokens: None }, Trailing::No))
|
||||
};
|
||||
// Attr items don't have attributes.
|
||||
self.collect_tokens_trailing_token(AttrWrapper::empty(), force_collect, do_parse)
|
||||
Ok((
|
||||
ast::AttrItem { unsafety, path, args, tokens: None },
|
||||
Trailing::No,
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
/// Parses attributes that appear after the opening of an item. These should
|
||||
@ -311,8 +315,8 @@ pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
|
||||
};
|
||||
if let Some(attr) = attr {
|
||||
// If we are currently capturing tokens (i.e. we are within a call to
|
||||
// `Parser::collect_tokens_trailing_tokens`) record the token positions of this
|
||||
// inner attribute, for possible later processing in a `LazyAttrTokenStream`.
|
||||
// `Parser::collect_tokens`) record the token positions of this inner attribute,
|
||||
// for possible later processing in a `LazyAttrTokenStream`.
|
||||
if let Capturing::Yes = self.capture_state.capturing {
|
||||
let end_pos = self.num_bump_calls;
|
||||
let parser_range = ParserRange(start_pos..end_pos);
|
||||
|
@ -15,6 +15,20 @@
|
||||
TokenCursor, Trailing,
|
||||
};
|
||||
|
||||
// When collecting tokens, this fully captures the start point. Usually it's
|
||||
// just after outer attributes, but occasionally it's before.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(super) struct CollectPos {
|
||||
start_token: (Token, Spacing),
|
||||
cursor_snapshot: TokenCursor,
|
||||
start_pos: u32,
|
||||
}
|
||||
|
||||
pub(super) enum UsePreAttrPos {
|
||||
No,
|
||||
Yes,
|
||||
}
|
||||
|
||||
/// A wrapper type to ensure that the parser handles outer attributes correctly.
|
||||
/// When we parse outer attributes, we need to ensure that we capture tokens
|
||||
/// for the attribute target. This allows us to perform cfg-expansion on
|
||||
@ -22,7 +36,7 @@
|
||||
///
|
||||
/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
|
||||
/// Parsing code can only get access to the underlying attributes
|
||||
/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
|
||||
/// by passing an `AttrWrapper` to `collect_tokens`.
|
||||
/// This makes it difficult to accidentally construct an AST node
|
||||
/// (which stores an `ast::AttrVec`) without first collecting tokens.
|
||||
///
|
||||
@ -33,16 +47,18 @@ pub(super) struct AttrWrapper {
|
||||
attrs: AttrVec,
|
||||
// The start of the outer attributes in the parser's token stream.
|
||||
// This lets us create a `NodeReplacement` for the entire attribute
|
||||
// target, including outer attributes.
|
||||
start_pos: u32,
|
||||
// target, including outer attributes. `None` if there are no outer
|
||||
// attributes.
|
||||
start_pos: Option<u32>,
|
||||
}
|
||||
|
||||
impl AttrWrapper {
|
||||
pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
|
||||
AttrWrapper { attrs, start_pos }
|
||||
AttrWrapper { attrs, start_pos: Some(start_pos) }
|
||||
}
|
||||
|
||||
pub(super) fn empty() -> AttrWrapper {
|
||||
AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
|
||||
AttrWrapper { attrs: AttrVec::new(), start_pos: None }
|
||||
}
|
||||
|
||||
pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
|
||||
@ -77,7 +93,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
|
||||
}
|
||||
|
||||
// From a value of this type we can reconstruct the `TokenStream` seen by the
|
||||
// `f` callback passed to a call to `Parser::collect_tokens_trailing_token`, by
|
||||
// `f` callback passed to a call to `Parser::collect_tokens`, by
|
||||
// replaying the getting of the tokens. This saves us producing a `TokenStream`
|
||||
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
|
||||
// passed to a proc macro. In practice, token stream creation happens rarely
|
||||
@ -166,16 +182,30 @@ fn to_attr_token_stream(&self) -> AttrTokenStream {
|
||||
}
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
pub(super) fn collect_pos(&self) -> CollectPos {
|
||||
CollectPos {
|
||||
start_token: (self.token.clone(), self.token_spacing),
|
||||
cursor_snapshot: self.token_cursor.clone(),
|
||||
start_pos: self.num_bump_calls,
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses code with `f`. If appropriate, it records the tokens (in
|
||||
/// `LazyAttrTokenStream` form) that were parsed in the result, accessible
|
||||
/// via the `HasTokens` trait. The `Trailing` part of the callback's
|
||||
/// result indicates if an extra token should be captured, e.g. a comma or
|
||||
/// semicolon.
|
||||
/// semicolon. The `UsePreAttrPos` part of the callback's result indicates
|
||||
/// if we should use `pre_attr_pos` as the collection start position (only
|
||||
/// required in a few cases).
|
||||
///
|
||||
/// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
|
||||
/// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
|
||||
/// details.
|
||||
///
|
||||
/// `pre_attr_pos` is the position before the outer attributes (or the node
|
||||
/// itself, if no outer attributes are present). It is only needed if `f`
|
||||
/// can return `UsePreAttrPos::Yes`.
|
||||
///
|
||||
/// Note: If your callback consumes an opening delimiter (including the
|
||||
/// case where `self.token` is an opening delimiter on entry to this
|
||||
/// function), you must also consume the corresponding closing delimiter.
|
||||
@ -197,11 +227,12 @@ impl<'a> Parser<'a> {
|
||||
/// } // 32..33
|
||||
/// } // 33..34
|
||||
/// ```
|
||||
pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
|
||||
&mut self,
|
||||
pre_attr_pos: Option<CollectPos>,
|
||||
attrs: AttrWrapper,
|
||||
force_collect: ForceCollect,
|
||||
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, Trailing)>,
|
||||
f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
|
||||
) -> PResult<'a, R> {
|
||||
// We must collect if anything could observe the collected tokens, i.e.
|
||||
// if any of the following conditions hold.
|
||||
@ -220,23 +251,20 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
return Ok(f(self, attrs.attrs)?.0);
|
||||
}
|
||||
|
||||
let start_token = (self.token.clone(), self.token_spacing);
|
||||
let cursor_snapshot = self.token_cursor.clone();
|
||||
let start_pos = self.num_bump_calls;
|
||||
let mut collect_pos = self.collect_pos();
|
||||
let has_outer_attrs = !attrs.attrs.is_empty();
|
||||
let parser_replacements_start = self.capture_state.parser_replacements.len();
|
||||
|
||||
// We set and restore `Capturing::Yes` on either side of the call to
|
||||
// `f`, so we can distinguish the outermost call to
|
||||
// `collect_tokens_trailing_token` (e.g. parsing `m` in the example
|
||||
// above) from any inner (indirectly recursive) calls (e.g. parsing `g`
|
||||
// in the example above). This distinction is used below and in
|
||||
// `Parser::parse_inner_attributes`.
|
||||
let (mut ret, capture_trailing) = {
|
||||
// `f`, so we can distinguish the outermost call to `collect_tokens`
|
||||
// (e.g. parsing `m` in the example above) from any inner (indirectly
|
||||
// recursive) calls (e.g. parsing `g` in the example above). This
|
||||
// distinction is used below and in `Parser::parse_inner_attributes`.
|
||||
let (mut ret, capture_trailing, use_pre_attr_pos) = {
|
||||
let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
|
||||
let f_res = f(self, attrs.attrs);
|
||||
let res = f(self, attrs.attrs);
|
||||
self.capture_state.capturing = prev_capturing;
|
||||
f_res?
|
||||
res?
|
||||
};
|
||||
|
||||
// When we're not in `capture_cfg` mode, then skip collecting and
|
||||
@ -279,6 +307,14 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
return Ok(ret);
|
||||
}
|
||||
|
||||
// Replace the post-attribute collection start position with the
|
||||
// pre-attribute position supplied, if `f` indicated it is necessary.
|
||||
// (The caller is responsible for providing a non-`None` `pre_attr_pos`
|
||||
// if this is a possibility.)
|
||||
if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
|
||||
collect_pos = pre_attr_pos.unwrap();
|
||||
}
|
||||
|
||||
let parser_replacements_end = self.capture_state.parser_replacements.len();
|
||||
|
||||
assert!(
|
||||
@ -294,7 +330,7 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
// `AttrTokenStream`, we will create the proper token.
|
||||
+ self.break_last_token as u32;
|
||||
|
||||
let num_calls = end_pos - start_pos;
|
||||
let num_calls = end_pos - collect_pos.start_pos;
|
||||
|
||||
// Take the captured `ParserRange`s for any inner attributes that we parsed in
|
||||
// `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
|
||||
@ -328,7 +364,9 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(inner_attr_parser_replacements.iter().cloned())
|
||||
.map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data))
|
||||
.map(|(parser_range, data)| {
|
||||
(NodeRange::new(parser_range, collect_pos.start_pos), data)
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
|
||||
@ -355,9 +393,9 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
|
||||
|
||||
let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
|
||||
start_token,
|
||||
start_token: collect_pos.start_token,
|
||||
cursor_snapshot: collect_pos.cursor_snapshot,
|
||||
num_calls,
|
||||
cursor_snapshot,
|
||||
break_last_token: self.break_last_token,
|
||||
node_replacements,
|
||||
});
|
||||
@ -368,9 +406,9 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
}
|
||||
|
||||
// If `capture_cfg` is set and we're inside a recursive call to
|
||||
// `collect_tokens_trailing_token`, then we need to register a replace range
|
||||
// if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion
|
||||
// on the captured token stream.
|
||||
// `collect_tokens`, then we need to register a replace range if we
|
||||
// have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager
|
||||
// cfg-expansion on the captured token stream.
|
||||
if self.capture_cfg
|
||||
&& matches!(self.capture_state.capturing, Capturing::Yes)
|
||||
&& has_cfg_or_cfg_attr(ret.attrs())
|
||||
@ -389,7 +427,8 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
// Set things up so that the entire AST node that we just parsed, including attributes,
|
||||
// will be replaced with `target` in the lazy token stream. This will allow us to
|
||||
// cfg-expand this AST node.
|
||||
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
|
||||
let start_pos =
|
||||
if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
|
||||
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
|
||||
self.capture_state
|
||||
.parser_replacements
|
||||
|
@ -2487,7 +2487,8 @@ pub(super) fn handle_ambiguous_unbraced_const_arg(
|
||||
pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
|
||||
let start = self.token.span;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
|
||||
let (expr, _) =
|
||||
self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
|
||||
err.span_label(
|
||||
start.shrink_to_lo(),
|
||||
"while parsing a const generic argument starting here",
|
||||
@ -2613,7 +2614,7 @@ pub(super) fn recover_const_arg(
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
|
||||
})() {
|
||||
Ok(expr) => {
|
||||
Ok((expr, _)) => {
|
||||
// Find a mistake like `MyTrait<Assoc == S::Assoc>`.
|
||||
if snapshot.token == token::EqEq {
|
||||
err.span_suggestion(
|
||||
@ -2671,7 +2672,7 @@ pub(crate) fn recover_unbraced_const_arg_that_can_begin_ty(
|
||||
})() {
|
||||
// Since we don't know the exact reason why we failed to parse the type or the
|
||||
// expression, employ a simple heuristic to weed out some pathological cases.
|
||||
Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
|
||||
Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => {
|
||||
self.restore_snapshot(snapshot);
|
||||
Some(expr)
|
||||
}
|
||||
|
@ -36,7 +36,7 @@
|
||||
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
||||
use super::{
|
||||
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
|
||||
SemiColonMode, SeqSep, TokenType, Trailing,
|
||||
SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
|
||||
};
|
||||
use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
|
||||
|
||||
@ -59,15 +59,30 @@ pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
|
||||
self.current_closure.take();
|
||||
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.parse_expr_res(Restrictions::empty(), attrs)
|
||||
self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0)
|
||||
}
|
||||
|
||||
/// Parses an expression, forcing tokens to be collected.
|
||||
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
|
||||
self.current_closure.take();
|
||||
|
||||
// If the expression is associative (e.g. `1 + 2`), then any preceding
|
||||
// outer attribute actually belongs to the first inner sub-expression.
|
||||
// In which case we must use the pre-attr pos to include the attribute
|
||||
// in the collected tokens for the outer expression.
|
||||
let pre_attr_pos = self.collect_pos();
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs))
|
||||
self.collect_tokens(
|
||||
Some(pre_attr_pos),
|
||||
AttrWrapper::empty(),
|
||||
ForceCollect::Yes,
|
||||
|this, _empty_attrs| {
|
||||
let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?;
|
||||
let use_pre_attr_pos =
|
||||
if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No };
|
||||
Ok((expr, Trailing::No, use_pre_attr_pos))
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
|
||||
@ -77,7 +92,7 @@ pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
|
||||
fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
match self.parse_expr_res(restrictions, attrs) {
|
||||
Ok(expr) => Ok(expr),
|
||||
Ok((expr, _)) => Ok(expr),
|
||||
Err(err) => match self.token.ident() {
|
||||
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
|
||||
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
|
||||
@ -104,18 +119,20 @@ pub(super) fn parse_expr_res(
|
||||
&mut self,
|
||||
r: Restrictions,
|
||||
attrs: AttrWrapper,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
) -> PResult<'a, (P<Expr>, bool)> {
|
||||
self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs))
|
||||
}
|
||||
|
||||
/// Parses an associative expression with operators of at least `min_prec` precedence.
|
||||
/// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator
|
||||
/// followed by a subexpression (e.g. `1 + 2`).
|
||||
pub(super) fn parse_expr_assoc_with(
|
||||
&mut self,
|
||||
min_prec: usize,
|
||||
attrs: AttrWrapper,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
) -> PResult<'a, (P<Expr>, bool)> {
|
||||
let lhs = if self.token.is_range_separator() {
|
||||
return self.parse_expr_prefix_range(attrs);
|
||||
return self.parse_expr_prefix_range(attrs).map(|res| (res, false));
|
||||
} else {
|
||||
self.parse_expr_prefix(attrs)?
|
||||
};
|
||||
@ -123,15 +140,17 @@ pub(super) fn parse_expr_assoc_with(
|
||||
}
|
||||
|
||||
/// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
|
||||
/// of at least `min_prec` precedence.
|
||||
/// of at least `min_prec` precedence. The `bool` in the return value indicates if something
|
||||
/// was actually parsed.
|
||||
pub(super) fn parse_expr_assoc_rest_with(
|
||||
&mut self,
|
||||
min_prec: usize,
|
||||
starts_stmt: bool,
|
||||
mut lhs: P<Expr>,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
) -> PResult<'a, (P<Expr>, bool)> {
|
||||
let mut parsed_something = false;
|
||||
if !self.should_continue_as_assoc_expr(&lhs) {
|
||||
return Ok(lhs);
|
||||
return Ok((lhs, parsed_something));
|
||||
}
|
||||
|
||||
self.expected_tokens.push(TokenType::Operator);
|
||||
@ -156,10 +175,11 @@ pub(super) fn parse_expr_assoc_rest_with(
|
||||
self.err_larrow_operator(self.token.span);
|
||||
}
|
||||
|
||||
parsed_something = true;
|
||||
self.bump();
|
||||
if op.node.is_comparison() {
|
||||
if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
|
||||
return Ok(expr);
|
||||
return Ok((expr, parsed_something));
|
||||
}
|
||||
}
|
||||
|
||||
@ -263,7 +283,7 @@ pub(super) fn parse_expr_assoc_rest_with(
|
||||
// the special cases. The code is here only for future convenience.
|
||||
Fixity::None => 1,
|
||||
};
|
||||
let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
|
||||
let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
|
||||
let attrs = this.parse_outer_attributes()?;
|
||||
this.parse_expr_assoc_with(prec + prec_adjustment, attrs)
|
||||
})?;
|
||||
@ -319,7 +339,7 @@ pub(super) fn parse_expr_assoc_rest_with(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(lhs)
|
||||
Ok((lhs, parsed_something))
|
||||
}
|
||||
|
||||
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
|
||||
@ -441,7 +461,8 @@ fn parse_expr_range(
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
Some(
|
||||
self.parse_expr_assoc_with(prec + 1, attrs)
|
||||
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
|
||||
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?
|
||||
.0,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
@ -498,7 +519,7 @@ fn parse_expr_prefix_range(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>
|
||||
// RHS must be parsed with more associativity than the dots.
|
||||
let attrs = this.parse_outer_attributes()?;
|
||||
this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs)
|
||||
.map(|x| (lo.to(x.span), Some(x)))
|
||||
.map(|(x, _)| (lo.to(x.span), Some(x)))
|
||||
.map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
|
||||
} else {
|
||||
(lo, None)
|
||||
@ -2335,7 +2356,7 @@ fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
|
||||
let token = self.token.clone();
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
match self.parse_expr_res(restrictions, attrs) {
|
||||
Ok(expr) => expr,
|
||||
Ok((expr, _)) => expr,
|
||||
Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
|
||||
}
|
||||
}
|
||||
@ -2445,7 +2466,7 @@ fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
|
||||
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
|
||||
let lo = self.token.span;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
|
||||
let ty = if this.eat(&token::Colon) {
|
||||
this.parse_ty()?
|
||||
@ -2463,6 +2484,7 @@ fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
|
||||
is_placeholder: false,
|
||||
},
|
||||
Trailing::from(this.token == token::Comma),
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})
|
||||
}
|
||||
@ -2583,7 +2605,7 @@ fn parse_if_after_cond(&mut self, lo: Span, mut cond: P<Expr>) -> PResult<'a, P<
|
||||
/// Parses the condition of a `if` or `while` expression.
|
||||
fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let mut cond =
|
||||
let (mut cond, _) =
|
||||
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
|
||||
|
||||
CondChecker::new(self).visit_expr(&mut cond);
|
||||
@ -2632,7 +2654,7 @@ fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>>
|
||||
self.expect(&token::Eq)?;
|
||||
}
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
|
||||
let (expr, _) = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
|
||||
let span = lo.to(expr.span);
|
||||
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
|
||||
}
|
||||
@ -2766,7 +2788,7 @@ fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
|
||||
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
|
||||
// happen right before the return of this method.
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
|
||||
let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
|
||||
Ok(expr) => expr,
|
||||
Err(expr_err) => {
|
||||
// We don't know what followed the `in`, so cancel and bubble up the
|
||||
@ -2801,7 +2823,7 @@ fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
|
||||
}
|
||||
self.check_for_for_in_in_typo(self.prev_token.span);
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
||||
let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
||||
Ok((pat, expr))
|
||||
}
|
||||
|
||||
@ -2921,7 +2943,7 @@ pub(crate) fn eat_label(&mut self) -> Option<Label> {
|
||||
fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
|
||||
let match_span = self.prev_token.span;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
||||
let (scrutinee, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
||||
|
||||
self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
|
||||
}
|
||||
@ -3069,7 +3091,7 @@ fn parse_arm_body_missing_braces(
|
||||
|
||||
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
let lo = this.token.span;
|
||||
let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
|
||||
|
||||
@ -3126,7 +3148,7 @@ pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
||||
let arm_start_span = this.token.span;
|
||||
|
||||
let attrs = this.parse_outer_attributes()?;
|
||||
let expr =
|
||||
let (expr, _) =
|
||||
this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
|
||||
err.span_label(arrow_span, "while parsing the `match` arm starting here");
|
||||
err
|
||||
@ -3244,6 +3266,7 @@ pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
||||
is_placeholder: false,
|
||||
},
|
||||
Trailing::No,
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})
|
||||
}
|
||||
@ -3334,8 +3357,9 @@ fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Exp
|
||||
|
||||
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs).map_err(
|
||||
|mut err| {
|
||||
match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) {
|
||||
Ok((expr, _)) => Ok(expr),
|
||||
Err(mut err) => {
|
||||
if self.prev_token == token::OpenDelim(Delimiter::Brace) {
|
||||
let sugg_sp = self.prev_token.span.shrink_to_lo();
|
||||
// Consume everything within the braces, let's avoid further parse
|
||||
@ -3355,9 +3379,9 @@ fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
|
||||
err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
|
||||
}
|
||||
}
|
||||
err
|
||||
},
|
||||
)
|
||||
Err(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_builtin(&self) -> bool {
|
||||
@ -3708,7 +3732,7 @@ fn recover_ident_into_label(&mut self, ident: Ident) -> Label {
|
||||
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.recover_vcs_conflict_marker();
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
let lo = this.token.span;
|
||||
|
||||
// Check if a colon exists one ahead. This means we're parsing a fieldname.
|
||||
@ -3753,6 +3777,7 @@ fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
|
||||
is_placeholder: false,
|
||||
},
|
||||
Trailing::from(this.token == token::Comma),
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})
|
||||
}
|
||||
@ -3846,7 +3871,7 @@ fn collect_tokens_for_expr(
|
||||
attrs: AttrWrapper,
|
||||
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
let res = f(this, attrs)?;
|
||||
let trailing = Trailing::from(
|
||||
this.restrictions.contains(Restrictions::STMT_EXPR)
|
||||
@ -3856,7 +3881,7 @@ fn collect_tokens_for_expr(
|
||||
// `#[attr] expr,` always captures a trailing comma.
|
||||
|| this.token == token::Comma,
|
||||
);
|
||||
Ok((res, trailing))
|
||||
Ok((res, trailing, UsePreAttrPos::No))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -7,7 +7,7 @@
|
||||
use rustc_span::Span;
|
||||
use thin_vec::ThinVec;
|
||||
|
||||
use super::{ForceCollect, Parser, Trailing};
|
||||
use super::{ForceCollect, Parser, Trailing, UsePreAttrPos};
|
||||
use crate::errors::{
|
||||
self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
|
||||
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
|
||||
@ -169,14 +169,12 @@ pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::Generi
|
||||
let mut done = false;
|
||||
while !done {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let param =
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let param = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
if this.eat_keyword_noexpect(kw::SelfUpper) {
|
||||
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
|
||||
// as if `Self` never existed.
|
||||
this.dcx().emit_err(UnexpectedSelfInGenericParameters {
|
||||
span: this.prev_token.span,
|
||||
});
|
||||
this.dcx()
|
||||
.emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });
|
||||
|
||||
// Eat a trailing comma, if it exists.
|
||||
let _ = this.eat(&token::Comma);
|
||||
@ -191,17 +189,15 @@ pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::Generi
|
||||
(None, Vec::new())
|
||||
};
|
||||
|
||||
if this.check_noexpect(&token::Eq)
|
||||
&& this.look_ahead(1, |t| t.is_lifetime())
|
||||
{
|
||||
if this.check_noexpect(&token::Eq) && this.look_ahead(1, |t| t.is_lifetime()) {
|
||||
let lo = this.token.span;
|
||||
// Parse `= 'lifetime`.
|
||||
this.bump(); // `=`
|
||||
this.bump(); // `'lifetime`
|
||||
let span = lo.to(this.prev_token.span);
|
||||
this.dcx().emit_err(
|
||||
UnexpectedDefaultValueForLifetimeInGenericParameters { span },
|
||||
);
|
||||
this.dcx().emit_err(UnexpectedDefaultValueForLifetimeInGenericParameters {
|
||||
span,
|
||||
});
|
||||
}
|
||||
|
||||
Some(ast::GenericParam {
|
||||
@ -228,34 +224,32 @@ pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::Generi
|
||||
span: where_predicate.span(),
|
||||
});
|
||||
// FIXME - try to continue parsing other generics?
|
||||
return Ok((None, Trailing::No));
|
||||
}
|
||||
Err(err) => {
|
||||
err.cancel();
|
||||
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
|
||||
this.restore_snapshot(snapshot);
|
||||
return Ok((None, Trailing::No));
|
||||
}
|
||||
}
|
||||
return Ok((None, Trailing::No, UsePreAttrPos::No));
|
||||
} else {
|
||||
// Check for trailing attributes and stop parsing.
|
||||
if !attrs.is_empty() {
|
||||
if !params.is_empty() {
|
||||
this.dcx()
|
||||
.emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
|
||||
this.dcx().emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
|
||||
} else {
|
||||
this.dcx()
|
||||
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
|
||||
}
|
||||
}
|
||||
return Ok((None, Trailing::No));
|
||||
return Ok((None, Trailing::No, UsePreAttrPos::No));
|
||||
};
|
||||
|
||||
if !this.eat(&token::Comma) {
|
||||
done = true;
|
||||
}
|
||||
// We just ate the comma, so no need to capture the trailing token.
|
||||
Ok((param, Trailing::No))
|
||||
Ok((param, Trailing::No, UsePreAttrPos::No))
|
||||
})?;
|
||||
|
||||
if let Some(param) = param {
|
||||
|
@ -20,7 +20,9 @@
|
||||
|
||||
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
|
||||
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
||||
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing};
|
||||
use super::{
|
||||
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
|
||||
};
|
||||
use crate::errors::{self, MacroExpandsToAdtField};
|
||||
use crate::{fluent_generated as fluent, maybe_whole};
|
||||
|
||||
@ -127,7 +129,7 @@ pub(super) fn parse_item_common(
|
||||
Some(item.into_inner())
|
||||
});
|
||||
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, mut attrs| {
|
||||
self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
|
||||
let lo = this.token.span;
|
||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||
let mut def = this.parse_defaultness();
|
||||
@ -145,7 +147,7 @@ pub(super) fn parse_item_common(
|
||||
let span = lo.to(this.prev_token.span);
|
||||
let id = DUMMY_NODE_ID;
|
||||
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
|
||||
return Ok((Some(item), Trailing::No));
|
||||
return Ok((Some(item), Trailing::No, UsePreAttrPos::No));
|
||||
}
|
||||
|
||||
// At this point, we have failed to parse an item.
|
||||
@ -160,7 +162,7 @@ pub(super) fn parse_item_common(
|
||||
if !attrs_allowed {
|
||||
this.recover_attrs_no_item(&attrs)?;
|
||||
}
|
||||
Ok((None, Trailing::No))
|
||||
Ok((None, Trailing::No, UsePreAttrPos::No))
|
||||
})
|
||||
}
|
||||
|
||||
@ -1546,15 +1548,12 @@ fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
|
||||
self.recover_vcs_conflict_marker();
|
||||
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
|
||||
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
|
||||
self.collect_tokens_trailing_token(
|
||||
variant_attrs,
|
||||
ForceCollect::No,
|
||||
|this, variant_attrs| {
|
||||
self.collect_tokens(None, variant_attrs, ForceCollect::No, |this, variant_attrs| {
|
||||
let vlo = this.token.span;
|
||||
|
||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||
if !this.recover_nested_adt_item(kw::Enum)? {
|
||||
return Ok((None, Trailing::No));
|
||||
return Ok((None, Trailing::No, UsePreAttrPos::No));
|
||||
}
|
||||
let ident = this.parse_field_ident("enum", vlo)?;
|
||||
|
||||
@ -1566,7 +1565,7 @@ fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
|
||||
this.bump();
|
||||
this.parse_delim_args()?;
|
||||
|
||||
return Ok((None, Trailing::from(this.token == token::Comma)));
|
||||
return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
|
||||
}
|
||||
|
||||
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||
@ -1623,9 +1622,8 @@ fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
|
||||
is_placeholder: false,
|
||||
};
|
||||
|
||||
Ok((Some(vr), Trailing::from(this.token == token::Comma)))
|
||||
},
|
||||
)
|
||||
Ok((Some(vr), Trailing::from(this.token == token::Comma), UsePreAttrPos::No))
|
||||
})
|
||||
.map_err(|mut err| {
|
||||
err.help(help);
|
||||
err
|
||||
@ -1777,7 +1775,7 @@ pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef
|
||||
// Unit like structs are handled in parse_item_struct function
|
||||
self.parse_paren_comma_seq(|p| {
|
||||
let attrs = p.parse_outer_attributes()?;
|
||||
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
|
||||
p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
|
||||
let mut snapshot = None;
|
||||
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
|
||||
// Account for `<<<<<<<` diff markers. We can't proactively error here because
|
||||
@ -1816,6 +1814,7 @@ pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef
|
||||
is_placeholder: false,
|
||||
},
|
||||
Trailing::from(p.token == token::Comma),
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})
|
||||
})
|
||||
@ -1827,11 +1826,11 @@ fn parse_field_def(&mut self, adt_ty: &str) -> PResult<'a, FieldDef> {
|
||||
self.recover_vcs_conflict_marker();
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.recover_vcs_conflict_marker();
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
let lo = this.token.span;
|
||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||
this.parse_single_struct_field(adt_ty, lo, vis, attrs)
|
||||
.map(|field| (field, Trailing::No))
|
||||
.map(|field| (field, Trailing::No, UsePreAttrPos::No))
|
||||
})
|
||||
}
|
||||
|
||||
@ -2806,12 +2805,12 @@ pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinV
|
||||
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
|
||||
let lo = self.token.span;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
|
||||
if let Some(mut param) = this.parse_self_param()? {
|
||||
param.attrs = attrs;
|
||||
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
|
||||
return Ok((res?, Trailing::No));
|
||||
return Ok((res?, Trailing::No, UsePreAttrPos::No));
|
||||
}
|
||||
|
||||
let is_name_required = match this.token.kind {
|
||||
@ -2827,7 +2826,7 @@ fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResu
|
||||
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
|
||||
{
|
||||
let guar = err.emit();
|
||||
Ok((dummy_arg(ident, guar), Trailing::No))
|
||||
Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No))
|
||||
} else {
|
||||
Err(err)
|
||||
};
|
||||
@ -2871,6 +2870,7 @@ fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResu
|
||||
Ok((
|
||||
Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
|
||||
Trailing::No,
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
@ -14,7 +14,7 @@
|
||||
use std::ops::Range;
|
||||
use std::{fmt, mem, slice};
|
||||
|
||||
use attr_wrapper::AttrWrapper;
|
||||
use attr_wrapper::{AttrWrapper, UsePreAttrPos};
|
||||
pub use diagnostics::AttemptLocalParseRecovery;
|
||||
pub(crate) use expr::ForbiddenLetReason;
|
||||
pub(crate) use item::FnParseMode;
|
||||
@ -254,7 +254,7 @@ enum Capturing {
|
||||
Yes,
|
||||
}
|
||||
|
||||
// This state is used by `Parser::collect_tokens_trailing_token`.
|
||||
// This state is used by `Parser::collect_tokens`.
|
||||
#[derive(Clone, Debug)]
|
||||
struct CaptureState {
|
||||
capturing: Capturing,
|
||||
@ -466,8 +466,8 @@ pub fn new(
|
||||
parser.bump();
|
||||
|
||||
// Change this from 1 back to 0 after the bump. This eases debugging of
|
||||
// `Parser::collect_tokens_trailing_token` nicer because it makes the
|
||||
// token positions 0-indexed which is nicer than 1-indexed.
|
||||
// `Parser::collect_tokens` because 0-indexed token positions are nicer
|
||||
// than 1-indexed token positions.
|
||||
parser.num_bump_calls = 0;
|
||||
|
||||
parser
|
||||
@ -1553,11 +1553,9 @@ fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
|
||||
) -> PResult<'a, R> {
|
||||
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
|
||||
// `ForceCollect::Yes`
|
||||
self.collect_tokens_trailing_token(
|
||||
AttrWrapper::empty(),
|
||||
ForceCollect::Yes,
|
||||
|this, _attrs| Ok((f(this)?, Trailing::No)),
|
||||
)
|
||||
self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
|
||||
Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
|
||||
})
|
||||
}
|
||||
|
||||
/// `::{` or `::*`
|
||||
|
@ -13,7 +13,7 @@
|
||||
use rustc_span::{BytePos, ErrorGuaranteed, Span};
|
||||
use thin_vec::{thin_vec, ThinVec};
|
||||
|
||||
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing};
|
||||
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos};
|
||||
use crate::errors::{
|
||||
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
|
||||
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
|
||||
@ -403,7 +403,7 @@ fn maybe_recover_trailing_expr(
|
||||
|
||||
// Parse an associative expression such as `+ expr`, `% expr`, ...
|
||||
// Assignements, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
|
||||
if let Ok(expr) =
|
||||
if let Ok((expr, _)) =
|
||||
snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel())
|
||||
{
|
||||
// We got a valid expression.
|
||||
@ -1302,8 +1302,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, PatFieldsRest)
|
||||
}
|
||||
}
|
||||
|
||||
let field =
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
let field = match this.parse_pat_field(lo, attrs) {
|
||||
Ok(field) => Ok(field),
|
||||
Err(err) => {
|
||||
@ -1318,7 +1317,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, PatFieldsRest)
|
||||
last_non_comma_dotdot_span = Some(this.prev_token.span);
|
||||
|
||||
// We just ate a comma, so there's no need to capture a trailing token.
|
||||
Ok((field, Trailing::No))
|
||||
Ok((field, Trailing::No, UsePreAttrPos::No))
|
||||
})?;
|
||||
|
||||
fields.push(field)
|
||||
|
@ -913,7 +913,7 @@ pub(super) fn parse_generic_arg(
|
||||
let snapshot = self.create_snapshot_for_diagnostic();
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
|
||||
Ok(expr) => {
|
||||
Ok((expr, _)) => {
|
||||
return Ok(Some(self.dummy_const_arg_needs_braces(
|
||||
self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
|
||||
expr.span,
|
||||
|
@ -21,7 +21,7 @@
|
||||
use super::path::PathStyle;
|
||||
use super::{
|
||||
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
|
||||
Trailing,
|
||||
Trailing, UsePreAttrPos,
|
||||
};
|
||||
use crate::errors::MalformedLoopLabel;
|
||||
use crate::{errors, maybe_whole};
|
||||
@ -46,6 +46,7 @@ pub fn parse_stmt_without_recovery(
|
||||
capture_semi: bool,
|
||||
force_collect: ForceCollect,
|
||||
) -> PResult<'a, Option<Stmt>> {
|
||||
let pre_attr_pos = self.collect_pos();
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let lo = self.token.span;
|
||||
|
||||
@ -66,11 +67,15 @@ pub fn parse_stmt_without_recovery(
|
||||
}
|
||||
|
||||
Ok(Some(if self.token.is_keyword(kw::Let) {
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
|
||||
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
|
||||
this.expect_keyword(kw::Let)?;
|
||||
let local = this.parse_local(attrs)?;
|
||||
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
|
||||
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
|
||||
Ok((
|
||||
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
|
||||
trailing,
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})?
|
||||
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
|
||||
self.recover_stmt_local_after_let(
|
||||
@ -104,10 +109,18 @@ pub fn parse_stmt_without_recovery(
|
||||
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
||||
// that starts like a path (1 token), but it fact not a path.
|
||||
// Also, we avoid stealing syntax from `parse_item_`.
|
||||
let stmt = self.collect_tokens_trailing_token(
|
||||
//
|
||||
// `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the
|
||||
// expression, not the statement. (But the statement attributes/tokens are obtained
|
||||
// from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to
|
||||
// the things within `StmtKind`.)
|
||||
let stmt = self.collect_tokens(
|
||||
Some(pre_attr_pos),
|
||||
AttrWrapper::empty(),
|
||||
force_collect,
|
||||
|this, _empty_attrs| Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No)),
|
||||
|this, _empty_attrs| {
|
||||
Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes))
|
||||
},
|
||||
);
|
||||
match stmt {
|
||||
Ok(stmt) => stmt,
|
||||
@ -129,12 +142,15 @@ pub fn parse_stmt_without_recovery(
|
||||
self.error_outer_attrs(attrs);
|
||||
self.mk_stmt(lo, StmtKind::Empty)
|
||||
} else if self.token != token::CloseDelim(Delimiter::Brace) {
|
||||
// Remainder are line-expr stmts.
|
||||
let e = self.collect_tokens_trailing_token(
|
||||
// Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
|
||||
// above.
|
||||
let e = self.collect_tokens(
|
||||
Some(pre_attr_pos),
|
||||
AttrWrapper::empty(),
|
||||
force_collect,
|
||||
|this, _empty_attrs| {
|
||||
Ok((this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?, Trailing::No))
|
||||
let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?;
|
||||
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
|
||||
},
|
||||
)?;
|
||||
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
|
||||
@ -151,12 +167,16 @@ pub fn parse_stmt_without_recovery(
|
||||
}
|
||||
|
||||
fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
|
||||
let stmt = self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||
let path = this.parse_path(PathStyle::Expr)?;
|
||||
|
||||
if this.eat(&token::Not) {
|
||||
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
|
||||
return Ok((stmt_mac, Trailing::from(this.token == token::Semi)));
|
||||
return Ok((
|
||||
stmt_mac,
|
||||
Trailing::from(this.token == token::Semi),
|
||||
UsePreAttrPos::No,
|
||||
));
|
||||
}
|
||||
|
||||
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
|
||||
@ -170,13 +190,17 @@ fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a,
|
||||
this.parse_expr_dot_or_call_with(attrs, expr, lo)
|
||||
})?;
|
||||
// `DUMMY_SP` will get overwritten later in this function
|
||||
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), Trailing::No))
|
||||
Ok((
|
||||
this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)),
|
||||
Trailing::No,
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})?;
|
||||
|
||||
if let StmtKind::Expr(expr) = stmt.kind {
|
||||
// Perform this outside of the `collect_tokens_trailing_token` closure,
|
||||
// since our outer attributes do not apply to this part of the expression
|
||||
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
|
||||
// Perform this outside of the `collect_tokens` closure, since our
|
||||
// outer attributes do not apply to this part of the expression.
|
||||
let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| {
|
||||
this.parse_expr_assoc_rest_with(0, true, expr)
|
||||
})?;
|
||||
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
|
||||
@ -210,7 +234,7 @@ fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResu
|
||||
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
|
||||
let e = self.maybe_recover_from_bad_qpath(e)?;
|
||||
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
|
||||
let e = self.parse_expr_assoc_rest_with(0, false, e)?;
|
||||
let (e, _) = self.parse_expr_assoc_rest_with(0, false, e)?;
|
||||
StmtKind::Expr(e)
|
||||
};
|
||||
Ok(self.mk_stmt(lo.to(hi), kind))
|
||||
@ -240,10 +264,14 @@ fn recover_stmt_local_after_let(
|
||||
subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
|
||||
force_collect: ForceCollect,
|
||||
) -> PResult<'a, Stmt> {
|
||||
let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
|
||||
let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
|
||||
let local = this.parse_local(attrs)?;
|
||||
// FIXME - maybe capture semicolon in recovery?
|
||||
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), Trailing::No))
|
||||
Ok((
|
||||
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
|
||||
Trailing::No,
|
||||
UsePreAttrPos::No,
|
||||
))
|
||||
})?;
|
||||
self.dcx()
|
||||
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
|
||||
|
42
tests/ui/attributes/assoc-expr.rs
Normal file
42
tests/ui/attributes/assoc-expr.rs
Normal file
@ -0,0 +1,42 @@
|
||||
//@ check-pass
|
||||
// This test triggered an assertion failure in token collection due to
|
||||
// mishandling of attributes on associative expressions.
|
||||
|
||||
#![feature(cfg_eval)]
|
||||
#![feature(rustc_attrs)]
|
||||
#![feature(stmt_expr_attributes)]
|
||||
#![allow(internal_features)]
|
||||
|
||||
fn main() {}
|
||||
|
||||
#[cfg_eval]
|
||||
struct Foo1(
|
||||
[ bool; {
|
||||
let _x = 30;
|
||||
#[cfg_attr(unix, rustc_dummy(aa))] 1
|
||||
} ]
|
||||
);
|
||||
|
||||
#[cfg_eval]
|
||||
struct Foo12(
|
||||
[ bool; {
|
||||
let _x = 30;
|
||||
#[cfg_attr(unix, rustc_dummy(bb))] 1 + 2
|
||||
} ]
|
||||
);
|
||||
|
||||
#[cfg_eval]
|
||||
struct Foox(
|
||||
[ bool; {
|
||||
let _x = 30;
|
||||
#[cfg_attr(unix, rustc_dummy(cc))] _x
|
||||
} ]
|
||||
);
|
||||
|
||||
#[cfg_eval]
|
||||
struct Foox2(
|
||||
[ bool; {
|
||||
let _x = 30;
|
||||
#[cfg_attr(unix, rustc_dummy(dd))] _x + 2
|
||||
} ]
|
||||
);
|
@ -51,14 +51,6 @@ macro_rules! c1 {
|
||||
};
|
||||
}
|
||||
|
||||
// FIXME: temporary
|
||||
macro_rules! c2 {
|
||||
($frag:ident, [$($tt:tt)*], $s1:literal, $s2:literal) => {
|
||||
assert_eq!($frag!($($tt)*), $s1);
|
||||
assert_eq!(stringify!($($tt)*), $s2);
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_block() {
|
||||
c1!(block, [ {} ], "{}");
|
||||
@ -344,10 +336,10 @@ fn test_expr() {
|
||||
// Ones involving attributes.
|
||||
c1!(expr, [ #[aa] 1 ], "#[aa] 1");
|
||||
c1!(expr, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
|
||||
c2!(expr, [ #[aa] 1 + 2 ], "1 + 2", "#[aa] 1 + 2"); // FIXME
|
||||
c2!(expr, [ #[aa] x + 2 ], "x + 2", "#[aa] x + 2"); // FIXME
|
||||
c2!(expr, [ #[aa] 1 / #[bb] 2 ], "1 / #[bb] 2", "#[aa] 1 / #[bb] 2"); // FIXME
|
||||
c2!(expr, [ #[aa] x / #[bb] 2 ], "x / #[bb] 2", "#[aa] x / #[bb] 2"); // FIXME
|
||||
c1!(expr, [ #[aa] 1 + 2 ], "#[aa] 1 + 2");
|
||||
c1!(expr, [ #[aa] x + 2 ], "#[aa] x + 2");
|
||||
c1!(expr, [ #[aa] 1 / #[bb] 2 ], "#[aa] 1 / #[bb] 2");
|
||||
c1!(expr, [ #[aa] x / #[bb] 2 ], "#[aa] x / #[bb] 2");
|
||||
c1!(expr, [ 1 << #[bb] 2 ], "1 << #[bb] 2");
|
||||
c1!(expr, [ x << #[bb] 2 ], "x << #[bb] 2");
|
||||
c1!(expr, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
|
||||
@ -659,10 +651,10 @@ fn test_stmt() {
|
||||
// Ones involving attributes.
|
||||
c1!(stmt, [ #[aa] 1 ], "#[aa] 1");
|
||||
c1!(stmt, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
|
||||
c2!(stmt, [ #[aa] 1 as u32 ], "1 as u32", "#[aa] 1 as u32"); // FIXME
|
||||
c2!(stmt, [ #[aa] x as u32 ], "x as u32", "#[aa] x as u32"); // FIXME
|
||||
c2!(stmt, [ #[aa] 1 .. #[bb] 2 ], "1 .. #[bb] 2", "#[aa] 1 .. #[bb] 2"); // FIXME
|
||||
c2!(stmt, [ #[aa] x .. #[bb] 2 ], "x .. #[bb] 2", "#[aa] x .. #[bb] 2"); // FIXME
|
||||
c1!(stmt, [ #[aa] 1 as u32 ], "#[aa] 1 as u32");
|
||||
c1!(stmt, [ #[aa] x as u32 ], "#[aa] x as u32");
|
||||
c1!(stmt, [ #[aa] 1 .. #[bb] 2 ], "#[aa] 1 .. #[bb] 2");
|
||||
c1!(stmt, [ #[aa] x .. #[bb] 2 ], "#[aa] x .. #[bb] 2");
|
||||
c1!(stmt, [ 1 || #[bb] 2 ], "1 || #[bb] 2");
|
||||
c1!(stmt, [ x || #[bb] 2 ], "x || #[bb] 2");
|
||||
c1!(stmt, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
|
||||
|
Loading…
Reference in New Issue
Block a user