Overhaul token collection.
This commit does the following. - Renames `collect_tokens_trailing_token` as `collect_tokens`, because (a) it's annoyingly long, and (b) the `_trailing_token` bit is less accurate now that its types have changed. - In `collect_tokens`, adds an `Option<CollectPos>` argument and a `UsePreAttrPos` in the return type of `f`. These are used in `parse_expr_force_collect` (for vanilla expressions) and in `parse_stmt_without_recovery` (for two different cases of expression statements). Together these are enough to fix all the problems with token collection and assoc expressions. The changes to the `stringify.rs` test demonstrate some of these. - Adds a new test. The code in this test was causing an assertion failure prior to this commit, due to an invalid `NodeRange`. The extra complexity is annoying, but necessary to fix the existing problems.
This commit is contained in:
parent
fe460ac28b
commit
9d31f86f0d
@ -10,6 +10,7 @@
|
|||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
|
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
|
||||||
|
UsePreAttrPos,
|
||||||
};
|
};
|
||||||
use crate::{errors, fluent_generated as fluent, maybe_whole};
|
use crate::{errors, fluent_generated as fluent, maybe_whole};
|
||||||
|
|
||||||
@ -259,7 +260,8 @@ pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerA
|
|||||||
pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
|
pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
|
||||||
maybe_whole!(self, NtMeta, |attr| attr.into_inner());
|
maybe_whole!(self, NtMeta, |attr| attr.into_inner());
|
||||||
|
|
||||||
let do_parse = |this: &mut Self, _empty_attrs| {
|
// Attr items don't have attributes.
|
||||||
|
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
|
||||||
let is_unsafe = this.eat_keyword(kw::Unsafe);
|
let is_unsafe = this.eat_keyword(kw::Unsafe);
|
||||||
let unsafety = if is_unsafe {
|
let unsafety = if is_unsafe {
|
||||||
let unsafe_span = this.prev_token.span;
|
let unsafe_span = this.prev_token.span;
|
||||||
@ -275,10 +277,12 @@ pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, as
|
|||||||
if is_unsafe {
|
if is_unsafe {
|
||||||
this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
|
this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
|
||||||
}
|
}
|
||||||
Ok((ast::AttrItem { unsafety, path, args, tokens: None }, Trailing::No))
|
Ok((
|
||||||
};
|
ast::AttrItem { unsafety, path, args, tokens: None },
|
||||||
// Attr items don't have attributes.
|
Trailing::No,
|
||||||
self.collect_tokens_trailing_token(AttrWrapper::empty(), force_collect, do_parse)
|
UsePreAttrPos::No,
|
||||||
|
))
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parses attributes that appear after the opening of an item. These should
|
/// Parses attributes that appear after the opening of an item. These should
|
||||||
@ -311,8 +315,8 @@ pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
|
|||||||
};
|
};
|
||||||
if let Some(attr) = attr {
|
if let Some(attr) = attr {
|
||||||
// If we are currently capturing tokens (i.e. we are within a call to
|
// If we are currently capturing tokens (i.e. we are within a call to
|
||||||
// `Parser::collect_tokens_trailing_tokens`) record the token positions of this
|
// `Parser::collect_tokens`) record the token positions of this inner attribute,
|
||||||
// inner attribute, for possible later processing in a `LazyAttrTokenStream`.
|
// for possible later processing in a `LazyAttrTokenStream`.
|
||||||
if let Capturing::Yes = self.capture_state.capturing {
|
if let Capturing::Yes = self.capture_state.capturing {
|
||||||
let end_pos = self.num_bump_calls;
|
let end_pos = self.num_bump_calls;
|
||||||
let parser_range = ParserRange(start_pos..end_pos);
|
let parser_range = ParserRange(start_pos..end_pos);
|
||||||
|
@ -15,6 +15,20 @@
|
|||||||
TokenCursor, Trailing,
|
TokenCursor, Trailing,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// When collecting tokens, this fully captures the start point. Usually its
|
||||||
|
// just after outer attributes, but occasionally it's before.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub(super) struct CollectPos {
|
||||||
|
start_token: (Token, Spacing),
|
||||||
|
cursor_snapshot: TokenCursor,
|
||||||
|
start_pos: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) enum UsePreAttrPos {
|
||||||
|
No,
|
||||||
|
Yes,
|
||||||
|
}
|
||||||
|
|
||||||
/// A wrapper type to ensure that the parser handles outer attributes correctly.
|
/// A wrapper type to ensure that the parser handles outer attributes correctly.
|
||||||
/// When we parse outer attributes, we need to ensure that we capture tokens
|
/// When we parse outer attributes, we need to ensure that we capture tokens
|
||||||
/// for the attribute target. This allows us to perform cfg-expansion on
|
/// for the attribute target. This allows us to perform cfg-expansion on
|
||||||
@ -22,7 +36,7 @@
|
|||||||
///
|
///
|
||||||
/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
|
/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
|
||||||
/// Parsing code can only get access to the underlying attributes
|
/// Parsing code can only get access to the underlying attributes
|
||||||
/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
|
/// by passing an `AttrWrapper` to `collect_tokens`.
|
||||||
/// This makes it difficult to accidentally construct an AST node
|
/// This makes it difficult to accidentally construct an AST node
|
||||||
/// (which stores an `ast::AttrVec`) without first collecting tokens.
|
/// (which stores an `ast::AttrVec`) without first collecting tokens.
|
||||||
///
|
///
|
||||||
@ -33,16 +47,18 @@ pub(super) struct AttrWrapper {
|
|||||||
attrs: AttrVec,
|
attrs: AttrVec,
|
||||||
// The start of the outer attributes in the parser's token stream.
|
// The start of the outer attributes in the parser's token stream.
|
||||||
// This lets us create a `NodeReplacement` for the entire attribute
|
// This lets us create a `NodeReplacement` for the entire attribute
|
||||||
// target, including outer attributes.
|
// target, including outer attributes. `None` if there are no outer
|
||||||
start_pos: u32,
|
// attributes.
|
||||||
|
start_pos: Option<u32>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AttrWrapper {
|
impl AttrWrapper {
|
||||||
pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
|
pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
|
||||||
AttrWrapper { attrs, start_pos }
|
AttrWrapper { attrs, start_pos: Some(start_pos) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn empty() -> AttrWrapper {
|
pub(super) fn empty() -> AttrWrapper {
|
||||||
AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
|
AttrWrapper { attrs: AttrVec::new(), start_pos: None }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
|
pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
|
||||||
@ -77,7 +93,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// From a value of this type we can reconstruct the `TokenStream` seen by the
|
// From a value of this type we can reconstruct the `TokenStream` seen by the
|
||||||
// `f` callback passed to a call to `Parser::collect_tokens_trailing_token`, by
|
// `f` callback passed to a call to `Parser::collect_tokens`, by
|
||||||
// replaying the getting of the tokens. This saves us producing a `TokenStream`
|
// replaying the getting of the tokens. This saves us producing a `TokenStream`
|
||||||
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
|
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
|
||||||
// passed to a proc macro. In practice, token stream creation happens rarely
|
// passed to a proc macro. In practice, token stream creation happens rarely
|
||||||
@ -166,16 +182,30 @@ fn to_attr_token_stream(&self) -> AttrTokenStream {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Parser<'a> {
|
impl<'a> Parser<'a> {
|
||||||
|
pub(super) fn collect_pos(&self) -> CollectPos {
|
||||||
|
CollectPos {
|
||||||
|
start_token: (self.token.clone(), self.token_spacing),
|
||||||
|
cursor_snapshot: self.token_cursor.clone(),
|
||||||
|
start_pos: self.num_bump_calls,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Parses code with `f`. If appropriate, it records the tokens (in
|
/// Parses code with `f`. If appropriate, it records the tokens (in
|
||||||
/// `LazyAttrTokenStream` form) that were parsed in the result, accessible
|
/// `LazyAttrTokenStream` form) that were parsed in the result, accessible
|
||||||
/// via the `HasTokens` trait. The `Trailing` part of the callback's
|
/// via the `HasTokens` trait. The `Trailing` part of the callback's
|
||||||
/// result indicates if an extra token should be captured, e.g. a comma or
|
/// result indicates if an extra token should be captured, e.g. a comma or
|
||||||
/// semicolon.
|
/// semicolon. The `UsePreAttrPos` part of the callback's result indicates
|
||||||
|
/// if we should use `pre_attr_pos` as the collection start position (only
|
||||||
|
/// required in a few cases).
|
||||||
///
|
///
|
||||||
/// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
|
/// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
|
||||||
/// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
|
/// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
|
||||||
/// details.
|
/// details.
|
||||||
///
|
///
|
||||||
|
/// `pre_attr_pos` is the position before the outer attributes (or the node
|
||||||
|
/// itself, if no outer attributes are present). It is only needed if `f`
|
||||||
|
/// can return `UsePreAttrPos::Yes`.
|
||||||
|
///
|
||||||
/// Note: If your callback consumes an opening delimiter (including the
|
/// Note: If your callback consumes an opening delimiter (including the
|
||||||
/// case where `self.token` is an opening delimiter on entry to this
|
/// case where `self.token` is an opening delimiter on entry to this
|
||||||
/// function), you must also consume the corresponding closing delimiter.
|
/// function), you must also consume the corresponding closing delimiter.
|
||||||
@ -197,11 +227,12 @@ impl<'a> Parser<'a> {
|
|||||||
/// } // 32..33
|
/// } // 32..33
|
||||||
/// } // 33..34
|
/// } // 33..34
|
||||||
/// ```
|
/// ```
|
||||||
pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
|
||||||
&mut self,
|
&mut self,
|
||||||
|
pre_attr_pos: Option<CollectPos>,
|
||||||
attrs: AttrWrapper,
|
attrs: AttrWrapper,
|
||||||
force_collect: ForceCollect,
|
force_collect: ForceCollect,
|
||||||
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, Trailing)>,
|
f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
|
||||||
) -> PResult<'a, R> {
|
) -> PResult<'a, R> {
|
||||||
// We must collect if anything could observe the collected tokens, i.e.
|
// We must collect if anything could observe the collected tokens, i.e.
|
||||||
// if any of the following conditions hold.
|
// if any of the following conditions hold.
|
||||||
@ -220,23 +251,20 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
|||||||
return Ok(f(self, attrs.attrs)?.0);
|
return Ok(f(self, attrs.attrs)?.0);
|
||||||
}
|
}
|
||||||
|
|
||||||
let start_token = (self.token.clone(), self.token_spacing);
|
let mut collect_pos = self.collect_pos();
|
||||||
let cursor_snapshot = self.token_cursor.clone();
|
|
||||||
let start_pos = self.num_bump_calls;
|
|
||||||
let has_outer_attrs = !attrs.attrs.is_empty();
|
let has_outer_attrs = !attrs.attrs.is_empty();
|
||||||
let parser_replacements_start = self.capture_state.parser_replacements.len();
|
let parser_replacements_start = self.capture_state.parser_replacements.len();
|
||||||
|
|
||||||
// We set and restore `Capturing::Yes` on either side of the call to
|
// We set and restore `Capturing::Yes` on either side of the call to
|
||||||
// `f`, so we can distinguish the outermost call to
|
// `f`, so we can distinguish the outermost call to `collect_tokens`
|
||||||
// `collect_tokens_trailing_token` (e.g. parsing `m` in the example
|
// (e.g. parsing `m` in the example above) from any inner (indirectly
|
||||||
// above) from any inner (indirectly recursive) calls (e.g. parsing `g`
|
// recursive) calls (e.g. parsing `g` in the example above). This
|
||||||
// in the example above). This distinction is used below and in
|
// distinction is used below and in `Parser::parse_inner_attributes`.
|
||||||
// `Parser::parse_inner_attributes`.
|
let (mut ret, capture_trailing, use_pre_attr_pos) = {
|
||||||
let (mut ret, capture_trailing) = {
|
|
||||||
let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
|
let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
|
||||||
let f_res = f(self, attrs.attrs);
|
let res = f(self, attrs.attrs);
|
||||||
self.capture_state.capturing = prev_capturing;
|
self.capture_state.capturing = prev_capturing;
|
||||||
f_res?
|
res?
|
||||||
};
|
};
|
||||||
|
|
||||||
// When we're not in `capture_cfg` mode, then skip collecting and
|
// When we're not in `capture_cfg` mode, then skip collecting and
|
||||||
@ -279,6 +307,14 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
|||||||
return Ok(ret);
|
return Ok(ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Replace the post-attribute collection start position with the
|
||||||
|
// pre-attribute position supplied, if `f` indicated it is necessary.
|
||||||
|
// (The caller is responsible for providing a non-`None` `pre_attr_pos`
|
||||||
|
// if this is a possibility.)
|
||||||
|
if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
|
||||||
|
collect_pos = pre_attr_pos.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
let parser_replacements_end = self.capture_state.parser_replacements.len();
|
let parser_replacements_end = self.capture_state.parser_replacements.len();
|
||||||
|
|
||||||
assert!(
|
assert!(
|
||||||
@ -294,7 +330,7 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
|||||||
// `AttrTokenStream`, we will create the proper token.
|
// `AttrTokenStream`, we will create the proper token.
|
||||||
+ self.break_last_token as u32;
|
+ self.break_last_token as u32;
|
||||||
|
|
||||||
let num_calls = end_pos - start_pos;
|
let num_calls = end_pos - collect_pos.start_pos;
|
||||||
|
|
||||||
// Take the captured `ParserRange`s for any inner attributes that we parsed in
|
// Take the captured `ParserRange`s for any inner attributes that we parsed in
|
||||||
// `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
|
// `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
|
||||||
@ -328,7 +364,9 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
|||||||
.iter()
|
.iter()
|
||||||
.cloned()
|
.cloned()
|
||||||
.chain(inner_attr_parser_replacements.iter().cloned())
|
.chain(inner_attr_parser_replacements.iter().cloned())
|
||||||
.map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data))
|
.map(|(parser_range, data)| {
|
||||||
|
(NodeRange::new(parser_range, collect_pos.start_pos), data)
|
||||||
|
})
|
||||||
.collect()
|
.collect()
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -355,9 +393,9 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
|||||||
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
|
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
|
||||||
|
|
||||||
let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
|
let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
|
||||||
start_token,
|
start_token: collect_pos.start_token,
|
||||||
|
cursor_snapshot: collect_pos.cursor_snapshot,
|
||||||
num_calls,
|
num_calls,
|
||||||
cursor_snapshot,
|
|
||||||
break_last_token: self.break_last_token,
|
break_last_token: self.break_last_token,
|
||||||
node_replacements,
|
node_replacements,
|
||||||
});
|
});
|
||||||
@ -368,9 +406,9 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// If `capture_cfg` is set and we're inside a recursive call to
|
// If `capture_cfg` is set and we're inside a recursive call to
|
||||||
// `collect_tokens_trailing_token`, then we need to register a replace range
|
// `collect_tokens`, then we need to register a replace range if we
|
||||||
// if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion
|
// have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager
|
||||||
// on the captured token stream.
|
// cfg-expansion on the captured token stream.
|
||||||
if self.capture_cfg
|
if self.capture_cfg
|
||||||
&& matches!(self.capture_state.capturing, Capturing::Yes)
|
&& matches!(self.capture_state.capturing, Capturing::Yes)
|
||||||
&& has_cfg_or_cfg_attr(ret.attrs())
|
&& has_cfg_or_cfg_attr(ret.attrs())
|
||||||
@ -389,7 +427,8 @@ pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
|||||||
// Set things up so that the entire AST node that we just parsed, including attributes,
|
// Set things up so that the entire AST node that we just parsed, including attributes,
|
||||||
// will be replaced with `target` in the lazy token stream. This will allow us to
|
// will be replaced with `target` in the lazy token stream. This will allow us to
|
||||||
// cfg-expand this AST node.
|
// cfg-expand this AST node.
|
||||||
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
|
let start_pos =
|
||||||
|
if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
|
||||||
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
|
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
|
||||||
self.capture_state
|
self.capture_state
|
||||||
.parser_replacements
|
.parser_replacements
|
||||||
|
@ -2487,13 +2487,14 @@ pub(super) fn handle_ambiguous_unbraced_const_arg(
|
|||||||
pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
|
pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
|
||||||
let start = self.token.span;
|
let start = self.token.span;
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
|
let (expr, _) =
|
||||||
err.span_label(
|
self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
|
||||||
start.shrink_to_lo(),
|
err.span_label(
|
||||||
"while parsing a const generic argument starting here",
|
start.shrink_to_lo(),
|
||||||
);
|
"while parsing a const generic argument starting here",
|
||||||
err
|
);
|
||||||
})?;
|
err
|
||||||
|
})?;
|
||||||
if !self.expr_is_valid_const_arg(&expr) {
|
if !self.expr_is_valid_const_arg(&expr) {
|
||||||
self.dcx().emit_err(ConstGenericWithoutBraces {
|
self.dcx().emit_err(ConstGenericWithoutBraces {
|
||||||
span: expr.span,
|
span: expr.span,
|
||||||
@ -2613,7 +2614,7 @@ pub(super) fn recover_const_arg(
|
|||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
|
self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
|
||||||
})() {
|
})() {
|
||||||
Ok(expr) => {
|
Ok((expr, _)) => {
|
||||||
// Find a mistake like `MyTrait<Assoc == S::Assoc>`.
|
// Find a mistake like `MyTrait<Assoc == S::Assoc>`.
|
||||||
if snapshot.token == token::EqEq {
|
if snapshot.token == token::EqEq {
|
||||||
err.span_suggestion(
|
err.span_suggestion(
|
||||||
@ -2671,7 +2672,7 @@ pub(crate) fn recover_unbraced_const_arg_that_can_begin_ty(
|
|||||||
})() {
|
})() {
|
||||||
// Since we don't know the exact reason why we failed to parse the type or the
|
// Since we don't know the exact reason why we failed to parse the type or the
|
||||||
// expression, employ a simple heuristic to weed out some pathological cases.
|
// expression, employ a simple heuristic to weed out some pathological cases.
|
||||||
Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
|
Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => {
|
||||||
self.restore_snapshot(snapshot);
|
self.restore_snapshot(snapshot);
|
||||||
Some(expr)
|
Some(expr)
|
||||||
}
|
}
|
||||||
|
@ -36,7 +36,7 @@
|
|||||||
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
||||||
use super::{
|
use super::{
|
||||||
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
|
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
|
||||||
SemiColonMode, SeqSep, TokenType, Trailing,
|
SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
|
||||||
};
|
};
|
||||||
use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
|
use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
|
||||||
|
|
||||||
@ -59,15 +59,30 @@ pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
|
|||||||
self.current_closure.take();
|
self.current_closure.take();
|
||||||
|
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.parse_expr_res(Restrictions::empty(), attrs)
|
self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parses an expression, forcing tokens to be collected.
|
/// Parses an expression, forcing tokens to be collected.
|
||||||
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
|
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
|
||||||
self.current_closure.take();
|
self.current_closure.take();
|
||||||
|
|
||||||
|
// If the expression is associative (e.g. `1 + 2`), then any preceding
|
||||||
|
// outer attribute actually belongs to the first inner sub-expression.
|
||||||
|
// In which case we must use the pre-attr pos to include the attribute
|
||||||
|
// in the collected tokens for the outer expression.
|
||||||
|
let pre_attr_pos = self.collect_pos();
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs))
|
self.collect_tokens(
|
||||||
|
Some(pre_attr_pos),
|
||||||
|
AttrWrapper::empty(),
|
||||||
|
ForceCollect::Yes,
|
||||||
|
|this, _empty_attrs| {
|
||||||
|
let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?;
|
||||||
|
let use_pre_attr_pos =
|
||||||
|
if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No };
|
||||||
|
Ok((expr, Trailing::No, use_pre_attr_pos))
|
||||||
|
},
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
|
pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
|
||||||
@ -77,7 +92,7 @@ pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
|
|||||||
fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
|
fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
match self.parse_expr_res(restrictions, attrs) {
|
match self.parse_expr_res(restrictions, attrs) {
|
||||||
Ok(expr) => Ok(expr),
|
Ok((expr, _)) => Ok(expr),
|
||||||
Err(err) => match self.token.ident() {
|
Err(err) => match self.token.ident() {
|
||||||
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
|
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
|
||||||
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
|
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
|
||||||
@ -104,18 +119,20 @@ pub(super) fn parse_expr_res(
|
|||||||
&mut self,
|
&mut self,
|
||||||
r: Restrictions,
|
r: Restrictions,
|
||||||
attrs: AttrWrapper,
|
attrs: AttrWrapper,
|
||||||
) -> PResult<'a, P<Expr>> {
|
) -> PResult<'a, (P<Expr>, bool)> {
|
||||||
self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs))
|
self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parses an associative expression with operators of at least `min_prec` precedence.
|
/// Parses an associative expression with operators of at least `min_prec` precedence.
|
||||||
|
/// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator
|
||||||
|
/// followed by a subexpression (e.g. `1 + 2`).
|
||||||
pub(super) fn parse_expr_assoc_with(
|
pub(super) fn parse_expr_assoc_with(
|
||||||
&mut self,
|
&mut self,
|
||||||
min_prec: usize,
|
min_prec: usize,
|
||||||
attrs: AttrWrapper,
|
attrs: AttrWrapper,
|
||||||
) -> PResult<'a, P<Expr>> {
|
) -> PResult<'a, (P<Expr>, bool)> {
|
||||||
let lhs = if self.token.is_range_separator() {
|
let lhs = if self.token.is_range_separator() {
|
||||||
return self.parse_expr_prefix_range(attrs);
|
return self.parse_expr_prefix_range(attrs).map(|res| (res, false));
|
||||||
} else {
|
} else {
|
||||||
self.parse_expr_prefix(attrs)?
|
self.parse_expr_prefix(attrs)?
|
||||||
};
|
};
|
||||||
@ -123,15 +140,17 @@ pub(super) fn parse_expr_assoc_with(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
|
/// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
|
||||||
/// of at least `min_prec` precedence.
|
/// of at least `min_prec` precedence. The `bool` in the return value indicates if something
|
||||||
|
/// was actually parsed.
|
||||||
pub(super) fn parse_expr_assoc_rest_with(
|
pub(super) fn parse_expr_assoc_rest_with(
|
||||||
&mut self,
|
&mut self,
|
||||||
min_prec: usize,
|
min_prec: usize,
|
||||||
starts_stmt: bool,
|
starts_stmt: bool,
|
||||||
mut lhs: P<Expr>,
|
mut lhs: P<Expr>,
|
||||||
) -> PResult<'a, P<Expr>> {
|
) -> PResult<'a, (P<Expr>, bool)> {
|
||||||
|
let mut parsed_something = false;
|
||||||
if !self.should_continue_as_assoc_expr(&lhs) {
|
if !self.should_continue_as_assoc_expr(&lhs) {
|
||||||
return Ok(lhs);
|
return Ok((lhs, parsed_something));
|
||||||
}
|
}
|
||||||
|
|
||||||
self.expected_tokens.push(TokenType::Operator);
|
self.expected_tokens.push(TokenType::Operator);
|
||||||
@ -156,10 +175,11 @@ pub(super) fn parse_expr_assoc_rest_with(
|
|||||||
self.err_larrow_operator(self.token.span);
|
self.err_larrow_operator(self.token.span);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
parsed_something = true;
|
||||||
self.bump();
|
self.bump();
|
||||||
if op.node.is_comparison() {
|
if op.node.is_comparison() {
|
||||||
if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
|
if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
|
||||||
return Ok(expr);
|
return Ok((expr, parsed_something));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -263,7 +283,7 @@ pub(super) fn parse_expr_assoc_rest_with(
|
|||||||
// the special cases. The code is here only for future convenience.
|
// the special cases. The code is here only for future convenience.
|
||||||
Fixity::None => 1,
|
Fixity::None => 1,
|
||||||
};
|
};
|
||||||
let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
|
let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
|
||||||
let attrs = this.parse_outer_attributes()?;
|
let attrs = this.parse_outer_attributes()?;
|
||||||
this.parse_expr_assoc_with(prec + prec_adjustment, attrs)
|
this.parse_expr_assoc_with(prec + prec_adjustment, attrs)
|
||||||
})?;
|
})?;
|
||||||
@ -319,7 +339,7 @@ pub(super) fn parse_expr_assoc_rest_with(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(lhs)
|
Ok((lhs, parsed_something))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
|
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
|
||||||
@ -441,7 +461,8 @@ fn parse_expr_range(
|
|||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
Some(
|
Some(
|
||||||
self.parse_expr_assoc_with(prec + 1, attrs)
|
self.parse_expr_assoc_with(prec + 1, attrs)
|
||||||
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
|
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?
|
||||||
|
.0,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
@ -498,7 +519,7 @@ fn parse_expr_prefix_range(&mut self, attrs: AttrWrapper) -> PResult<'a, P<Expr>
|
|||||||
// RHS must be parsed with more associativity than the dots.
|
// RHS must be parsed with more associativity than the dots.
|
||||||
let attrs = this.parse_outer_attributes()?;
|
let attrs = this.parse_outer_attributes()?;
|
||||||
this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs)
|
this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs)
|
||||||
.map(|x| (lo.to(x.span), Some(x)))
|
.map(|(x, _)| (lo.to(x.span), Some(x)))
|
||||||
.map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
|
.map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
|
||||||
} else {
|
} else {
|
||||||
(lo, None)
|
(lo, None)
|
||||||
@ -2335,7 +2356,7 @@ fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
|
|||||||
let token = self.token.clone();
|
let token = self.token.clone();
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
match self.parse_expr_res(restrictions, attrs) {
|
match self.parse_expr_res(restrictions, attrs) {
|
||||||
Ok(expr) => expr,
|
Ok((expr, _)) => expr,
|
||||||
Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
|
Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -2445,7 +2466,7 @@ fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
|
|||||||
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
|
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
|
||||||
let lo = self.token.span;
|
let lo = self.token.span;
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
|
let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
|
||||||
let ty = if this.eat(&token::Colon) {
|
let ty = if this.eat(&token::Colon) {
|
||||||
this.parse_ty()?
|
this.parse_ty()?
|
||||||
@ -2463,6 +2484,7 @@ fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
|
|||||||
is_placeholder: false,
|
is_placeholder: false,
|
||||||
},
|
},
|
||||||
Trailing::from(this.token == token::Comma),
|
Trailing::from(this.token == token::Comma),
|
||||||
|
UsePreAttrPos::No,
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -2583,7 +2605,7 @@ fn parse_if_after_cond(&mut self, lo: Span, mut cond: P<Expr>) -> PResult<'a, P<
|
|||||||
/// Parses the condition of a `if` or `while` expression.
|
/// Parses the condition of a `if` or `while` expression.
|
||||||
fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
|
fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let mut cond =
|
let (mut cond, _) =
|
||||||
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
|
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
|
||||||
|
|
||||||
CondChecker::new(self).visit_expr(&mut cond);
|
CondChecker::new(self).visit_expr(&mut cond);
|
||||||
@ -2632,7 +2654,7 @@ fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>>
|
|||||||
self.expect(&token::Eq)?;
|
self.expect(&token::Eq)?;
|
||||||
}
|
}
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
|
let (expr, _) = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
|
||||||
let span = lo.to(expr.span);
|
let span = lo.to(expr.span);
|
||||||
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
|
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
|
||||||
}
|
}
|
||||||
@ -2766,7 +2788,7 @@ fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
|
|||||||
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
|
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
|
||||||
// happen right before the return of this method.
|
// happen right before the return of this method.
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
|
let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
|
||||||
Ok(expr) => expr,
|
Ok(expr) => expr,
|
||||||
Err(expr_err) => {
|
Err(expr_err) => {
|
||||||
// We don't know what followed the `in`, so cancel and bubble up the
|
// We don't know what followed the `in`, so cancel and bubble up the
|
||||||
@ -2801,7 +2823,7 @@ fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
|
|||||||
}
|
}
|
||||||
self.check_for_for_in_in_typo(self.prev_token.span);
|
self.check_for_for_in_in_typo(self.prev_token.span);
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
||||||
Ok((pat, expr))
|
Ok((pat, expr))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2921,7 +2943,7 @@ pub(crate) fn eat_label(&mut self) -> Option<Label> {
|
|||||||
fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
|
fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
|
||||||
let match_span = self.prev_token.span;
|
let match_span = self.prev_token.span;
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
let (scrutinee, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
|
||||||
|
|
||||||
self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
|
self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
|
||||||
}
|
}
|
||||||
@ -3069,7 +3091,7 @@ fn parse_arm_body_missing_braces(
|
|||||||
|
|
||||||
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
let lo = this.token.span;
|
let lo = this.token.span;
|
||||||
let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
|
let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
|
||||||
|
|
||||||
@ -3126,7 +3148,7 @@ pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
|||||||
let arm_start_span = this.token.span;
|
let arm_start_span = this.token.span;
|
||||||
|
|
||||||
let attrs = this.parse_outer_attributes()?;
|
let attrs = this.parse_outer_attributes()?;
|
||||||
let expr =
|
let (expr, _) =
|
||||||
this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
|
this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
|
||||||
err.span_label(arrow_span, "while parsing the `match` arm starting here");
|
err.span_label(arrow_span, "while parsing the `match` arm starting here");
|
||||||
err
|
err
|
||||||
@ -3244,6 +3266,7 @@ pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
|||||||
is_placeholder: false,
|
is_placeholder: false,
|
||||||
},
|
},
|
||||||
Trailing::No,
|
Trailing::No,
|
||||||
|
UsePreAttrPos::No,
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -3334,8 +3357,9 @@ fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Exp
|
|||||||
|
|
||||||
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
|
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs).map_err(
|
match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) {
|
||||||
|mut err| {
|
Ok((expr, _)) => Ok(expr),
|
||||||
|
Err(mut err) => {
|
||||||
if self.prev_token == token::OpenDelim(Delimiter::Brace) {
|
if self.prev_token == token::OpenDelim(Delimiter::Brace) {
|
||||||
let sugg_sp = self.prev_token.span.shrink_to_lo();
|
let sugg_sp = self.prev_token.span.shrink_to_lo();
|
||||||
// Consume everything within the braces, let's avoid further parse
|
// Consume everything within the braces, let's avoid further parse
|
||||||
@ -3355,9 +3379,9 @@ fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
|
|||||||
err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
|
err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
err
|
Err(err)
|
||||||
},
|
}
|
||||||
)
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn is_builtin(&self) -> bool {
|
pub(crate) fn is_builtin(&self) -> bool {
|
||||||
@ -3708,7 +3732,7 @@ fn recover_ident_into_label(&mut self, ident: Ident) -> Label {
|
|||||||
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
|
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.recover_vcs_conflict_marker();
|
self.recover_vcs_conflict_marker();
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
let lo = this.token.span;
|
let lo = this.token.span;
|
||||||
|
|
||||||
// Check if a colon exists one ahead. This means we're parsing a fieldname.
|
// Check if a colon exists one ahead. This means we're parsing a fieldname.
|
||||||
@ -3753,6 +3777,7 @@ fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
|
|||||||
is_placeholder: false,
|
is_placeholder: false,
|
||||||
},
|
},
|
||||||
Trailing::from(this.token == token::Comma),
|
Trailing::from(this.token == token::Comma),
|
||||||
|
UsePreAttrPos::No,
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -3846,7 +3871,7 @@ fn collect_tokens_for_expr(
|
|||||||
attrs: AttrWrapper,
|
attrs: AttrWrapper,
|
||||||
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
|
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
|
||||||
) -> PResult<'a, P<Expr>> {
|
) -> PResult<'a, P<Expr>> {
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
let res = f(this, attrs)?;
|
let res = f(this, attrs)?;
|
||||||
let trailing = Trailing::from(
|
let trailing = Trailing::from(
|
||||||
this.restrictions.contains(Restrictions::STMT_EXPR)
|
this.restrictions.contains(Restrictions::STMT_EXPR)
|
||||||
@ -3856,7 +3881,7 @@ fn collect_tokens_for_expr(
|
|||||||
// `#[attr] expr,` always captures a trailing comma.
|
// `#[attr] expr,` always captures a trailing comma.
|
||||||
|| this.token == token::Comma,
|
|| this.token == token::Comma,
|
||||||
);
|
);
|
||||||
Ok((res, trailing))
|
Ok((res, trailing, UsePreAttrPos::No))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -7,7 +7,7 @@
|
|||||||
use rustc_span::Span;
|
use rustc_span::Span;
|
||||||
use thin_vec::ThinVec;
|
use thin_vec::ThinVec;
|
||||||
|
|
||||||
use super::{ForceCollect, Parser, Trailing};
|
use super::{ForceCollect, Parser, Trailing, UsePreAttrPos};
|
||||||
use crate::errors::{
|
use crate::errors::{
|
||||||
self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
|
self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
|
||||||
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
|
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
|
||||||
@ -169,94 +169,88 @@ pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::Generi
|
|||||||
let mut done = false;
|
let mut done = false;
|
||||||
while !done {
|
while !done {
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let param =
|
let param = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
if this.eat_keyword_noexpect(kw::SelfUpper) {
|
||||||
if this.eat_keyword_noexpect(kw::SelfUpper) {
|
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
|
||||||
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
|
// as if `Self` never existed.
|
||||||
// as if `Self` never existed.
|
this.dcx()
|
||||||
this.dcx().emit_err(UnexpectedSelfInGenericParameters {
|
.emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });
|
||||||
span: this.prev_token.span,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Eat a trailing comma, if it exists.
|
// Eat a trailing comma, if it exists.
|
||||||
let _ = this.eat(&token::Comma);
|
let _ = this.eat(&token::Comma);
|
||||||
}
|
}
|
||||||
|
|
||||||
let param = if this.check_lifetime() {
|
let param = if this.check_lifetime() {
|
||||||
let lifetime = this.expect_lifetime();
|
let lifetime = this.expect_lifetime();
|
||||||
// Parse lifetime parameter.
|
// Parse lifetime parameter.
|
||||||
let (colon_span, bounds) = if this.eat(&token::Colon) {
|
let (colon_span, bounds) = if this.eat(&token::Colon) {
|
||||||
(Some(this.prev_token.span), this.parse_lt_param_bounds())
|
(Some(this.prev_token.span), this.parse_lt_param_bounds())
|
||||||
} else {
|
|
||||||
(None, Vec::new())
|
|
||||||
};
|
|
||||||
|
|
||||||
if this.check_noexpect(&token::Eq)
|
|
||||||
&& this.look_ahead(1, |t| t.is_lifetime())
|
|
||||||
{
|
|
||||||
let lo = this.token.span;
|
|
||||||
// Parse `= 'lifetime`.
|
|
||||||
this.bump(); // `=`
|
|
||||||
this.bump(); // `'lifetime`
|
|
||||||
let span = lo.to(this.prev_token.span);
|
|
||||||
this.dcx().emit_err(
|
|
||||||
UnexpectedDefaultValueForLifetimeInGenericParameters { span },
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(ast::GenericParam {
|
|
||||||
ident: lifetime.ident,
|
|
||||||
id: lifetime.id,
|
|
||||||
attrs,
|
|
||||||
bounds,
|
|
||||||
kind: ast::GenericParamKind::Lifetime,
|
|
||||||
is_placeholder: false,
|
|
||||||
colon_span,
|
|
||||||
})
|
|
||||||
} else if this.check_keyword(kw::Const) {
|
|
||||||
// Parse const parameter.
|
|
||||||
Some(this.parse_const_param(attrs)?)
|
|
||||||
} else if this.check_ident() {
|
|
||||||
// Parse type parameter.
|
|
||||||
Some(this.parse_ty_param(attrs)?)
|
|
||||||
} else if this.token.can_begin_type() {
|
|
||||||
// Trying to write an associated type bound? (#26271)
|
|
||||||
let snapshot = this.create_snapshot_for_diagnostic();
|
|
||||||
match this.parse_ty_where_predicate() {
|
|
||||||
Ok(where_predicate) => {
|
|
||||||
this.dcx().emit_err(errors::BadAssocTypeBounds {
|
|
||||||
span: where_predicate.span(),
|
|
||||||
});
|
|
||||||
// FIXME - try to continue parsing other generics?
|
|
||||||
return Ok((None, Trailing::No));
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
err.cancel();
|
|
||||||
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
|
|
||||||
this.restore_snapshot(snapshot);
|
|
||||||
return Ok((None, Trailing::No));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
// Check for trailing attributes and stop parsing.
|
(None, Vec::new())
|
||||||
if !attrs.is_empty() {
|
|
||||||
if !params.is_empty() {
|
|
||||||
this.dcx()
|
|
||||||
.emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
|
|
||||||
} else {
|
|
||||||
this.dcx()
|
|
||||||
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return Ok((None, Trailing::No));
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if !this.eat(&token::Comma) {
|
if this.check_noexpect(&token::Eq) && this.look_ahead(1, |t| t.is_lifetime()) {
|
||||||
done = true;
|
let lo = this.token.span;
|
||||||
|
// Parse `= 'lifetime`.
|
||||||
|
this.bump(); // `=`
|
||||||
|
this.bump(); // `'lifetime`
|
||||||
|
let span = lo.to(this.prev_token.span);
|
||||||
|
this.dcx().emit_err(UnexpectedDefaultValueForLifetimeInGenericParameters {
|
||||||
|
span,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
// We just ate the comma, so no need to capture the trailing token.
|
|
||||||
Ok((param, Trailing::No))
|
Some(ast::GenericParam {
|
||||||
})?;
|
ident: lifetime.ident,
|
||||||
|
id: lifetime.id,
|
||||||
|
attrs,
|
||||||
|
bounds,
|
||||||
|
kind: ast::GenericParamKind::Lifetime,
|
||||||
|
is_placeholder: false,
|
||||||
|
colon_span,
|
||||||
|
})
|
||||||
|
} else if this.check_keyword(kw::Const) {
|
||||||
|
// Parse const parameter.
|
||||||
|
Some(this.parse_const_param(attrs)?)
|
||||||
|
} else if this.check_ident() {
|
||||||
|
// Parse type parameter.
|
||||||
|
Some(this.parse_ty_param(attrs)?)
|
||||||
|
} else if this.token.can_begin_type() {
|
||||||
|
// Trying to write an associated type bound? (#26271)
|
||||||
|
let snapshot = this.create_snapshot_for_diagnostic();
|
||||||
|
match this.parse_ty_where_predicate() {
|
||||||
|
Ok(where_predicate) => {
|
||||||
|
this.dcx().emit_err(errors::BadAssocTypeBounds {
|
||||||
|
span: where_predicate.span(),
|
||||||
|
});
|
||||||
|
// FIXME - try to continue parsing other generics?
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
err.cancel();
|
||||||
|
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
|
||||||
|
this.restore_snapshot(snapshot);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Ok((None, Trailing::No, UsePreAttrPos::No));
|
||||||
|
} else {
|
||||||
|
// Check for trailing attributes and stop parsing.
|
||||||
|
if !attrs.is_empty() {
|
||||||
|
if !params.is_empty() {
|
||||||
|
this.dcx().emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
|
||||||
|
} else {
|
||||||
|
this.dcx()
|
||||||
|
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Ok((None, Trailing::No, UsePreAttrPos::No));
|
||||||
|
};
|
||||||
|
|
||||||
|
if !this.eat(&token::Comma) {
|
||||||
|
done = true;
|
||||||
|
}
|
||||||
|
// We just ate the comma, so no need to capture the trailing token.
|
||||||
|
Ok((param, Trailing::No, UsePreAttrPos::No))
|
||||||
|
})?;
|
||||||
|
|
||||||
if let Some(param) = param {
|
if let Some(param) = param {
|
||||||
params.push(param);
|
params.push(param);
|
||||||
|
@ -20,7 +20,9 @@
|
|||||||
|
|
||||||
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
|
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
|
||||||
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
||||||
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing};
|
use super::{
|
||||||
|
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
|
||||||
|
};
|
||||||
use crate::errors::{self, MacroExpandsToAdtField};
|
use crate::errors::{self, MacroExpandsToAdtField};
|
||||||
use crate::{fluent_generated as fluent, maybe_whole};
|
use crate::{fluent_generated as fluent, maybe_whole};
|
||||||
|
|
||||||
@ -127,7 +129,7 @@ pub(super) fn parse_item_common(
|
|||||||
Some(item.into_inner())
|
Some(item.into_inner())
|
||||||
});
|
});
|
||||||
|
|
||||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, mut attrs| {
|
self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
|
||||||
let lo = this.token.span;
|
let lo = this.token.span;
|
||||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||||
let mut def = this.parse_defaultness();
|
let mut def = this.parse_defaultness();
|
||||||
@ -145,7 +147,7 @@ pub(super) fn parse_item_common(
|
|||||||
let span = lo.to(this.prev_token.span);
|
let span = lo.to(this.prev_token.span);
|
||||||
let id = DUMMY_NODE_ID;
|
let id = DUMMY_NODE_ID;
|
||||||
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
|
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
|
||||||
return Ok((Some(item), Trailing::No));
|
return Ok((Some(item), Trailing::No, UsePreAttrPos::No));
|
||||||
}
|
}
|
||||||
|
|
||||||
// At this point, we have failed to parse an item.
|
// At this point, we have failed to parse an item.
|
||||||
@ -160,7 +162,7 @@ pub(super) fn parse_item_common(
|
|||||||
if !attrs_allowed {
|
if !attrs_allowed {
|
||||||
this.recover_attrs_no_item(&attrs)?;
|
this.recover_attrs_no_item(&attrs)?;
|
||||||
}
|
}
|
||||||
Ok((None, Trailing::No))
|
Ok((None, Trailing::No, UsePreAttrPos::No))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1546,86 +1548,82 @@ fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
|
|||||||
self.recover_vcs_conflict_marker();
|
self.recover_vcs_conflict_marker();
|
||||||
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
|
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
|
||||||
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
|
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
|
||||||
self.collect_tokens_trailing_token(
|
self.collect_tokens(None, variant_attrs, ForceCollect::No, |this, variant_attrs| {
|
||||||
variant_attrs,
|
let vlo = this.token.span;
|
||||||
ForceCollect::No,
|
|
||||||
|this, variant_attrs| {
|
|
||||||
let vlo = this.token.span;
|
|
||||||
|
|
||||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||||
if !this.recover_nested_adt_item(kw::Enum)? {
|
if !this.recover_nested_adt_item(kw::Enum)? {
|
||||||
return Ok((None, Trailing::No));
|
return Ok((None, Trailing::No, UsePreAttrPos::No));
|
||||||
}
|
}
|
||||||
let ident = this.parse_field_ident("enum", vlo)?;
|
let ident = this.parse_field_ident("enum", vlo)?;
|
||||||
|
|
||||||
if this.token == token::Not {
|
if this.token == token::Not {
|
||||||
if let Err(err) = this.unexpected() {
|
if let Err(err) = this.unexpected() {
|
||||||
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
|
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
|
||||||
}
|
|
||||||
|
|
||||||
this.bump();
|
|
||||||
this.parse_delim_args()?;
|
|
||||||
|
|
||||||
return Ok((None, Trailing::from(this.token == token::Comma)));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
|
this.bump();
|
||||||
// Parse a struct variant.
|
this.parse_delim_args()?;
|
||||||
let (fields, recovered) =
|
|
||||||
match this.parse_record_struct_body("struct", ident.span, false) {
|
return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
|
||||||
Ok((fields, recovered)) => (fields, recovered),
|
}
|
||||||
Err(mut err) => {
|
|
||||||
if this.token == token::Colon {
|
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
|
||||||
// We handle `enum` to `struct` suggestion in the caller.
|
// Parse a struct variant.
|
||||||
return Err(err);
|
let (fields, recovered) =
|
||||||
}
|
match this.parse_record_struct_body("struct", ident.span, false) {
|
||||||
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
|
Ok((fields, recovered)) => (fields, recovered),
|
||||||
this.bump(); // }
|
|
||||||
err.span_label(span, "while parsing this enum");
|
|
||||||
err.help(help);
|
|
||||||
let guar = err.emit();
|
|
||||||
(thin_vec![], Recovered::Yes(guar))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
VariantData::Struct { fields, recovered: recovered.into() }
|
|
||||||
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
|
||||||
let body = match this.parse_tuple_struct_body() {
|
|
||||||
Ok(body) => body,
|
|
||||||
Err(mut err) => {
|
Err(mut err) => {
|
||||||
if this.token == token::Colon {
|
if this.token == token::Colon {
|
||||||
// We handle `enum` to `struct` suggestion in the caller.
|
// We handle `enum` to `struct` suggestion in the caller.
|
||||||
return Err(err);
|
return Err(err);
|
||||||
}
|
}
|
||||||
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
|
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
|
||||||
this.bump(); // )
|
this.bump(); // }
|
||||||
err.span_label(span, "while parsing this enum");
|
err.span_label(span, "while parsing this enum");
|
||||||
err.help(help);
|
err.help(help);
|
||||||
err.emit();
|
let guar = err.emit();
|
||||||
thin_vec![]
|
(thin_vec![], Recovered::Yes(guar))
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
VariantData::Tuple(body, DUMMY_NODE_ID)
|
VariantData::Struct { fields, recovered: recovered.into() }
|
||||||
} else {
|
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
||||||
VariantData::Unit(DUMMY_NODE_ID)
|
let body = match this.parse_tuple_struct_body() {
|
||||||
|
Ok(body) => body,
|
||||||
|
Err(mut err) => {
|
||||||
|
if this.token == token::Colon {
|
||||||
|
// We handle `enum` to `struct` suggestion in the caller.
|
||||||
|
return Err(err);
|
||||||
|
}
|
||||||
|
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
|
||||||
|
this.bump(); // )
|
||||||
|
err.span_label(span, "while parsing this enum");
|
||||||
|
err.help(help);
|
||||||
|
err.emit();
|
||||||
|
thin_vec![]
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
VariantData::Tuple(body, DUMMY_NODE_ID)
|
||||||
|
} else {
|
||||||
|
VariantData::Unit(DUMMY_NODE_ID)
|
||||||
|
};
|
||||||
|
|
||||||
let disr_expr =
|
let disr_expr =
|
||||||
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
|
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
|
||||||
|
|
||||||
let vr = ast::Variant {
|
let vr = ast::Variant {
|
||||||
ident,
|
ident,
|
||||||
vis,
|
vis,
|
||||||
id: DUMMY_NODE_ID,
|
id: DUMMY_NODE_ID,
|
||||||
attrs: variant_attrs,
|
attrs: variant_attrs,
|
||||||
data: struct_def,
|
data: struct_def,
|
||||||
disr_expr,
|
disr_expr,
|
||||||
span: vlo.to(this.prev_token.span),
|
span: vlo.to(this.prev_token.span),
|
||||||
is_placeholder: false,
|
is_placeholder: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok((Some(vr), Trailing::from(this.token == token::Comma)))
|
Ok((Some(vr), Trailing::from(this.token == token::Comma), UsePreAttrPos::No))
|
||||||
},
|
})
|
||||||
)
|
|
||||||
.map_err(|mut err| {
|
.map_err(|mut err| {
|
||||||
err.help(help);
|
err.help(help);
|
||||||
err
|
err
|
||||||
@ -1777,7 +1775,7 @@ pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef
|
|||||||
// Unit like structs are handled in parse_item_struct function
|
// Unit like structs are handled in parse_item_struct function
|
||||||
self.parse_paren_comma_seq(|p| {
|
self.parse_paren_comma_seq(|p| {
|
||||||
let attrs = p.parse_outer_attributes()?;
|
let attrs = p.parse_outer_attributes()?;
|
||||||
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
|
p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
|
||||||
let mut snapshot = None;
|
let mut snapshot = None;
|
||||||
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
|
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
|
||||||
// Account for `<<<<<<<` diff markers. We can't proactively error here because
|
// Account for `<<<<<<<` diff markers. We can't proactively error here because
|
||||||
@ -1816,6 +1814,7 @@ pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef
|
|||||||
is_placeholder: false,
|
is_placeholder: false,
|
||||||
},
|
},
|
||||||
Trailing::from(p.token == token::Comma),
|
Trailing::from(p.token == token::Comma),
|
||||||
|
UsePreAttrPos::No,
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
@ -1827,11 +1826,11 @@ fn parse_field_def(&mut self, adt_ty: &str) -> PResult<'a, FieldDef> {
|
|||||||
self.recover_vcs_conflict_marker();
|
self.recover_vcs_conflict_marker();
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.recover_vcs_conflict_marker();
|
self.recover_vcs_conflict_marker();
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
let lo = this.token.span;
|
let lo = this.token.span;
|
||||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||||
this.parse_single_struct_field(adt_ty, lo, vis, attrs)
|
this.parse_single_struct_field(adt_ty, lo, vis, attrs)
|
||||||
.map(|field| (field, Trailing::No))
|
.map(|field| (field, Trailing::No, UsePreAttrPos::No))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2806,12 +2805,12 @@ pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinV
|
|||||||
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
|
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
|
||||||
let lo = self.token.span;
|
let lo = self.token.span;
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
|
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
|
||||||
if let Some(mut param) = this.parse_self_param()? {
|
if let Some(mut param) = this.parse_self_param()? {
|
||||||
param.attrs = attrs;
|
param.attrs = attrs;
|
||||||
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
|
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
|
||||||
return Ok((res?, Trailing::No));
|
return Ok((res?, Trailing::No, UsePreAttrPos::No));
|
||||||
}
|
}
|
||||||
|
|
||||||
let is_name_required = match this.token.kind {
|
let is_name_required = match this.token.kind {
|
||||||
@ -2827,7 +2826,7 @@ fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResu
|
|||||||
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
|
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
|
||||||
{
|
{
|
||||||
let guar = err.emit();
|
let guar = err.emit();
|
||||||
Ok((dummy_arg(ident, guar), Trailing::No))
|
Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No))
|
||||||
} else {
|
} else {
|
||||||
Err(err)
|
Err(err)
|
||||||
};
|
};
|
||||||
@ -2871,6 +2870,7 @@ fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResu
|
|||||||
Ok((
|
Ok((
|
||||||
Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
|
Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
|
||||||
Trailing::No,
|
Trailing::No,
|
||||||
|
UsePreAttrPos::No,
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -14,7 +14,7 @@
|
|||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use std::{fmt, mem, slice};
|
use std::{fmt, mem, slice};
|
||||||
|
|
||||||
use attr_wrapper::AttrWrapper;
|
use attr_wrapper::{AttrWrapper, UsePreAttrPos};
|
||||||
pub use diagnostics::AttemptLocalParseRecovery;
|
pub use diagnostics::AttemptLocalParseRecovery;
|
||||||
pub(crate) use expr::ForbiddenLetReason;
|
pub(crate) use expr::ForbiddenLetReason;
|
||||||
pub(crate) use item::FnParseMode;
|
pub(crate) use item::FnParseMode;
|
||||||
@ -254,7 +254,7 @@ enum Capturing {
|
|||||||
Yes,
|
Yes,
|
||||||
}
|
}
|
||||||
|
|
||||||
// This state is used by `Parser::collect_tokens_trailing_token`.
|
// This state is used by `Parser::collect_tokens`.
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
struct CaptureState {
|
struct CaptureState {
|
||||||
capturing: Capturing,
|
capturing: Capturing,
|
||||||
@ -466,8 +466,8 @@ pub fn new(
|
|||||||
parser.bump();
|
parser.bump();
|
||||||
|
|
||||||
// Change this from 1 back to 0 after the bump. This eases debugging of
|
// Change this from 1 back to 0 after the bump. This eases debugging of
|
||||||
// `Parser::collect_tokens_trailing_token` nicer because it makes the
|
// `Parser::collect_tokens` because 0-indexed token positions are nicer
|
||||||
// token positions 0-indexed which is nicer than 1-indexed.
|
// than 1-indexed token positions.
|
||||||
parser.num_bump_calls = 0;
|
parser.num_bump_calls = 0;
|
||||||
|
|
||||||
parser
|
parser
|
||||||
@ -1553,11 +1553,9 @@ fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
|
|||||||
) -> PResult<'a, R> {
|
) -> PResult<'a, R> {
|
||||||
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
|
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
|
||||||
// `ForceCollect::Yes`
|
// `ForceCollect::Yes`
|
||||||
self.collect_tokens_trailing_token(
|
self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
|
||||||
AttrWrapper::empty(),
|
Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
|
||||||
ForceCollect::Yes,
|
})
|
||||||
|this, _attrs| Ok((f(this)?, Trailing::No)),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// `::{` or `::*`
|
/// `::{` or `::*`
|
||||||
|
@ -13,7 +13,7 @@
|
|||||||
use rustc_span::{BytePos, ErrorGuaranteed, Span};
|
use rustc_span::{BytePos, ErrorGuaranteed, Span};
|
||||||
use thin_vec::{thin_vec, ThinVec};
|
use thin_vec::{thin_vec, ThinVec};
|
||||||
|
|
||||||
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing};
|
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos};
|
||||||
use crate::errors::{
|
use crate::errors::{
|
||||||
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
|
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
|
||||||
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
|
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
|
||||||
@ -403,7 +403,7 @@ fn maybe_recover_trailing_expr(
|
|||||||
|
|
||||||
// Parse an associative expression such as `+ expr`, `% expr`, ...
|
// Parse an associative expression such as `+ expr`, `% expr`, ...
|
||||||
// Assignements, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
|
// Assignements, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
|
||||||
if let Ok(expr) =
|
if let Ok((expr, _)) =
|
||||||
snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel())
|
snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel())
|
||||||
{
|
{
|
||||||
// We got a valid expression.
|
// We got a valid expression.
|
||||||
@ -1302,24 +1302,23 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, PatFieldsRest)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let field =
|
let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
let field = match this.parse_pat_field(lo, attrs) {
|
||||||
let field = match this.parse_pat_field(lo, attrs) {
|
Ok(field) => Ok(field),
|
||||||
Ok(field) => Ok(field),
|
Err(err) => {
|
||||||
Err(err) => {
|
if let Some(delayed_err) = delayed_err.take() {
|
||||||
if let Some(delayed_err) = delayed_err.take() {
|
delayed_err.emit();
|
||||||
delayed_err.emit();
|
|
||||||
}
|
|
||||||
return Err(err);
|
|
||||||
}
|
}
|
||||||
}?;
|
return Err(err);
|
||||||
ate_comma = this.eat(&token::Comma);
|
}
|
||||||
|
}?;
|
||||||
|
ate_comma = this.eat(&token::Comma);
|
||||||
|
|
||||||
last_non_comma_dotdot_span = Some(this.prev_token.span);
|
last_non_comma_dotdot_span = Some(this.prev_token.span);
|
||||||
|
|
||||||
// We just ate a comma, so there's no need to capture a trailing token.
|
// We just ate a comma, so there's no need to capture a trailing token.
|
||||||
Ok((field, Trailing::No))
|
Ok((field, Trailing::No, UsePreAttrPos::No))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
fields.push(field)
|
fields.push(field)
|
||||||
}
|
}
|
||||||
|
@ -913,7 +913,7 @@ pub(super) fn parse_generic_arg(
|
|||||||
let snapshot = self.create_snapshot_for_diagnostic();
|
let snapshot = self.create_snapshot_for_diagnostic();
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
|
match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
|
||||||
Ok(expr) => {
|
Ok((expr, _)) => {
|
||||||
return Ok(Some(self.dummy_const_arg_needs_braces(
|
return Ok(Some(self.dummy_const_arg_needs_braces(
|
||||||
self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
|
self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
|
||||||
expr.span,
|
expr.span,
|
||||||
|
@ -21,7 +21,7 @@
|
|||||||
use super::path::PathStyle;
|
use super::path::PathStyle;
|
||||||
use super::{
|
use super::{
|
||||||
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
|
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
|
||||||
Trailing,
|
Trailing, UsePreAttrPos,
|
||||||
};
|
};
|
||||||
use crate::errors::MalformedLoopLabel;
|
use crate::errors::MalformedLoopLabel;
|
||||||
use crate::{errors, maybe_whole};
|
use crate::{errors, maybe_whole};
|
||||||
@ -46,6 +46,7 @@ pub fn parse_stmt_without_recovery(
|
|||||||
capture_semi: bool,
|
capture_semi: bool,
|
||||||
force_collect: ForceCollect,
|
force_collect: ForceCollect,
|
||||||
) -> PResult<'a, Option<Stmt>> {
|
) -> PResult<'a, Option<Stmt>> {
|
||||||
|
let pre_attr_pos = self.collect_pos();
|
||||||
let attrs = self.parse_outer_attributes()?;
|
let attrs = self.parse_outer_attributes()?;
|
||||||
let lo = self.token.span;
|
let lo = self.token.span;
|
||||||
|
|
||||||
@ -66,11 +67,15 @@ pub fn parse_stmt_without_recovery(
|
|||||||
}
|
}
|
||||||
|
|
||||||
Ok(Some(if self.token.is_keyword(kw::Let) {
|
Ok(Some(if self.token.is_keyword(kw::Let) {
|
||||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
|
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
|
||||||
this.expect_keyword(kw::Let)?;
|
this.expect_keyword(kw::Let)?;
|
||||||
let local = this.parse_local(attrs)?;
|
let local = this.parse_local(attrs)?;
|
||||||
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
|
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
|
||||||
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
|
Ok((
|
||||||
|
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
|
||||||
|
trailing,
|
||||||
|
UsePreAttrPos::No,
|
||||||
|
))
|
||||||
})?
|
})?
|
||||||
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
|
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
|
||||||
self.recover_stmt_local_after_let(
|
self.recover_stmt_local_after_let(
|
||||||
@ -104,10 +109,18 @@ pub fn parse_stmt_without_recovery(
|
|||||||
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
||||||
// that starts like a path (1 token), but it fact not a path.
|
// that starts like a path (1 token), but it fact not a path.
|
||||||
// Also, we avoid stealing syntax from `parse_item_`.
|
// Also, we avoid stealing syntax from `parse_item_`.
|
||||||
let stmt = self.collect_tokens_trailing_token(
|
//
|
||||||
|
// `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the
|
||||||
|
// expression, not the statement. (But the statement attributes/tokens are obtained
|
||||||
|
// from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to
|
||||||
|
// the things within `StmtKind`.)
|
||||||
|
let stmt = self.collect_tokens(
|
||||||
|
Some(pre_attr_pos),
|
||||||
AttrWrapper::empty(),
|
AttrWrapper::empty(),
|
||||||
force_collect,
|
force_collect,
|
||||||
|this, _empty_attrs| Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No)),
|
|this, _empty_attrs| {
|
||||||
|
Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes))
|
||||||
|
},
|
||||||
);
|
);
|
||||||
match stmt {
|
match stmt {
|
||||||
Ok(stmt) => stmt,
|
Ok(stmt) => stmt,
|
||||||
@ -129,12 +142,15 @@ pub fn parse_stmt_without_recovery(
|
|||||||
self.error_outer_attrs(attrs);
|
self.error_outer_attrs(attrs);
|
||||||
self.mk_stmt(lo, StmtKind::Empty)
|
self.mk_stmt(lo, StmtKind::Empty)
|
||||||
} else if self.token != token::CloseDelim(Delimiter::Brace) {
|
} else if self.token != token::CloseDelim(Delimiter::Brace) {
|
||||||
// Remainder are line-expr stmts.
|
// Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
|
||||||
let e = self.collect_tokens_trailing_token(
|
// above.
|
||||||
|
let e = self.collect_tokens(
|
||||||
|
Some(pre_attr_pos),
|
||||||
AttrWrapper::empty(),
|
AttrWrapper::empty(),
|
||||||
force_collect,
|
force_collect,
|
||||||
|this, _empty_attrs| {
|
|this, _empty_attrs| {
|
||||||
Ok((this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?, Trailing::No))
|
let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?;
|
||||||
|
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
|
||||||
},
|
},
|
||||||
)?;
|
)?;
|
||||||
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
|
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
|
||||||
@ -151,12 +167,16 @@ pub fn parse_stmt_without_recovery(
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
|
fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
|
||||||
let stmt = self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
|
||||||
let path = this.parse_path(PathStyle::Expr)?;
|
let path = this.parse_path(PathStyle::Expr)?;
|
||||||
|
|
||||||
if this.eat(&token::Not) {
|
if this.eat(&token::Not) {
|
||||||
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
|
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
|
||||||
return Ok((stmt_mac, Trailing::from(this.token == token::Semi)));
|
return Ok((
|
||||||
|
stmt_mac,
|
||||||
|
Trailing::from(this.token == token::Semi),
|
||||||
|
UsePreAttrPos::No,
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
|
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
|
||||||
@ -170,13 +190,17 @@ fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a,
|
|||||||
this.parse_expr_dot_or_call_with(attrs, expr, lo)
|
this.parse_expr_dot_or_call_with(attrs, expr, lo)
|
||||||
})?;
|
})?;
|
||||||
// `DUMMY_SP` will get overwritten later in this function
|
// `DUMMY_SP` will get overwritten later in this function
|
||||||
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), Trailing::No))
|
Ok((
|
||||||
|
this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)),
|
||||||
|
Trailing::No,
|
||||||
|
UsePreAttrPos::No,
|
||||||
|
))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
if let StmtKind::Expr(expr) = stmt.kind {
|
if let StmtKind::Expr(expr) = stmt.kind {
|
||||||
// Perform this outside of the `collect_tokens_trailing_token` closure,
|
// Perform this outside of the `collect_tokens` closure, since our
|
||||||
// since our outer attributes do not apply to this part of the expression
|
// outer attributes do not apply to this part of the expression.
|
||||||
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
|
let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| {
|
||||||
this.parse_expr_assoc_rest_with(0, true, expr)
|
this.parse_expr_assoc_rest_with(0, true, expr)
|
||||||
})?;
|
})?;
|
||||||
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
|
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
|
||||||
@ -210,7 +234,7 @@ fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResu
|
|||||||
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
|
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
|
||||||
let e = self.maybe_recover_from_bad_qpath(e)?;
|
let e = self.maybe_recover_from_bad_qpath(e)?;
|
||||||
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
|
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
|
||||||
let e = self.parse_expr_assoc_rest_with(0, false, e)?;
|
let (e, _) = self.parse_expr_assoc_rest_with(0, false, e)?;
|
||||||
StmtKind::Expr(e)
|
StmtKind::Expr(e)
|
||||||
};
|
};
|
||||||
Ok(self.mk_stmt(lo.to(hi), kind))
|
Ok(self.mk_stmt(lo.to(hi), kind))
|
||||||
@ -240,10 +264,14 @@ fn recover_stmt_local_after_let(
|
|||||||
subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
|
subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
|
||||||
force_collect: ForceCollect,
|
force_collect: ForceCollect,
|
||||||
) -> PResult<'a, Stmt> {
|
) -> PResult<'a, Stmt> {
|
||||||
let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
|
let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
|
||||||
let local = this.parse_local(attrs)?;
|
let local = this.parse_local(attrs)?;
|
||||||
// FIXME - maybe capture semicolon in recovery?
|
// FIXME - maybe capture semicolon in recovery?
|
||||||
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), Trailing::No))
|
Ok((
|
||||||
|
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
|
||||||
|
Trailing::No,
|
||||||
|
UsePreAttrPos::No,
|
||||||
|
))
|
||||||
})?;
|
})?;
|
||||||
self.dcx()
|
self.dcx()
|
||||||
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
|
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
|
||||||
|
42
tests/ui/attributes/assoc-expr.rs
Normal file
42
tests/ui/attributes/assoc-expr.rs
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
//@ check-pass
|
||||||
|
// This test triggered an assertion failure in token collection due to
|
||||||
|
// mishandling of attributes on associative expressions.
|
||||||
|
|
||||||
|
#![feature(cfg_eval)]
|
||||||
|
#![feature(rustc_attrs)]
|
||||||
|
#![feature(stmt_expr_attributes)]
|
||||||
|
#![allow(internal_features)]
|
||||||
|
|
||||||
|
fn main() {}
|
||||||
|
|
||||||
|
#[cfg_eval]
|
||||||
|
struct Foo1(
|
||||||
|
[ bool; {
|
||||||
|
let _x = 30;
|
||||||
|
#[cfg_attr(unix, rustc_dummy(aa))] 1
|
||||||
|
} ]
|
||||||
|
);
|
||||||
|
|
||||||
|
#[cfg_eval]
|
||||||
|
struct Foo12(
|
||||||
|
[ bool; {
|
||||||
|
let _x = 30;
|
||||||
|
#[cfg_attr(unix, rustc_dummy(bb))] 1 + 2
|
||||||
|
} ]
|
||||||
|
);
|
||||||
|
|
||||||
|
#[cfg_eval]
|
||||||
|
struct Foox(
|
||||||
|
[ bool; {
|
||||||
|
let _x = 30;
|
||||||
|
#[cfg_attr(unix, rustc_dummy(cc))] _x
|
||||||
|
} ]
|
||||||
|
);
|
||||||
|
|
||||||
|
#[cfg_eval]
|
||||||
|
struct Foox2(
|
||||||
|
[ bool; {
|
||||||
|
let _x = 30;
|
||||||
|
#[cfg_attr(unix, rustc_dummy(dd))] _x + 2
|
||||||
|
} ]
|
||||||
|
);
|
@ -51,14 +51,6 @@ macro_rules! c1 {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME: temporary
|
|
||||||
macro_rules! c2 {
|
|
||||||
($frag:ident, [$($tt:tt)*], $s1:literal, $s2:literal) => {
|
|
||||||
assert_eq!($frag!($($tt)*), $s1);
|
|
||||||
assert_eq!(stringify!($($tt)*), $s2);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_block() {
|
fn test_block() {
|
||||||
c1!(block, [ {} ], "{}");
|
c1!(block, [ {} ], "{}");
|
||||||
@ -344,10 +336,10 @@ fn test_expr() {
|
|||||||
// Ones involving attributes.
|
// Ones involving attributes.
|
||||||
c1!(expr, [ #[aa] 1 ], "#[aa] 1");
|
c1!(expr, [ #[aa] 1 ], "#[aa] 1");
|
||||||
c1!(expr, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
|
c1!(expr, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
|
||||||
c2!(expr, [ #[aa] 1 + 2 ], "1 + 2", "#[aa] 1 + 2"); // FIXME
|
c1!(expr, [ #[aa] 1 + 2 ], "#[aa] 1 + 2");
|
||||||
c2!(expr, [ #[aa] x + 2 ], "x + 2", "#[aa] x + 2"); // FIXME
|
c1!(expr, [ #[aa] x + 2 ], "#[aa] x + 2");
|
||||||
c2!(expr, [ #[aa] 1 / #[bb] 2 ], "1 / #[bb] 2", "#[aa] 1 / #[bb] 2"); // FIXME
|
c1!(expr, [ #[aa] 1 / #[bb] 2 ], "#[aa] 1 / #[bb] 2");
|
||||||
c2!(expr, [ #[aa] x / #[bb] 2 ], "x / #[bb] 2", "#[aa] x / #[bb] 2"); // FIXME
|
c1!(expr, [ #[aa] x / #[bb] 2 ], "#[aa] x / #[bb] 2");
|
||||||
c1!(expr, [ 1 << #[bb] 2 ], "1 << #[bb] 2");
|
c1!(expr, [ 1 << #[bb] 2 ], "1 << #[bb] 2");
|
||||||
c1!(expr, [ x << #[bb] 2 ], "x << #[bb] 2");
|
c1!(expr, [ x << #[bb] 2 ], "x << #[bb] 2");
|
||||||
c1!(expr, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
|
c1!(expr, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
|
||||||
@ -659,10 +651,10 @@ fn test_stmt() {
|
|||||||
// Ones involving attributes.
|
// Ones involving attributes.
|
||||||
c1!(stmt, [ #[aa] 1 ], "#[aa] 1");
|
c1!(stmt, [ #[aa] 1 ], "#[aa] 1");
|
||||||
c1!(stmt, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
|
c1!(stmt, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
|
||||||
c2!(stmt, [ #[aa] 1 as u32 ], "1 as u32", "#[aa] 1 as u32"); // FIXME
|
c1!(stmt, [ #[aa] 1 as u32 ], "#[aa] 1 as u32");
|
||||||
c2!(stmt, [ #[aa] x as u32 ], "x as u32", "#[aa] x as u32"); // FIXME
|
c1!(stmt, [ #[aa] x as u32 ], "#[aa] x as u32");
|
||||||
c2!(stmt, [ #[aa] 1 .. #[bb] 2 ], "1 .. #[bb] 2", "#[aa] 1 .. #[bb] 2"); // FIXME
|
c1!(stmt, [ #[aa] 1 .. #[bb] 2 ], "#[aa] 1 .. #[bb] 2");
|
||||||
c2!(stmt, [ #[aa] x .. #[bb] 2 ], "x .. #[bb] 2", "#[aa] x .. #[bb] 2"); // FIXME
|
c1!(stmt, [ #[aa] x .. #[bb] 2 ], "#[aa] x .. #[bb] 2");
|
||||||
c1!(stmt, [ 1 || #[bb] 2 ], "1 || #[bb] 2");
|
c1!(stmt, [ 1 || #[bb] 2 ], "1 || #[bb] 2");
|
||||||
c1!(stmt, [ x || #[bb] 2 ], "x || #[bb] 2");
|
c1!(stmt, [ x || #[bb] 2 ], "x || #[bb] 2");
|
||||||
c1!(stmt, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
|
c1!(stmt, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
|
||||||
|
Loading…
Reference in New Issue
Block a user