Auto merge of #78296 - Aaron1011:fix/stmt-tokens, r=petrochenkov
Properly handle attributes on statements

We now collect tokens for the underlying node wrapped by `StmtKind` instead of storing tokens directly in `Stmt`. `LazyTokenStream` now supports capturing a trailing semicolon after it is initially constructed. This allows us to avoid refactoring statement parsing to wrap the parsing of the semicolon in `parse_tokens`.

Attributes on item statements (e.g. `fn foo() { #[bar] struct MyStruct; }`) are now treated as item attributes, not statement attributes, which is consistent with how we handle attributes on other kinds of statements. The feature-gating code is adjusted so that proc-macro attributes are still allowed on item statements on stable.

Two built-in macros (`#[global_allocator]` and `#[test]`) needed to be adjusted to support being passed `Annotatable::Stmt`.
commit 4ae328bef4
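As a rough sketch of the resulting API (simplified; `captured` is only an illustrative name, and the real call sites appear in the parser changes below), callers now reach a statement's token stream through accessors on `Stmt` that delegate to whichever node the `StmtKind` wraps, rather than reading a `tokens` field on `Stmt` itself:

    // `Stmt` no longer stores tokens directly; the wrapped `Local`,
    // `Item`, `Expr`, or `MacCallStmt` owns the captured stream.
    if stmt.tokens().is_none() {
        stmt.set_tokens(captured); // captured: Option<LazyTokenStream>
    }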
@ -901,10 +901,39 @@ pub struct Stmt {
    pub id: NodeId,
    pub kind: StmtKind,
    pub span: Span,
    pub tokens: Option<LazyTokenStream>,
}

impl Stmt {
    pub fn tokens(&self) -> Option<&LazyTokenStream> {
        match self.kind {
            StmtKind::Local(ref local) => local.tokens.as_ref(),
            StmtKind::Item(ref item) => item.tokens.as_ref(),
            StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.tokens.as_ref(),
            StmtKind::Empty => None,
            StmtKind::MacCall(ref mac) => mac.tokens.as_ref(),
        }
    }

    pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> {
        match self.kind {
            StmtKind::Local(ref mut local) => local.tokens.as_mut(),
            StmtKind::Item(ref mut item) => item.tokens.as_mut(),
            StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(),
            StmtKind::Empty => None,
            StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(),
        }
    }

    pub fn set_tokens(&mut self, tokens: Option<LazyTokenStream>) {
        match self.kind {
            StmtKind::Local(ref mut local) => local.tokens = tokens,
            StmtKind::Item(ref mut item) => item.tokens = tokens,
            StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens,
            StmtKind::Empty => {}
            StmtKind::MacCall(ref mut mac) => mac.tokens = tokens,
        }
    }

    pub fn has_trailing_semicolon(&self) -> bool {
        match &self.kind {
            StmtKind::Semi(_) => true,
@ -912,18 +941,25 @@ pub fn has_trailing_semicolon(&self) -> bool {
            _ => false,
        }
    }

    /// Converts a parsed `Stmt` to a `Stmt` with
    /// a trailing semicolon.
    ///
    /// This only modifies the parsed AST struct, not the attached
    /// `LazyTokenStream`. The parser is responsible for calling
    /// `CreateTokenStream::add_trailing_semi` when there is actually
    /// a semicolon in the tokenstream.
    pub fn add_trailing_semicolon(mut self) -> Self {
        self.kind = match self.kind {
            StmtKind::Expr(expr) => StmtKind::Semi(expr),
            StmtKind::MacCall(mac) => {
                StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs }| MacCallStmt {
                    mac,
                    style: MacStmtStyle::Semicolon,
                    attrs,
                StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs, tokens }| {
                    MacCallStmt { mac, style: MacStmtStyle::Semicolon, attrs, tokens }
                }))
            }
            kind => kind,
        };

        self
    }

@ -963,6 +999,7 @@ pub struct MacCallStmt {
    pub mac: MacCall,
    pub style: MacStmtStyle,
    pub attrs: AttrVec,
    pub tokens: Option<LazyTokenStream>,
}

#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]
@ -988,6 +1025,7 @@ pub struct Local {
    pub init: Option<P<Expr>>,
    pub span: Span,
    pub attrs: AttrVec,
    pub tokens: Option<LazyTokenStream>,
}

/// An arm of a 'match'.

@ -579,13 +579,14 @@ pub fn noop_visit_parenthesized_parameter_data<T: MutVisitor>(
|
||||
}
|
||||
|
||||
pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
|
||||
let Local { id, pat, ty, init, span, attrs } = local.deref_mut();
|
||||
let Local { id, pat, ty, init, span, attrs, tokens } = local.deref_mut();
|
||||
vis.visit_id(id);
|
||||
vis.visit_pat(pat);
|
||||
visit_opt(ty, |ty| vis.visit_ty(ty));
|
||||
visit_opt(init, |init| vis.visit_expr(init));
|
||||
vis.visit_span(span);
|
||||
visit_thin_attrs(attrs, vis);
|
||||
visit_lazy_tts(tokens, vis);
|
||||
}
|
||||
|
||||
pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
|
||||
@ -1328,16 +1329,12 @@ pub fn noop_filter_map_expr<T: MutVisitor>(mut e: P<Expr>, vis: &mut T) -> Optio
|
||||
}
|
||||
|
||||
pub fn noop_flat_map_stmt<T: MutVisitor>(
|
||||
Stmt { kind, mut span, mut id, mut tokens }: Stmt,
|
||||
Stmt { kind, mut span, mut id }: Stmt,
|
||||
vis: &mut T,
|
||||
) -> SmallVec<[Stmt; 1]> {
|
||||
vis.visit_id(&mut id);
|
||||
vis.visit_span(&mut span);
|
||||
visit_lazy_tts(&mut tokens, vis);
|
||||
noop_flat_map_stmt_kind(kind, vis)
|
||||
.into_iter()
|
||||
.map(|kind| Stmt { id, kind, span, tokens: tokens.clone() })
|
||||
.collect()
|
||||
noop_flat_map_stmt_kind(kind, vis).into_iter().map(|kind| Stmt { id, kind, span }).collect()
|
||||
}
|
||||
|
||||
pub fn noop_flat_map_stmt_kind<T: MutVisitor>(
|
||||
@ -1354,9 +1351,10 @@ pub fn noop_flat_map_stmt_kind<T: MutVisitor>(
|
||||
StmtKind::Semi(expr) => vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect(),
|
||||
StmtKind::Empty => smallvec![StmtKind::Empty],
|
||||
StmtKind::MacCall(mut mac) => {
|
||||
let MacCallStmt { mac: mac_, style: _, attrs } = mac.deref_mut();
|
||||
let MacCallStmt { mac: mac_, style: _, attrs, tokens } = mac.deref_mut();
|
||||
vis.visit_mac_call(mac_);
|
||||
visit_thin_attrs(attrs, vis);
|
||||
visit_lazy_tts(tokens, vis);
|
||||
smallvec![StmtKind::MacCall(mac)]
|
||||
}
|
||||
}
|
||||
|
@ -121,10 +121,14 @@ fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
}

pub trait CreateTokenStream: sync::Send + sync::Sync {
    fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream>;
    fn create_token_stream(&self) -> TokenStream;
}

impl CreateTokenStream for TokenStream {
    fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
        panic!("Cannot call `add_trailing_semi` on a `TokenStream`!");
    }
    fn create_token_stream(&self) -> TokenStream {
        self.clone()
    }
@ -141,6 +145,13 @@ pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream {
        LazyTokenStream(Lrc::new(Box::new(inner)))
    }

    /// Extends the captured stream by one token,
    /// which must be a trailing semicolon. This
    /// affects the `TokenStream` created by `make_tokenstream`.
    pub fn add_trailing_semi(&self) -> LazyTokenStream {
        LazyTokenStream(Lrc::new(self.0.add_trailing_semi()))
    }

    pub fn create_token_stream(&self) -> TokenStream {
        self.0.create_token_stream()
    }

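To illustrate how the parser is expected to use this (a simplified sketch of the `parse_full_stmt` changes further down in this diff, not a standalone API), the AST node and the captured tokens are updated separately once a real `;` has been consumed:

    // AST side: turn `StmtKind::Expr` into `StmtKind::Semi`.
    stmt = stmt.add_trailing_semicolon();
    // Token side: extend the captured stream by the semicolon just eaten.
    if let Some(tokens) = stmt.tokens_mut() {
        *tokens = tokens.add_trailing_semi();
    }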
@ -689,7 +689,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) {
        StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => visitor.visit_expr(expr),
        StmtKind::Empty => {}
        StmtKind::MacCall(ref mac) => {
            let MacCallStmt { ref mac, style: _, ref attrs } = **mac;
            let MacCallStmt { ref mac, style: _, ref attrs, tokens: _ } = **mac;
            visitor.visit_mac_call(mac);
            for attr in attrs.iter() {
                visitor.visit_attribute(attr);

@ -6,6 +6,7 @@
|
||||
|
||||
#![feature(bindings_after_at)]
|
||||
#![feature(iter_is_partitioned)]
|
||||
#![recursion_limit = "256"]
|
||||
|
||||
pub mod ast_validation;
|
||||
pub mod feature_gate;
|
||||
|
@ -132,6 +132,7 @@ fn stmt_let_underscore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<ast::Expr>) -> as
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: sp,
|
||||
attrs: ast::AttrVec::new(),
|
||||
tokens: None,
|
||||
});
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp, tokens: None }
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
|
||||
}
|
||||
|
@ -64,7 +64,6 @@ fn expand(
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: ast::StmtKind::Item(a.expect_item()),
|
||||
span,
|
||||
tokens: None,
|
||||
})));
|
||||
});
|
||||
} else {
|
||||
|
@ -451,7 +451,7 @@ enum AssertIntrinsic {
|
||||
Inhabited,
|
||||
ZeroValid,
|
||||
UninitValid,
|
||||
};
|
||||
}
|
||||
let panic_intrinsic = intrinsic.and_then(|i| match i {
|
||||
sym::assert_inhabited => Some(AssertIntrinsic::Inhabited),
|
||||
sym::assert_zero_valid => Some(AssertIntrinsic::ZeroValid),
|
||||
|
@ -374,7 +374,6 @@ macro_rules! make_stmts_default {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: e.span,
|
||||
kind: ast::StmtKind::Expr(e),
|
||||
tokens: None
|
||||
}]
|
||||
})
|
||||
};
|
||||
@ -617,7 +616,6 @@ fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)),
|
||||
span: self.span,
|
||||
tokens: None
|
||||
}])
|
||||
}
|
||||
|
||||
|
@ -140,12 +140,7 @@ pub fn lifetime(&self, span: Span, ident: Ident) -> ast::Lifetime {
|
||||
}
|
||||
|
||||
pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
|
||||
ast::Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: expr.span,
|
||||
kind: ast::StmtKind::Expr(expr),
|
||||
tokens: None,
|
||||
}
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) }
|
||||
}
|
||||
|
||||
pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt {
|
||||
@ -162,13 +157,9 @@ pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) ->
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: sp,
|
||||
attrs: AttrVec::new(),
|
||||
});
|
||||
ast::Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: ast::StmtKind::Local(local),
|
||||
span: sp,
|
||||
tokens: None,
|
||||
}
|
||||
});
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
|
||||
}
|
||||
|
||||
// Generates `let _: Type;`, which is usually used for type assertions.
|
||||
@ -180,17 +171,13 @@ pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span,
|
||||
attrs: AttrVec::new(),
|
||||
tokens: None,
|
||||
});
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span, tokens: None }
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span }
|
||||
}
|
||||
|
||||
pub fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
|
||||
ast::Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: ast::StmtKind::Item(item),
|
||||
span: sp,
|
||||
tokens: None,
|
||||
}
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Item(item), span: sp }
|
||||
}
|
||||
|
||||
pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
|
||||
@ -200,7 +187,6 @@ pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: expr.span,
|
||||
kind: ast::StmtKind::Expr(expr),
|
||||
tokens: None,
|
||||
}],
|
||||
)
|
||||
}
|
||||
|
@ -1274,12 +1274,6 @@ fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
|
||||
// we'll expand attributes on expressions separately
|
||||
if !stmt.is_expr() {
|
||||
let attr = if stmt.is_item() {
|
||||
// FIXME: Implement proper token collection for statements
|
||||
if let StmtKind::Item(item) = &mut stmt.kind {
|
||||
stmt.tokens = item.tokens.take()
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
self.take_first_attr(&mut stmt)
|
||||
} else {
|
||||
// Ignore derives on non-item statements for backwards compatibility.
|
||||
@ -1295,7 +1289,7 @@ fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
|
||||
}
|
||||
|
||||
if let StmtKind::MacCall(mac) = stmt.kind {
|
||||
let MacCallStmt { mac, style, attrs } = mac.into_inner();
|
||||
let MacCallStmt { mac, style, attrs, tokens: _ } = mac.into_inner();
|
||||
self.check_attributes(&attrs);
|
||||
let mut placeholder =
|
||||
self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts).make_stmts();
|
||||
@ -1312,10 +1306,10 @@ fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
|
||||
}
|
||||
|
||||
// The placeholder expander gives ids to statements, so we avoid folding the id here.
|
||||
let ast::Stmt { id, kind, span, tokens } = stmt;
|
||||
let ast::Stmt { id, kind, span } = stmt;
|
||||
noop_flat_map_stmt_kind(kind, self)
|
||||
.into_iter()
|
||||
.map(|kind| ast::Stmt { id, kind, span, tokens: tokens.clone() })
|
||||
.map(|kind| ast::Stmt { id, kind, span })
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
@ -104,8 +104,9 @@ fn mac_placeholder() -> ast::MacCall {
|
||||
mac: mac_placeholder(),
|
||||
style: ast::MacStmtStyle::Braces,
|
||||
attrs: ast::AttrVec::new(),
|
||||
tokens: None,
|
||||
});
|
||||
ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac), tokens: None }
|
||||
ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac) }
|
||||
}]),
|
||||
AstFragmentKind::Arms => AstFragment::Arms(smallvec![ast::Arm {
|
||||
attrs: Default::default(),
|
||||
@ -331,12 +332,8 @@ fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
|
||||
|
||||
// FIXME: We will need to preserve the original semicolon token and
|
||||
// span as part of #15701
|
||||
let empty_stmt = ast::Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: ast::StmtKind::Empty,
|
||||
span: DUMMY_SP,
|
||||
tokens: None,
|
||||
};
|
||||
let empty_stmt =
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Empty, span: DUMMY_SP };
|
||||
|
||||
if let Some(stmt) = stmts.pop() {
|
||||
if stmt.has_trailing_semicolon() {
|
||||
|
@ -810,7 +810,6 @@ fn block_to_stmt(b: ast::Block, resolver: &mut Resolver<'_>) -> ast::Stmt {
|
||||
id: resolver.next_node_id(),
|
||||
kind: ast::StmtKind::Expr(expr),
|
||||
span: rustc_span::DUMMY_SP,
|
||||
tokens: None,
|
||||
}
|
||||
}
|
||||
|
||||
@ -827,7 +826,6 @@ fn block_to_stmt(b: ast::Block, resolver: &mut Resolver<'_>) -> ast::Stmt {
|
||||
id: self.resolver.next_node_id(),
|
||||
span: rustc_span::DUMMY_SP,
|
||||
kind: ast::StmtKind::Expr(loop_expr),
|
||||
tokens: None,
|
||||
};
|
||||
|
||||
if self.within_static_or_const {
|
||||
|
@ -2345,7 +2345,7 @@ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &hir::Expr<'_>) {
|
||||
enum InitKind {
|
||||
Zeroed,
|
||||
Uninit,
|
||||
};
|
||||
}
|
||||
|
||||
/// Information about why a type cannot be initialized this way.
|
||||
/// Contains an error message and optionally a span to point at.
|
||||
|
@ -28,25 +28,40 @@

impl EarlyLintPass for RedundantSemicolons {
    fn check_block(&mut self, cx: &EarlyContext<'_>, block: &Block) {
        let mut after_item_stmt = false;
        let mut seq = None;
        for stmt in block.stmts.iter() {
            match (&stmt.kind, &mut seq) {
                (StmtKind::Empty, None) => seq = Some((stmt.span, false)),
                (StmtKind::Empty, Some(seq)) => *seq = (seq.0.to(stmt.span), true),
                (_, seq) => maybe_lint_redundant_semis(cx, seq),
                (_, seq) => {
                    maybe_lint_redundant_semis(cx, seq, after_item_stmt);
                    after_item_stmt = matches!(stmt.kind, StmtKind::Item(_));
                }
            }
        }
        maybe_lint_redundant_semis(cx, &mut seq);
        maybe_lint_redundant_semis(cx, &mut seq, after_item_stmt);
    }
}

fn maybe_lint_redundant_semis(cx: &EarlyContext<'_>, seq: &mut Option<(Span, bool)>) {
fn maybe_lint_redundant_semis(
    cx: &EarlyContext<'_>,
    seq: &mut Option<(Span, bool)>,
    after_item_stmt: bool,
) {
    if let Some((span, multiple)) = seq.take() {
        // FIXME: Find a better way of ignoring the trailing
        // semicolon from macro expansion
        if span == rustc_span::DUMMY_SP {
            return;
        }

        // FIXME: Lint on semicolons after item statements
        // once doing so doesn't break bootstrapping
        if after_item_stmt {
            return;
        }

        cx.struct_span_lint(REDUNDANT_SEMICOLONS, span, |lint| {
            let (msg, rem) = if multiple {
                ("unnecessary trailing semicolons", "remove these semicolons")

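The new UI test added later in this diff (src/test/ui/lint/redundant-semicolon/item-stmt-semi.rs) pins down the behavior the FIXME above describes: semicolons directly following an item statement are deliberately not linted yet, so code like the following (condensed from that test) compiles cleanly under the lint:

    #![deny(redundant_semicolons)]

    fn main() {
        fn inner() {};  // trailing `;` after an item statement: not linted yet
        struct Bar {};  // likewise
    }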
@ -1131,7 +1131,7 @@ fn emit_ffi_unsafe_type_lint(
|
||||
fn check_for_opaque_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
|
||||
struct ProhibitOpaqueTypes<'a, 'tcx> {
|
||||
cx: &'a LateContext<'tcx>,
|
||||
};
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> ty::fold::TypeVisitor<'tcx> for ProhibitOpaqueTypes<'a, 'tcx> {
|
||||
type BreakTy = Ty<'tcx>;
|
||||
|
@ -254,7 +254,7 @@ fn super_body(
|
||||
macro_rules! basic_blocks {
|
||||
(mut) => (body.basic_blocks_mut().iter_enumerated_mut());
|
||||
() => (body.basic_blocks().iter_enumerated());
|
||||
};
|
||||
}
|
||||
for (bb, data) in basic_blocks!($($mutability)?) {
|
||||
self.visit_basic_block_data(bb, data);
|
||||
}
|
||||
@ -275,7 +275,7 @@ macro_rules! basic_blocks {
|
||||
macro_rules! type_annotations {
|
||||
(mut) => (body.user_type_annotations.iter_enumerated_mut());
|
||||
() => (body.user_type_annotations.iter_enumerated());
|
||||
};
|
||||
}
|
||||
|
||||
for (index, annotation) in type_annotations!($($mutability)?) {
|
||||
self.visit_user_type_annotation(
|
||||
@ -909,7 +909,7 @@ fn visit_location(
|
||||
macro_rules! basic_blocks {
|
||||
(mut) => (body.basic_blocks_mut());
|
||||
() => (body.basic_blocks());
|
||||
};
|
||||
}
|
||||
let basic_block = & $($mutability)? basic_blocks!($($mutability)?)[location.block];
|
||||
if basic_block.statements.len() == location.statement_index {
|
||||
if let Some(ref $($mutability)? terminator) = basic_block.terminator {
|
||||
|
@ -540,7 +540,7 @@ fn polymorphize<'tcx>(
|
||||
|
||||
struct PolymorphizationFolder<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
};
|
||||
}
|
||||
|
||||
impl ty::TypeFolder<'tcx> for PolymorphizationFolder<'tcx> {
|
||||
fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
|
||||
|
@ -15,7 +15,7 @@
|
||||
|
||||
struct UsedParamsNeedSubstVisitor<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
};
|
||||
}
|
||||
|
||||
impl<'tcx> TypeVisitor<'tcx> for UsedParamsNeedSubstVisitor<'tcx> {
|
||||
type BreakTy = ();
|
||||
|
@ -6,6 +6,7 @@
|
||||
#![feature(or_patterns)]
|
||||
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::attr::HasAttrs;
|
||||
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
|
||||
use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree};
|
||||
use rustc_ast_pretty::pprust;
|
||||
@ -251,29 +252,23 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
|
||||
// before we fall back to the stringification.
|
||||
|
||||
let convert_tokens =
|
||||
|tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
|
||||
|tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
|
||||
|
||||
let tokens = match *nt {
|
||||
Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
|
||||
Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
|
||||
Nonterminal::NtStmt(ref stmt) => {
|
||||
// FIXME: We currently only collect tokens for `:stmt`
|
||||
// matchers in `macro_rules!` macros. When we start collecting
|
||||
// tokens for attributes on statements, we will need to prepend
|
||||
// attributes here
|
||||
convert_tokens(&stmt.tokens)
|
||||
}
|
||||
Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
|
||||
Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
|
||||
Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
|
||||
Nonterminal::NtStmt(ref stmt) => prepend_attrs(stmt.attrs(), stmt.tokens()),
|
||||
Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
|
||||
Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
|
||||
Nonterminal::NtIdent(ident, is_raw) => {
|
||||
Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
|
||||
}
|
||||
Nonterminal::NtLifetime(ident) => {
|
||||
Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
|
||||
}
|
||||
Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
|
||||
Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
|
||||
Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
|
||||
Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()),
|
||||
Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()),
|
||||
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
|
||||
Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
|
||||
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
|
||||
if expr.tokens.is_none() {
|
||||
|
@ -1213,14 +1213,20 @@ pub fn collect_tokens<R>(
|
||||
//
|
||||
// This also makes `Parser` very cheap to clone, since
|
||||
// there is no intermediate collection buffer to clone.
|
||||
#[derive(Clone)]
|
||||
struct LazyTokenStreamImpl {
|
||||
start_token: (Token, Spacing),
|
||||
cursor_snapshot: TokenCursor,
|
||||
num_calls: usize,
|
||||
desugar_doc_comments: bool,
|
||||
trailing_semi: bool,
|
||||
}
|
||||
impl CreateTokenStream for LazyTokenStreamImpl {
|
||||
fn create_token_stream(&self) -> TokenStream {
|
||||
let mut num_calls = self.num_calls;
|
||||
if self.trailing_semi {
|
||||
num_calls += 1;
|
||||
}
|
||||
// The token produced by the final call to `next` or `next_desugared`
|
||||
// was not actually consumed by the callback. The combination
|
||||
// of chaining the initial token and using `take` produces the desired
|
||||
@ -1228,17 +1234,25 @@ fn create_token_stream(&self) -> TokenStream {
|
||||
// and omit the final token otherwise.
|
||||
let mut cursor_snapshot = self.cursor_snapshot.clone();
|
||||
let tokens = std::iter::once(self.start_token.clone())
|
||||
.chain((0..self.num_calls).map(|_| {
|
||||
.chain((0..num_calls).map(|_| {
|
||||
if self.desugar_doc_comments {
|
||||
cursor_snapshot.next_desugared()
|
||||
} else {
|
||||
cursor_snapshot.next()
|
||||
}
|
||||
}))
|
||||
.take(self.num_calls);
|
||||
.take(num_calls);
|
||||
|
||||
make_token_stream(tokens)
|
||||
}
|
||||
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
|
||||
if self.trailing_semi {
|
||||
panic!("Called `add_trailing_semi` twice!");
|
||||
}
|
||||
let mut new = self.clone();
|
||||
new.trailing_semi = true;
|
||||
Box::new(new)
|
||||
}
|
||||
}
|
||||
|
||||
let lazy_impl = LazyTokenStreamImpl {
|
||||
@ -1246,6 +1260,7 @@ fn create_token_stream(&self) -> TokenStream {
|
||||
num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls,
|
||||
cursor_snapshot,
|
||||
desugar_doc_comments: self.desugar_doc_comments,
|
||||
trailing_semi: false,
|
||||
};
|
||||
Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
|
||||
}
|
||||
|
@ -117,8 +117,8 @@ pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, Nonter
|
||||
let (stmt, tokens) = self.collect_tokens(|this| this.parse_stmt())?;
|
||||
match stmt {
|
||||
Some(mut s) => {
|
||||
if s.tokens.is_none() {
|
||||
s.tokens = tokens;
|
||||
if s.tokens().is_none() {
|
||||
s.set_tokens(tokens);
|
||||
}
|
||||
token::NtStmt(s)
|
||||
}
|
||||
|
@ -7,8 +7,10 @@
|
||||
use crate::maybe_whole;
|
||||
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::attr::HasAttrs;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, TokenKind};
|
||||
use rustc_ast::tokenstream::LazyTokenStream;
|
||||
use rustc_ast::util::classify;
|
||||
use rustc_ast::{AttrStyle, AttrVec, Attribute, MacCall, MacCallStmt, MacStmtStyle};
|
||||
use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, Local, Stmt, StmtKind, DUMMY_NODE_ID};
|
||||
@ -31,45 +33,75 @@ pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
|
||||
}
|
||||
|
||||
fn parse_stmt_without_recovery(&mut self) -> PResult<'a, Option<Stmt>> {
|
||||
maybe_whole!(self, NtStmt, |x| Some(x));
|
||||
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let mut attrs = self.parse_outer_attributes()?;
|
||||
let has_attrs = !attrs.is_empty();
|
||||
let lo = self.token.span;
|
||||
|
||||
let stmt = if self.eat_keyword(kw::Let) {
|
||||
self.parse_local_mk(lo, attrs.into())?
|
||||
} else if self.is_kw_followed_by_ident(kw::Mut) {
|
||||
self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
|
||||
} else if self.is_kw_followed_by_ident(kw::Auto) {
|
||||
self.bump(); // `auto`
|
||||
let msg = "write `let` instead of `auto` to introduce a new variable";
|
||||
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
|
||||
} else if self.is_kw_followed_by_ident(sym::var) {
|
||||
self.bump(); // `var`
|
||||
let msg = "write `let` instead of `var` to introduce a new variable";
|
||||
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
|
||||
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
|
||||
// We have avoided contextual keywords like `union`, items with `crate` visibility,
|
||||
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
||||
// that starts like a path (1 token), but it fact not a path.
|
||||
// Also, we avoid stealing syntax from `parse_item_`.
|
||||
self.parse_stmt_path_start(lo, attrs)?
|
||||
} else if let Some(item) = self.parse_item_common(attrs.clone(), false, true, |_| true)? {
|
||||
// FIXME: Bad copy of attrs
|
||||
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
|
||||
} else if self.eat(&token::Semi) {
|
||||
// Do not attempt to parse an expression if we're done here.
|
||||
self.error_outer_attrs(&attrs);
|
||||
self.mk_stmt(lo, StmtKind::Empty)
|
||||
} else if self.token != token::CloseDelim(token::Brace) {
|
||||
// Remainder are line-expr stmts.
|
||||
let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
|
||||
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
|
||||
} else {
|
||||
self.error_outer_attrs(&attrs);
|
||||
return Ok(None);
|
||||
maybe_whole!(self, NtStmt, |stmt| {
|
||||
let mut stmt = stmt;
|
||||
stmt.visit_attrs(|stmt_attrs| {
|
||||
mem::swap(stmt_attrs, &mut attrs);
|
||||
stmt_attrs.extend(attrs);
|
||||
});
|
||||
Some(stmt)
|
||||
});
|
||||
|
||||
let parse_stmt_inner = |this: &mut Self| {
|
||||
let stmt = if this.eat_keyword(kw::Let) {
|
||||
this.parse_local_mk(lo, attrs.into())?
|
||||
} else if this.is_kw_followed_by_ident(kw::Mut) {
|
||||
this.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
|
||||
} else if this.is_kw_followed_by_ident(kw::Auto) {
|
||||
this.bump(); // `auto`
|
||||
let msg = "write `let` instead of `auto` to introduce a new variable";
|
||||
this.recover_stmt_local(lo, attrs.into(), msg, "let")?
|
||||
} else if this.is_kw_followed_by_ident(sym::var) {
|
||||
this.bump(); // `var`
|
||||
let msg = "write `let` instead of `var` to introduce a new variable";
|
||||
this.recover_stmt_local(lo, attrs.into(), msg, "let")?
|
||||
} else if this.check_path()
|
||||
&& !this.token.is_qpath_start()
|
||||
&& !this.is_path_start_item()
|
||||
{
|
||||
// We have avoided contextual keywords like `union`, items with `crate` visibility,
|
||||
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
||||
// that starts like a path (1 token), but it fact not a path.
|
||||
// Also, we avoid stealing syntax from `parse_item_`.
|
||||
this.parse_stmt_path_start(lo, attrs)?
|
||||
} else if let Some(item) =
|
||||
this.parse_item_common(attrs.clone(), false, true, |_| true)?
|
||||
{
|
||||
// FIXME: Bad copy of attrs
|
||||
this.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
|
||||
} else if this.eat(&token::Semi) {
|
||||
// Do not attempt to parse an expression if we're done here.
|
||||
this.error_outer_attrs(&attrs);
|
||||
this.mk_stmt(lo, StmtKind::Empty)
|
||||
} else if this.token != token::CloseDelim(token::Brace) {
|
||||
// Remainder are line-expr stmts.
|
||||
let e = this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
|
||||
this.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
|
||||
} else {
|
||||
this.error_outer_attrs(&attrs);
|
||||
return Ok(None);
|
||||
};
|
||||
Ok(Some(stmt))
|
||||
};
|
||||
Ok(Some(stmt))
|
||||
|
||||
let stmt = if has_attrs {
|
||||
let (mut stmt, tokens) = self.collect_tokens(parse_stmt_inner)?;
|
||||
if let Some(stmt) = &mut stmt {
|
||||
// If we already have tokens (e.g. due to encounting an `NtStmt`),
|
||||
// use those instead.
|
||||
if stmt.tokens().is_none() {
|
||||
stmt.set_tokens(tokens);
|
||||
}
|
||||
}
|
||||
stmt
|
||||
} else {
|
||||
parse_stmt_inner(self)?
|
||||
};
|
||||
Ok(stmt)
|
||||
}
|
||||
|
||||
fn parse_stmt_path_start(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, Stmt> {
|
||||
@ -107,7 +139,7 @@ fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResu
|
||||
|
||||
let kind = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof
|
||||
{
|
||||
StmtKind::MacCall(P(MacCallStmt { mac, style, attrs }))
|
||||
StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
|
||||
} else {
|
||||
// Since none of the above applied, this is an expression statement macro.
|
||||
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
|
||||
@ -219,7 +251,7 @@ fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
|
||||
}
|
||||
};
|
||||
let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span };
|
||||
Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs }))
|
||||
Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None }))
|
||||
}
|
||||
|
||||
/// Parses the RHS of a local variable declaration (e.g., '= 14;').
|
||||
@ -376,6 +408,12 @@ pub fn parse_full_stmt(
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let add_semi_token = |tokens: Option<&mut LazyTokenStream>| {
|
||||
if let Some(tokens) = tokens {
|
||||
*tokens = tokens.add_trailing_semi();
|
||||
}
|
||||
};
|
||||
|
||||
let mut eat_semi = true;
|
||||
match stmt.kind {
|
||||
// Expression without semicolon.
|
||||
@ -417,6 +455,7 @@ pub fn parse_full_stmt(
|
||||
*expr = self.mk_expr_err(sp);
|
||||
}
|
||||
}
|
||||
StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
|
||||
StmtKind::Local(ref mut local) => {
|
||||
if let Err(e) = self.expect_semi() {
|
||||
// We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
|
||||
@ -430,13 +469,18 @@ pub fn parse_full_stmt(
|
||||
}
|
||||
}
|
||||
eat_semi = false;
|
||||
// We just checked that there's a semicolon in the tokenstream,
|
||||
// so capture it
|
||||
add_semi_token(local.tokens.as_mut());
|
||||
}
|
||||
StmtKind::Empty => eat_semi = false,
|
||||
_ => {}
|
||||
StmtKind::Empty | StmtKind::Item(_) | StmtKind::Semi(_) => eat_semi = false,
|
||||
}
|
||||
|
||||
if eat_semi && self.eat(&token::Semi) {
|
||||
stmt = stmt.add_trailing_semicolon();
|
||||
// We just checked that we have a semicolon in the tokenstream,
|
||||
// so capture it
|
||||
add_semi_token(stmt.tokens_mut());
|
||||
}
|
||||
stmt.span = stmt.span.to(self.prev_token.span);
|
||||
Ok(Some(stmt))
|
||||
@ -447,7 +491,7 @@ pub(super) fn mk_block(&self, stmts: Vec<Stmt>, rules: BlockCheckMode, span: Spa
|
||||
}
|
||||
|
||||
pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt {
|
||||
Stmt { id: DUMMY_NODE_ID, kind, span, tokens: None }
|
||||
Stmt { id: DUMMY_NODE_ID, kind, span }
|
||||
}
|
||||
|
||||
pub(super) fn mk_stmt_err(&self, span: Span) -> Stmt {
|
||||
|
@ -309,7 +309,7 @@ enum NodeKind {
|
||||
InherentImpl,
|
||||
Fn,
|
||||
Other,
|
||||
};
|
||||
}
|
||||
|
||||
let node_kind = match node {
|
||||
Node::TraitItem(item) => match item.kind {
|
||||
|
@ -477,7 +477,7 @@ pub(super) fn check_opaque_for_inheriting_lifetimes(
|
||||
struct ProhibitOpaqueVisitor<'tcx> {
|
||||
opaque_identity_ty: Ty<'tcx>,
|
||||
generics: &'tcx ty::Generics,
|
||||
};
|
||||
}
|
||||
|
||||
impl<'tcx> ty::fold::TypeVisitor<'tcx> for ProhibitOpaqueVisitor<'tcx> {
|
||||
type BreakTy = Option<Ty<'tcx>>;
|
||||
|
@ -919,7 +919,7 @@ fn test_from_iter_partially_drained_in_place_specialization() {
|
||||
|
||||
#[test]
|
||||
fn test_from_iter_specialization_with_iterator_adapters() {
|
||||
fn assert_in_place_trait<T: InPlaceIterable>(_: &T) {};
|
||||
fn assert_in_place_trait<T: InPlaceIterable>(_: &T) {}
|
||||
let src: Vec<usize> = vec![0usize; 256];
|
||||
let srcptr = src.as_ptr();
|
||||
let iter = src
|
||||
@ -1198,7 +1198,7 @@ fn drain_filter_consumed_panic() {
|
||||
struct Check {
|
||||
index: usize,
|
||||
drop_counts: Rc<Mutex<Vec<usize>>>,
|
||||
};
|
||||
}
|
||||
|
||||
impl Drop for Check {
|
||||
fn drop(&mut self) {
|
||||
@ -1250,7 +1250,7 @@ fn drain_filter_unconsumed_panic() {
|
||||
struct Check {
|
||||
index: usize,
|
||||
drop_counts: Rc<Mutex<Vec<usize>>>,
|
||||
};
|
||||
}
|
||||
|
||||
impl Drop for Check {
|
||||
fn drop(&mut self) {
|
||||
|
@ -1182,7 +1182,7 @@ fn wrap_buf<'b, 'c, F>(&'b mut self, wrap: F) -> Formatter<'c>
|
||||
/// ```
|
||||
/// use std::fmt;
|
||||
///
|
||||
/// struct Foo { nb: i32 };
|
||||
/// struct Foo { nb: i32 }
|
||||
///
|
||||
/// impl Foo {
|
||||
/// fn new(nb: i32) -> Foo {
|
||||
|
@ -21,7 +21,7 @@
|
||||
///
|
||||
/// let read_future = poll_fn(read_line);
|
||||
/// assert_eq!(read_future.await, "Hello, World!".to_owned());
|
||||
/// # };
|
||||
/// # }
|
||||
/// ```
|
||||
#[unstable(feature = "future_poll_fn", issue = "72302")]
|
||||
pub fn poll_fn<T, F>(f: F) -> PollFn<F>
|
||||
|
@ -348,7 +348,7 @@ pub const fn uninit() -> MaybeUninit<T> {
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// enum NotZero { One = 1, Two = 2 };
|
||||
/// enum NotZero { One = 1, Two = 2 }
|
||||
///
|
||||
/// let x = MaybeUninit::<(u8, NotZero)>::zeroed();
|
||||
/// let x = unsafe { x.assume_init() };
|
||||
|
@ -18,7 +18,7 @@ fn test() {
|
||||
struct Pair {
|
||||
fst: isize,
|
||||
snd: isize,
|
||||
};
|
||||
}
|
||||
let mut p = Pair { fst: 10, snd: 20 };
|
||||
let pptr: *mut Pair = &mut p;
|
||||
let iptr: *mut isize = pptr as *mut isize;
|
||||
|
@ -265,14 +265,14 @@ fn get_timed_out_tests(running_tests: &mut TestMap) -> Vec<TestDesc> {
|
||||
running_tests.remove(test);
|
||||
}
|
||||
timed_out
|
||||
};
|
||||
}
|
||||
|
||||
fn calc_timeout(running_tests: &TestMap) -> Option<Duration> {
|
||||
running_tests.values().min().map(|next_timeout| {
|
||||
let now = Instant::now();
|
||||
if *next_timeout >= now { *next_timeout - now } else { Duration::new(0, 0) }
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
if concurrency == 1 {
|
||||
while !remaining.is_empty() {
|
||||
|
@ -30,7 +30,7 @@ fn def_et3() -> Et3 {
|
||||
impl Tr1 for A {
|
||||
type As1 = core::ops::Range<u8>;
|
||||
fn mk(&self) -> Self::As1 { 0..10 }
|
||||
};
|
||||
}
|
||||
Box::new(A)
|
||||
}
|
||||
pub fn use_et3() {
|
||||
|
@ -33,7 +33,7 @@ fn assert_forall_tr2<T: for<'a> Tr2<'a>>(_: T) {}
|
||||
impl Tr1 for A {
|
||||
type As1 = core::ops::Range<u8>;
|
||||
fn mk(&self) -> Self::As1 { 0..10 }
|
||||
};
|
||||
}
|
||||
&A
|
||||
};
|
||||
pub fn use_et3() {
|
||||
|
@ -35,7 +35,7 @@ fn def_et3() -> Box<dyn Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>
|
||||
impl Tr1 for A {
|
||||
type As1 = core::ops::Range<u8>;
|
||||
fn mk(&self) -> Self::As1 { 0..10 }
|
||||
};
|
||||
}
|
||||
let x /* : Box<dyn Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>>> */
|
||||
= Box::new(A);
|
||||
x
|
||||
|
@ -39,7 +39,7 @@ fn assert_forall_tr2<T: for<'a> Tr2<'a>>(_: T) {}
|
||||
impl Tr1 for A {
|
||||
type As1 = core::ops::Range<u8>;
|
||||
fn mk(&self) -> Self::As1 { 0..10 }
|
||||
};
|
||||
}
|
||||
let x: impl Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>> = A;
|
||||
x
|
||||
};
|
||||
|
@ -27,7 +27,7 @@ fn def_et3() -> impl Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>>
|
||||
impl Tr1 for A {
|
||||
type As1 = core::ops::Range<u8>;
|
||||
fn mk(self) -> Self::As1 { 0..10 }
|
||||
};
|
||||
}
|
||||
A
|
||||
}
|
||||
|
||||
|
@ -31,7 +31,7 @@ fn def_et3() -> Et3 {
|
||||
impl Tr1 for A {
|
||||
type As1 = core::ops::Range<u8>;
|
||||
fn mk(self) -> Self::As1 { 0..10 }
|
||||
};
|
||||
}
|
||||
A
|
||||
}
|
||||
pub fn use_et3() {
|
||||
|
@ -15,14 +15,14 @@ impl<const N: usize> Marker<N> for Example<N> {}
|
||||
|
||||
fn make_marker() -> impl Marker<{
|
||||
#[macro_export]
|
||||
macro_rules! const_macro { () => {{ 3 }} }; inline!()
|
||||
macro_rules! const_macro { () => {{ 3 }} } inline!()
|
||||
}> {
|
||||
Example::<{ const_macro!() }>
|
||||
}
|
||||
|
||||
fn from_marker(_: impl Marker<{
|
||||
#[macro_export]
|
||||
macro_rules! inline { () => {{ 3 }} }; inline!()
|
||||
macro_rules! inline { () => {{ 3 }} } inline!()
|
||||
}>) {}
|
||||
|
||||
fn main() {
|
||||
@ -30,7 +30,7 @@ fn main() {
|
||||
#[macro_export]
|
||||
macro_rules! gimme_a_const {
|
||||
($rusty: ident) => {{ let $rusty = 3; *&$rusty }}
|
||||
};
|
||||
}
|
||||
gimme_a_const!(run)
|
||||
}>;
|
||||
|
||||
@ -42,13 +42,13 @@ macro_rules! gimme_a_const {
|
||||
|
||||
let _ok: [u8; {
|
||||
#[macro_export]
|
||||
macro_rules! const_two { () => {{ 2 }} };
|
||||
macro_rules! const_two { () => {{ 2 }} }
|
||||
const_two!()
|
||||
}];
|
||||
|
||||
let _ok = [0; {
|
||||
#[macro_export]
|
||||
macro_rules! const_three { () => {{ 3 }} };
|
||||
macro_rules! const_three { () => {{ 3 }} }
|
||||
const_three!()
|
||||
}];
|
||||
let _ok = [0; const_three!()];
|
||||
|
@ -5,6 +5,6 @@
|
||||
|
||||
pub fn main() {
|
||||
fn f() {
|
||||
};
|
||||
}
|
||||
let _: Box<fn()> = box (f as fn());
|
||||
}
|
||||
|
@ -5,11 +5,11 @@
|
||||
|
||||
pub fn main() {
|
||||
let one = || {
|
||||
enum r { a };
|
||||
enum r { a }
|
||||
r::a as usize
|
||||
};
|
||||
let two = || {
|
||||
enum r { a };
|
||||
enum r { a }
|
||||
r::a as usize
|
||||
};
|
||||
one(); two();
|
||||
|
10
src/test/ui/lint/redundant-semicolon/item-stmt-semi.rs
Normal file
10
src/test/ui/lint/redundant-semicolon/item-stmt-semi.rs
Normal file
@ -0,0 +1,10 @@
|
||||
// check-pass
|
||||
// This test should stop compiling
|
||||
// we decide to enable this lint for item statements.
|
||||
|
||||
#![deny(redundant_semicolons)]
|
||||
|
||||
fn main() {
|
||||
fn inner() {};
|
||||
struct Bar {};
|
||||
}
|
@ -3,7 +3,7 @@ pub fn main() {
|
||||
|
||||
macro_rules! mylambda_tt {
|
||||
($x:ident, $body:expr) => ({
|
||||
fn f($x: isize) -> isize { return $body; };
|
||||
fn f($x: isize) -> isize { return $body; }
|
||||
f
|
||||
})
|
||||
}
|
||||
|
@ -8,7 +8,7 @@ mod m {
|
||||
|
||||
macro_rules! foo {
|
||||
($p:path) => ({
|
||||
fn f() -> $p { 10 };
|
||||
fn f() -> $p { 10 }
|
||||
f()
|
||||
})
|
||||
}
|
||||
|
@ -13,19 +13,28 @@
|
||||
|
||||
extern crate attr_stmt_expr;
|
||||
extern crate test_macros;
|
||||
use attr_stmt_expr::{expect_let, expect_print_stmt, expect_expr, expect_print_expr};
|
||||
use attr_stmt_expr::{expect_let, expect_my_macro_stmt, expect_expr, expect_my_macro_expr};
|
||||
use test_macros::print_attr;
|
||||
use std::println;
|
||||
|
||||
// We don't use `std::println` so that we avoid loading hygiene
|
||||
// information from libstd, which would affect the SyntaxContext ids
|
||||
macro_rules! my_macro {
|
||||
($($tt:tt)*) => { () }
|
||||
}
|
||||
|
||||
|
||||
fn print_str(string: &'static str) {
|
||||
// macros are handled a bit differently
|
||||
#[expect_print_expr]
|
||||
println!("{}", string)
|
||||
#[expect_my_macro_expr]
|
||||
my_macro!("{}", string)
|
||||
}
|
||||
|
||||
macro_rules! make_stmt {
|
||||
($stmt:stmt) => {
|
||||
$stmt
|
||||
#[print_attr]
|
||||
#[rustc_dummy]
|
||||
$stmt; // This semicolon is *not* passed to the macro,
|
||||
// since `$stmt` is already a statement.
|
||||
}
|
||||
}
|
||||
|
||||
@ -35,6 +44,10 @@ macro_rules! second_make_stmt {
|
||||
}
|
||||
}
|
||||
|
||||
// The macro will see a semicolon here
|
||||
#[print_attr]
|
||||
struct ItemWithSemi;
|
||||
|
||||
|
||||
fn main() {
|
||||
make_stmt!(struct Foo {});
|
||||
@ -44,8 +57,8 @@ fn main() {
|
||||
let string = "Hello, world!";
|
||||
|
||||
#[print_attr]
|
||||
#[expect_print_stmt]
|
||||
println!("{}", string);
|
||||
#[expect_my_macro_stmt]
|
||||
my_macro!("{}", string);
|
||||
|
||||
#[print_attr]
|
||||
second_make_stmt!(#[allow(dead_code)] struct Bar {});
|
||||
@ -54,6 +67,12 @@ fn main() {
|
||||
#[rustc_dummy]
|
||||
struct Other {};
|
||||
|
||||
// The macro also sees a semicolon,
|
||||
// for consistency with the `ItemWithSemi` case above.
|
||||
#[print_attr]
|
||||
#[rustc_dummy]
|
||||
struct NonBracedStruct;
|
||||
|
||||
#[expect_expr]
|
||||
print_str("string")
|
||||
}
|
||||
|
@ -1,70 +1,117 @@
|
||||
PRINT-ATTR INPUT (DISPLAY): struct ItemWithSemi ;
|
||||
PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Ident {
|
||||
ident: "struct",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:49:1: 49:7 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "ItemWithSemi",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:49:8: 49:20 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: ';',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:49:20: 49:21 (#0),
|
||||
},
|
||||
]
|
||||
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Foo { }
|
||||
PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#11),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "rustc_dummy",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#11),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#11),
|
||||
},
|
||||
Ident {
|
||||
ident: "struct",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:53:16: 53:22 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "Foo",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:53:23: 53:26 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Brace,
|
||||
stream: TokenStream [],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:53:27: 53:29 (#0),
|
||||
},
|
||||
]
|
||||
PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ;
|
||||
PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:56:5: 56:6 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "expect_let",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:56:7: 56:17 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:56:6: 56:18 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "let",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:57:5: 57:8 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "string",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:57:9: 57:15 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: '=',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:57:16: 57:17 (#0),
|
||||
},
|
||||
Literal {
|
||||
kind: Str,
|
||||
symbol: "Hello, world!",
|
||||
suffix: None,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:57:18: 57:33 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: ';',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:57:33: 57:34 (#0),
|
||||
},
|
||||
]
|
||||
PRINT-ATTR INPUT (DISPLAY): #[expect_print_stmt] println ! ("{}", string) ;
|
||||
PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro ! ("{}", string) ;
|
||||
PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:60:5: 60:6 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "expect_print_stmt",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
ident: "expect_my_macro_stmt",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:60:7: 60:27 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:60:6: 60:28 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "println",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
ident: "my_macro",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:61:5: 61:13 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: '!',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:61:13: 61:14 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Parenthesis,
|
||||
@ -73,36 +120,36 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
kind: Str,
|
||||
symbol: "{}",
|
||||
suffix: None,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:61:15: 61:19 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: ',',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:61:19: 61:20 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "string",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:61:21: 61:27 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:61:14: 61:28 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: ';',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:61:28: 61:29 (#0),
|
||||
},
|
||||
]
|
||||
PRINT-ATTR INPUT (DISPLAY): second_make_stmt ! (#[allow(dead_code)] struct Bar { }) ;
|
||||
PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Ident {
|
||||
ident: "second_make_stmt",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:5: 64:21 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: '!',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:21: 64:22 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Parenthesis,
|
||||
@ -110,48 +157,104 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:23: 64:24 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "allow",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:25: 64:30 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Parenthesis,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "dead_code",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:31: 64:40 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:30: 64:41 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:24: 64:42 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "struct",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:43: 64:49 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "Bar",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:50: 64:53 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Brace,
|
||||
stream: TokenStream [],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:54: 64:56 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:22: 64:57 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: ';',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:57: 64:58 (#0),
|
||||
},
|
||||
]
|
||||
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] #[allow(dead_code)] struct Bar { }
|
||||
PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#32),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "rustc_dummy",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#32),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#32),
|
||||
},
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:23: 64:24 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "allow",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:25: 64:30 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Parenthesis,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "dead_code",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:31: 64:40 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:30: 64:41 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:24: 64:42 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "struct",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:43: 64:49 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "Bar",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:50: 64:53 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Brace,
|
||||
stream: TokenStream [],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:64:54: 64:56 (#0),
|
||||
},
|
||||
]
|
||||
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Other { }
|
||||
@ -159,29 +262,60 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:67:5: 67:6 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "rustc_dummy",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:67:7: 67:18 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:67:6: 67:19 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "struct",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:68:5: 68:11 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "Other",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:68:12: 68:17 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Brace,
|
||||
stream: TokenStream [],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:68:18: 68:20 (#0),
|
||||
},
|
||||
]
|
||||
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct NonBracedStruct ;
|
||||
PRINT-ATTR INPUT (DEBUG): TokenStream [
|
||||
Punct {
|
||||
ch: '#',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:73:5: 73:6 (#0),
|
||||
},
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
ident: "rustc_dummy",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:73:7: 73:18 (#0),
|
||||
},
|
||||
],
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:73:6: 73:19 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "struct",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:74:5: 74:11 (#0),
|
||||
},
|
||||
Ident {
|
||||
ident: "NonBracedStruct",
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:74:12: 74:27 (#0),
|
||||
},
|
||||
Punct {
|
||||
ch: ';',
|
||||
spacing: Alone,
|
||||
span: $DIR/allowed-attr-stmt-expr.rs:74:27: 74:28 (#0),
|
||||
},
|
||||
]
|
||||
|
@ -11,19 +11,26 @@
|
||||
extern crate attr_stmt_expr;
|
||||
|
||||
use test_macros::print_attr;
|
||||
use std::println;
|
||||
use attr_stmt_expr::{expect_let, expect_print_stmt, expect_expr, expect_print_expr};
|
||||
use attr_stmt_expr::{expect_let, expect_my_macro_stmt, expect_expr, expect_my_macro_expr};
|
||||
|
||||
// We don't use `std::println` so that we avoid loading hygiene
|
||||
// information from libstd, which would affect the SyntaxContext ids
|
||||
macro_rules! my_macro {
|
||||
($($tt:tt)*) => { () }
|
||||
}
|
||||
|
||||
fn print_str(string: &'static str) {
|
||||
// macros are handled a bit differently
|
||||
#[expect_print_expr]
|
||||
#[expect_my_macro_expr]
|
||||
//~^ ERROR attributes on expressions are experimental
|
||||
//~| HELP add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable
|
||||
println!("{}", string)
|
||||
my_macro!("{}", string)
|
||||
}
|
||||
|
||||
macro_rules! make_stmt {
|
||||
($stmt:stmt) => {
|
||||
#[print_attr]
|
||||
#[rustc_dummy]
|
||||
$stmt
|
||||
}
|
||||
}
|
||||
@ -42,8 +49,8 @@ fn main() {
|
||||
let string = "Hello, world!";
|
||||
|
||||
#[print_attr]
|
||||
#[expect_print_stmt]
|
||||
println!("{}", string);
|
||||
#[expect_my_macro_stmt]
|
||||
my_macro!("{}", string);
|
||||
|
||||
#[print_attr]
|
||||
second_make_stmt!(#[allow(dead_code)] struct Bar {});
|
||||
|
@ -1,14 +1,14 @@
|
||||
error[E0658]: attributes on expressions are experimental
|
||||
--> $DIR/attr-stmt-expr.rs:19:5
|
||||
--> $DIR/attr-stmt-expr.rs:24:5
|
||||
|
|
||||
LL | #[expect_print_expr]
|
||||
| ^^^^^^^^^^^^^^^^^^^^
|
||||
LL | #[expect_my_macro_expr]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
= note: see issue #15701 <https://github.com/rust-lang/rust/issues/15701> for more information
|
||||
= help: add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable
|
||||
|
||||
error[E0658]: attributes on expressions are experimental
|
||||
--> $DIR/attr-stmt-expr.rs:55:5
|
||||
--> $DIR/attr-stmt-expr.rs:62:5
|
||||
|
|
||||
LL | #[expect_expr]
|
||||
| ^^^^^^^^^^^^^^
|
||||
|
@ -1,70 +1,101 @@
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Foo { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#8),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#8),
},
],
span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#8),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:45:16: 45:22 (#0),
},
Ident {
ident: "Foo",
span: $DIR/attr-stmt-expr.rs:45:23: 45:26 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:45:27: 45:29 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:5: 48:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "expect_let",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:7: 48:17 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:6: 48:18 (#0),
},
Ident {
ident: "let",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:49:5: 49:8 (#0),
},
Ident {
ident: "string",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:49:9: 49:15 (#0),
},
Punct {
ch: '=',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:49:16: 49:17 (#0),
},
Literal {
kind: Str,
symbol: "Hello, world!",
suffix: None,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:49:18: 49:33 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:49:33: 49:34 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[expect_print_stmt] println ! ("{}", string) ;
PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro ! ("{}", string) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:52:5: 52:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "expect_print_stmt",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
ident: "expect_my_macro_stmt",
span: $DIR/attr-stmt-expr.rs:52:7: 52:27 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:52:6: 52:28 (#0),
},
Ident {
ident: "println",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
ident: "my_macro",
span: $DIR/attr-stmt-expr.rs:53:5: 53:13 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:53:13: 53:14 (#0),
},
Group {
delimiter: Parenthesis,
@ -73,36 +104,36 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
kind: Str,
symbol: "{}",
suffix: None,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:53:15: 53:19 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:53:19: 53:20 (#0),
},
Ident {
ident: "string",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:53:21: 53:27 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:53:14: 53:28 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:53:28: 53:29 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): second_make_stmt ! (#[allow(dead_code)] struct Bar { }) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "second_make_stmt",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:5: 56:21 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:21: 56:22 (#0),
},
Group {
delimiter: Parenthesis,
@ -110,48 +141,104 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:23: 56:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:25: 56:30 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:31: 56:40 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:30: 56:41 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:24: 56:42 (#0),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:43: 56:49 (#0),
},
Ident {
ident: "Bar",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:50: 56:53 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:54: 56:56 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:22: 56:57 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:56:57: 56:58 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] #[allow(dead_code)] struct Bar { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#29),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#29),
},
],
span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#29),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:56:23: 56:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/attr-stmt-expr.rs:56:25: 56:30 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
span: $DIR/attr-stmt-expr.rs:56:31: 56:40 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:56:30: 56:41 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:56:24: 56:42 (#0),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:56:43: 56:49 (#0),
},
Ident {
ident: "Bar",
span: $DIR/attr-stmt-expr.rs:56:50: 56:53 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:56:54: 56:56 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Other { }
@ -159,29 +246,29 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:59:5: 59:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:59:7: 59:18 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:59:6: 59:19 (#0),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:60:5: 60:11 (#0),
},
Ident {
ident: "Other",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:60:12: 60:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:60:18: 60:20 (#0),
},
]
@ -15,9 +15,9 @@ pub fn expect_let(attr: TokenStream, item: TokenStream) -> TokenStream {
}

#[proc_macro_attribute]
pub fn expect_print_stmt(attr: TokenStream, item: TokenStream) -> TokenStream {
pub fn expect_my_macro_stmt(attr: TokenStream, item: TokenStream) -> TokenStream {
assert!(attr.to_string().is_empty());
assert_eq!(item.to_string(), "println ! (\"{}\", string) ;");
assert_eq!(item.to_string(), "my_macro ! (\"{}\", string) ;");
item
}

@ -29,9 +29,9 @@ pub fn expect_expr(attr: TokenStream, item: TokenStream) -> TokenStream {
}

#[proc_macro_attribute]
pub fn expect_print_expr(attr: TokenStream, item: TokenStream) -> TokenStream {
pub fn expect_my_macro_expr(attr: TokenStream, item: TokenStream) -> TokenStream {
assert!(attr.to_string().is_empty());
assert_eq!(item.to_string(), "println ! (\"{}\", string)");
assert_eq!(item.to_string(), "my_macro ! (\"{}\", string)");
item
}
@ -17,10 +17,10 @@
pub struct Foo;
impl Foo {
pub fn foo() {
enum Panic { Common };
enum Panic { Common }
}
pub fn bar() {
enum Panic { Common };
enum Panic { Common }
}
}

@ -5,7 +5,7 @@

fn main() {
let try = 2;
struct try { try: u32 };
struct try { try: u32 }
let try: try = try { try };
assert_eq!(try.try, 2);
}

@ -10,7 +10,7 @@ impl Drop for Foo {
fn drop(&mut self) {
unsafe { destructions -= 1 };
}
};
}

let _x = [Foo, Foo, Foo];
}