Auto merge of #73084 - Aaron1011:feature/new-recursive-expand, r=petrochenkov
Re-land PR #72388: Recursively expand `TokenKind::Interpolated` in `probably_equal_for_proc_macro`

PR #72388 allowed us to preserve the original `TokenStream` in more cases during proc-macro expansion, but had to be reverted due to a large number of regressions (see #72545 and #72622). These regressions fell into two categories:

1. Missing handling for `Group`s with `Delimiter::None`, which are inserted during `macro_rules!` expansion (but are lost during stringification and re-parsing). A large number of these regressions were due to `syn` and `proc-macro-hack`, but several crates needed changes to their own proc-macro code.
2. Legitimate hygiene issues that were previously being masked by stringification. Some of these were relatively benign (e.g. [a compilation error](https://github.com/paritytech/parity-scale-codec/pull/210) caused by misusing `quote_spanned!`). However, two crates had intentionally written unhygienic `macro_rules!` macros, which were able to access identifiers that were not passed as arguments (see https://github.com/rust-lang/rust/issues/72622#issuecomment-636402573).

All but one of the Crater regressions have now been fixed upstream (see https://hackmd.io/ItrXWRaSSquVwoJATPx3PQ?both). The remaining crate (which has a PR pending at https://github.com/sammhicks/face-generator/pull/1) is not on `crates.io`, and is a Yew application that seems unlikely to have any reverse dependencies.

As @petrochenkov mentioned in https://github.com/rust-lang/rust/issues/72545#issuecomment-638632434, not re-landing PR #72388 allows more crates to write unhygienic `macro_rules!` macros, which will eventually stop compiling. Since only one Crater regression remains, and since additional crates could start relying on unhygienic `macro_rules!` macros in the time it takes that remaining PR to be merged, it makes sense to re-land this change now.
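To make category 1 concrete, here is a minimal two-crate sketch (modeled on the `group-compat-hack.rs` test added in this PR; the crate names and layout are illustrative, not part of the change): a `macro_rules!` macro forwards a `$name` metavariable to an attribute proc macro, and with the original `TokenStream` preserved the proc macro now receives that identifier wrapped in a `Delimiter::None` group rather than as a bare `Ident`.

```rust
// Sketch of an auxiliary proc-macro crate (compiled with
// `crate-type = ["proc-macro"]`), mirroring the aux file added by this PR.
extern crate proc_macro;
use proc_macro::TokenStream;

#[proc_macro_attribute]
pub fn my_macro(_attr: TokenStream, input: TokenStream) -> TokenStream {
    // After this change, a `$name` forwarded from `macro_rules!` shows up as
    // `Group { delimiter: None, stream: [Ident("Foo")] }` instead of a bare
    // `Ident`, which is what broke token matching in `syn`/`proc-macro-hack`.
    println!("input: {:?}", input);
    input
}
```

```rust
// Sketch of the user-side crate (`group_demo` is a hypothetical name for the
// proc-macro crate above).
#[macro_use]
extern crate group_demo;

macro_rules! wrap {
    ($name:ident) => {
        // `$name` is interpolated as an `NtIdent` nonterminal here.
        #[my_macro]
        struct Wrapped($name);
    };
}

struct Foo;
wrap!(Foo);

fn main() {}
```

Previously, stringifying and re-parsing the proc macro's input erased both the `None`-delimited group and the hygiene marks on the interpolated tokens, which is also why the intentionally unhygienic `macro_rules!` macros in category 2 appeared to work.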
commit e482c86b9d
@@ -11,9 +11,11 @@ use crate::tokenstream::TokenTree;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
use rustc_macros::HashStable_Generic;
use rustc_span::hygiene::ExpnKind;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{self, Span, DUMMY_SP};
use rustc_span::{self, FileName, RealFileName, Span, DUMMY_SP};
use std::borrow::Cow;
use std::{fmt, mem};

@@ -808,6 +810,31 @@ impl Nonterminal {
        }
        false
    }

    // See issue #74616 for details
    pub fn ident_name_compatibility_hack(
        &self,
        orig_span: Span,
        source_map: &SourceMap,
    ) -> Option<(Ident, bool)> {
        if let NtIdent(ident, is_raw) = self {
            if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind {
                let filename = source_map.span_to_filename(orig_span);
                if let FileName::Real(RealFileName::Named(path)) = filename {
                    if (path.ends_with("time-macros-impl/src/lib.rs")
                        && macro_name == sym::impl_macros)
                        || (path.ends_with("js-sys/src/lib.rs") && macro_name == sym::arrays)
                    {
                        let snippet = source_map.span_to_snippet(orig_span);
                        if snippet.as_deref() == Ok("$name") {
                            return Some((*ident, *is_raw));
                        }
                    }
                }
            }
        }
        None
    }
}

impl PartialEq for Nonterminal {

@@ -173,13 +173,19 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
            }

            Interpolated(nt) => {
                let stream = nt_to_tokenstream(&nt, sess, span);
                TokenTree::Group(Group {
                    delimiter: Delimiter::None,
                    stream,
                    span: DelimSpan::from_single(span),
                    flatten: nt.pretty_printing_compatibility_hack(),
                })
                if let Some((name, is_raw)) =
                    nt.ident_name_compatibility_hack(span, sess.source_map())
                {
                    TokenTree::Ident(Ident::new(sess, name.name, is_raw, name.span))
                } else {
                    let stream = nt_to_tokenstream(&nt, sess, span);
                    TokenTree::Group(Group {
                        delimiter: Delimiter::None,
                        stream,
                        span: DelimSpan::from_single(span),
                        flatten: nt.pretty_printing_compatibility_hack(),
                    })
                }
            }

            OpenDelim(..) | CloseDelim(..) => unreachable!(),

@@ -7,8 +7,8 @@
#![feature(or_patterns)]

use rustc_ast as ast;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
@@ -309,7 +309,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
    // modifications, including adding/removing typically non-semantic
    // tokens such as extra braces and commas, don't happen.
    if let Some(tokens) = tokens {
        if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real) {
        if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real, sess) {
            return tokens;
        }
        info!(
@@ -327,7 +327,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
//
// This is otherwise the same as `eq_unspanned`, only recursing with a
// different method.
pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &TokenStream) -> bool {
pub fn tokenstream_probably_equal_for_proc_macro(
    first: &TokenStream,
    other: &TokenStream,
    sess: &ParseSess,
) -> bool {
    // When checking for `probably_eq`, we ignore certain tokens that aren't
    // preserved in the AST. Because they are not preserved, the pretty
    // printer arbitrarily adds or removes them when printing as token
@@ -408,9 +412,6 @@ pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &To
            }
        }
        token_trees = out.into_iter().map(TokenTree::Token).collect();
        if token_trees.len() != 1 {
            debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
        }
    } else {
        token_trees = SmallVec::new();
        token_trees.push(tree);
@@ -418,10 +419,32 @@ pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &To
        token_trees.into_iter()
    }

    let mut t1 = first.trees().filter(semantic_tree).flat_map(break_tokens);
    let mut t2 = other.trees().filter(semantic_tree).flat_map(break_tokens);
    let expand_nt = |tree: TokenTree| {
        if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
            // When checking tokenstreams for 'probable equality', we are comparing
            // a captured (from parsing) `TokenStream` to a reparsed tokenstream.
            // The reparsed Tokenstream will never have `None`-delimited groups,
            // since they are only ever inserted as a result of macro expansion.
            // Therefore, inserting a `None`-delimtied group here (when we
            // convert a nested `Nonterminal` to a tokenstream) would cause
            // a mismatch with the reparsed tokenstream.
            //
            // Note that we currently do not handle the case where the
            // reparsed stream has a `Parenthesis`-delimited group
            // inserted. This will cause a spurious mismatch:
            // issue #75734 tracks resolving this.
            nt_to_tokenstream(nt, sess, *span).into_trees()
        } else {
            TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees()
        }
    };

    // Break tokens after we expand any nonterminals, so that we break tokens
    // that are produced as a result of nonterminal expansion.
    let mut t1 = first.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
    let mut t2 = other.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
    for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
        if !tokentree_probably_equal_for_proc_macro(&t1, &t2) {
        if !tokentree_probably_equal_for_proc_macro(&t1, &t2, sess) {
            return false;
        }
    }
@@ -433,13 +456,17 @@ pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &To
//
// This is otherwise the same as `eq_unspanned`, only recursing with a
// different method.
fn tokentree_probably_equal_for_proc_macro(first: &TokenTree, other: &TokenTree) -> bool {
pub fn tokentree_probably_equal_for_proc_macro(
    first: &TokenTree,
    other: &TokenTree,
    sess: &ParseSess,
) -> bool {
    match (first, other) {
        (TokenTree::Token(token), TokenTree::Token(token2)) => {
            token_probably_equal_for_proc_macro(token, token2)
        }
        (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
            delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2)
            delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess)
        }
        _ => false,
    }
@@ -498,7 +525,7 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
            b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
        }

        (&Interpolated(..), &Interpolated(..)) => false,
        (&Interpolated(..), &Interpolated(..)) => panic!("Unexpanded Interpolated!"),

        _ => panic!("forgot to add a token?"),
    }

@@ -258,6 +258,7 @@ symbols! {
        arith_offset,
        arm_target_feature,
        array,
        arrays,
        as_str,
        asm,
        assert,
@@ -572,6 +573,7 @@ symbols! {
        ignore,
        impl_header_lifetime_elision,
        impl_lint_pass,
        impl_macros,
        impl_trait_in_bindings,
        import_shadowing,
        in_band_lifetimes,

@@ -0,0 +1,13 @@
// force-host
// no-prefer-dynamic

#![crate_type = "proc-macro"]

extern crate proc_macro;
use proc_macro::TokenStream;

#[proc_macro_attribute]
pub fn my_macro(_attr: TokenStream, input: TokenStream) -> TokenStream {
    println!("Called proc_macro_hack with {:?}", input);
    input
}

@@ -0,0 +1,30 @@
// check-pass
// aux-build:group-compat-hack.rs
// compile-flags: -Z span-debug

#![no_std] // Don't load unnecessary hygiene information from std
extern crate std;

#[macro_use] extern crate group_compat_hack;

// Tests the backwards compatibility hack added for certain macros
// When an attribute macro named `proc_macro_hack` or `wasm_bindgen`
// has an `NtIdent` named `$name`, we pass a plain `Ident` token in
// place of a `None`-delimited group. This allows us to maintain
// backwards compatibility for older versions of these crates.

include!("js-sys/src/lib.rs");
include!("time-macros-impl/src/lib.rs");

macro_rules! other {
    ($name:ident) => {
        #[my_macro] struct Three($name);
    }
}

fn main() {
    struct Foo;
    impl_macros!(Foo);
    arrays!(Foo);
    other!(Foo);
}

@@ -0,0 +1,3 @@
Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/time-macros-impl/src/lib.rs:5:21: 5:27 (#5) }, Ident { ident: "One", span: $DIR/time-macros-impl/src/lib.rs:5:28: 5:31 (#5) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:27:18: 27:21 (#0) }], span: $DIR/time-macros-impl/src/lib.rs:5:31: 5:38 (#5) }, Punct { ch: ';', spacing: Alone, span: $DIR/time-macros-impl/src/lib.rs:5:38: 5:39 (#5) }]
Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/js-sys/src/lib.rs:5:21: 5:27 (#9) }, Ident { ident: "Two", span: $DIR/js-sys/src/lib.rs:5:28: 5:31 (#9) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:28:13: 28:16 (#0) }], span: $DIR/js-sys/src/lib.rs:5:31: 5:38 (#9) }, Punct { ch: ';', spacing: Alone, span: $DIR/js-sys/src/lib.rs:5:38: 5:39 (#9) }]
Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/group-compat-hack.rs:21:21: 21:27 (#13) }, Ident { ident: "Three", span: $DIR/group-compat-hack.rs:21:28: 21:33 (#13) }, Group { delimiter: Parenthesis, stream: TokenStream [Group { delimiter: None, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:29:12: 29:15 (#0) }], span: $DIR/group-compat-hack.rs:21:34: 21:39 (#13) }], span: $DIR/group-compat-hack.rs:21:33: 21:40 (#13) }, Punct { ch: ';', spacing: Alone, span: $DIR/group-compat-hack.rs:21:40: 21:41 (#13) }]

@@ -0,0 +1,7 @@
// ignore-test this is not a test

macro_rules! arrays {
    ($name:ident) => {
        #[my_macro] struct Two($name);
    }
}

@@ -0,0 +1,7 @@
// ignore-test this is not a test

macro_rules! impl_macros {
    ($name:ident) => {
        #[my_macro] struct One($name);
    }
}

@@ -15,51 +15,63 @@ PRINT-ATTR INPUT (DISPLAY): const A : u8 = 0 ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "const",
span: #0 bytes(0..0),
span: #3 bytes(416..421),
},
Ident {
ident: "A",
span: #0 bytes(0..0),
Group {
delimiter: None,
stream: TokenStream [
Ident {
ident: "A",
span: #0 bytes(503..504),
},
],
span: #3 bytes(422..424),
},
Punct {
ch: ':',
spacing: Alone,
span: #0 bytes(0..0),
span: #3 bytes(424..425),
},
Ident {
ident: "u8",
span: #0 bytes(0..0),
span: #3 bytes(426..428),
},
Punct {
ch: '=',
spacing: Alone,
span: #0 bytes(0..0),
span: #3 bytes(429..430),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: #0 bytes(0..0),
span: #3 bytes(431..432),
},
Punct {
ch: ';',
spacing: Alone,
span: #0 bytes(0..0),
span: #3 bytes(432..433),
},
]
PRINT-DERIVE INPUT (DISPLAY): struct A { }
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #0 bytes(0..0),
span: #3 bytes(468..474),
},
Ident {
ident: "A",
span: #0 bytes(0..0),
Group {
delimiter: None,
stream: TokenStream [
Ident {
ident: "A",
span: #0 bytes(503..504),
},
],
span: #3 bytes(475..477),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: #0 bytes(0..0),
span: #3 bytes(478..480),
},
]

@@ -1,14 +1,13 @@
// aux-build:first-second.rs
// FIXME: The spans here are bad, see PR #73084

extern crate first_second;
use first_second::*;

macro_rules! produce_it {
    ($name:ident) => {
        #[first] //~ ERROR cannot find type
        #[first]
        struct $name {
            field: MissingType
            field: MissingType //~ ERROR cannot find type
        }
    }
}

@@ -1,8 +1,13 @@
error[E0412]: cannot find type `MissingType` in this scope
  --> $DIR/macro-rules-derive.rs:9:9
  --> $DIR/macro-rules-derive.rs:10:20
   |
LL | #[first]
   | ^^^^^^^^ not found in this scope
LL | field: MissingType
   | ^^^^^^^^^^^ not found in this scope
...
LL | produce_it!(MyName);
   | -------------------- in this macro invocation
   |
   = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)

error: aborting due to previous error

@@ -71,7 +71,6 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
},
]
PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1)
PRINT-BANG RE-COLLECTED (DISPLAY): "hi" "hello" . len() + "world" . len() (1 + 1)
PRINT-BANG INPUT (DEBUG): TokenStream [
Literal {
kind: Str,
@@ -82,50 +81,62 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
Group {
delimiter: None,
stream: TokenStream [
Literal {
kind: Str,
symbol: "hello",
suffix: None,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
delimiter: None,
stream: TokenStream [
Literal {
kind: Str,
symbol: "hello",
suffix: None,
span: $DIR/nodelim-groups.rs:21:17: 21:24 (#0),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:21:24: 21:25 (#0),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:21:25: 21:28 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:21:28: 21:30 (#0),
},
],
span: $DIR/nodelim-groups.rs:15:49: 15:54 (#7),
},
Punct {
ch: '+',
spacing: Alone,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Literal {
kind: Str,
symbol: "world",
suffix: None,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
span: $DIR/nodelim-groups.rs:15:55: 15:56 (#7),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
delimiter: None,
stream: TokenStream [
Literal {
kind: Str,
symbol: "world",
suffix: None,
span: $DIR/nodelim-groups.rs:21:33: 21:40 (#0),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:21:40: 21:41 (#0),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:21:41: 21:44 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:21:44: 21:46 (#0),
},
],
span: $DIR/nodelim-groups.rs:15:57: 15:62 (#7),
},
],
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),

@@ -1,6 +1,4 @@
// aux-build:weird-hygiene.rs
// check-pass
// FIXME: This should actually error, see PR #73084

#![feature(stmt_expr_attributes)]
#![feature(proc_macro_hygiene)]
@@ -22,7 +20,7 @@ macro_rules! other {

        #[derive(WeirdDerive)]
        enum MyEnum {
            Value = (stringify!($tokens + hidden_ident), 1).1
            Value = (stringify!($tokens + hidden_ident), 1).1 //~ ERROR cannot find
        }

        inner!();
@@ -33,7 +31,7 @@ macro_rules! invoke_it {
    ($token:expr) => {
        #[recollect_attr] {
            $token;
            hidden_ident
            hidden_ident //~ ERROR cannot find
        }
    }
}

src/test/ui/proc-macro/weird-hygiene.stderr (new file, 25 lines)
@@ -0,0 +1,25 @@
error[E0425]: cannot find value `hidden_ident` in this scope
  --> $DIR/weird-hygiene.rs:23:43
   |
LL | Value = (stringify!($tokens + hidden_ident), 1).1
   | ^^^^^^^^^^^^ not found in this scope
...
LL | other!(50);
   | ----------- in this macro invocation
   |
   = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)

error[E0425]: cannot find value `hidden_ident` in this scope
  --> $DIR/weird-hygiene.rs:34:13
   |
LL | hidden_ident
   | ^^^^^^^^^^^^ not found in this scope
...
LL | invoke_it!(25);
   | --------------- in this macro invocation
   |
   = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)

error: aborting due to 2 previous errors

For more information about this error, try `rustc --explain E0425`.