proc_macro: Stop flattening groups with dummy spans
commit 77b0ed70b3
parent 50fc24d8a1
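
In short: `TokenKind::Interpolated` now carries an explicit `FlattenGroup` marker, and the proc-macro server's `Group` gains a matching `flatten` field, so the decision to splice an interpolated AST fragment back into the surrounding token stream no longer rests on the "`Delimiter::None` group with a dummy span" heuristic. A minimal sketch of the new shape, condensed from the hunks below:

```rust
// Condensed from the diff; see the hunks below for the real definitions.
pub enum FlattenGroup {
    Yes, // AST fragment handed to an attribute/derive macro: unwrap it during iteration
    No,  // ordinary group or transcribed metavariable: keep it as a delimited group
}

pub enum TokenKind {
    // ...
    Interpolated(Lrc<Nonterminal>, FlattenGroup),
    // ...
}
```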
@@ -475,7 +475,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
+            Some(TokenTree::Token(Token { kind: token::Interpolated(nt, _), .. })) => match *nt {
                 token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
                 _ => return None,

@@ -656,7 +656,7 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
             *span = ident.span;
             return; // Avoid visiting the span for the second time.
         }
-        token::Interpolated(nt) => {
+        token::Interpolated(nt, _) => {
             let mut nt = Lrc::make_mut(nt);
             vis.visit_interpolated(&mut nt);
         }
@@ -182,6 +182,15 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
         .contains(&name)
 }

+/// A hack used to pass AST fragments to attribute and derive macros
+/// as a single nonterminal token instead of a token stream.
+/// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
+pub enum FlattenGroup {
+    Yes,
+    No,
+}
+
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
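
The two variants are chosen at the call sites changed further down: `FlattenGroup::Yes` where `impl Annotatable` and `ProcMacroDerive` wrap a whole AST fragment into a single interpolated token for an attribute or derive macro, and `FlattenGroup::No` for tokens produced by `transcribe` and for groups the proc-macro server builds itself. A condensed pairing of those two kinds of call sites (the `let` bindings are illustrative, not part of the diff):

```rust
// Attribute/derive input: the fragment should be spliced back inline later.
let annotatable_input =
    TokenTree::token(token::Interpolated(Lrc::new(nt), FlattenGroup::Yes), DUMMY_SP);

// Macro transcription of a metavariable: the result stays a real, non-flattened group.
let transcribed_metavar =
    TokenTree::token(token::Interpolated(nt.clone(), FlattenGroup::No), sp);
```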
@@ -236,7 +245,7 @@ pub enum TokenKind {
     /// treat regular and interpolated lifetime identifiers in the same way.
     Lifetime(Symbol),

-    Interpolated(Lrc<Nonterminal>),
+    Interpolated(Lrc<Nonterminal>, FlattenGroup),

     // Can be expanded into several tokens.
     /// A doc comment.

@@ -343,7 +352,7 @@ impl Token {
     /// if they keep spans or perform edition checks.
     pub fn uninterpolated_span(&self) -> Span {
         match &self.kind {
-            Interpolated(nt) => nt.span(),
+            Interpolated(nt, _) => nt.span(),
             _ => self.span,
         }
     }

@@ -382,7 +391,7 @@ impl Token {
             ModSep | // global path
             Lifetime(..) | // labeled loop
             Pound => true, // expression attributes
-            Interpolated(ref nt) => match **nt {
+            Interpolated(ref nt, _) => match **nt {
                 NtLiteral(..) |
                 NtExpr(..) |
                 NtBlock(..) |

@@ -408,7 +417,7 @@ impl Token {
             Lifetime(..) | // lifetime bound in trait object
             Lt | BinOp(Shl) | // associated path
             ModSep => true, // global path
-            Interpolated(ref nt) => match **nt {
+            Interpolated(ref nt, _) => match **nt {
                 NtTy(..) | NtPath(..) => true,
                 _ => false,
             },

@@ -420,7 +429,7 @@ impl Token {
     pub fn can_begin_const_arg(&self) -> bool {
         match self.kind {
             OpenDelim(Brace) => true,
-            Interpolated(ref nt) => match **nt {
+            Interpolated(ref nt, _) => match **nt {
                 NtExpr(..) | NtBlock(..) | NtLiteral(..) => true,
                 _ => false,
             },

@@ -455,7 +464,7 @@ impl Token {
         match self.uninterpolate().kind {
             Literal(..) | BinOp(Minus) => true,
             Ident(name, false) if name.is_bool_lit() => true,
-            Interpolated(ref nt) => match &**nt {
+            Interpolated(ref nt, _) => match &**nt {
                 NtLiteral(_) => true,
                 NtExpr(e) => match &e.kind {
                     ast::ExprKind::Lit(_) => true,

@@ -476,7 +485,7 @@ impl Token {
     // otherwise returns the original token.
     pub fn uninterpolate(&self) -> Cow<'_, Token> {
         match &self.kind {
-            Interpolated(nt) => match **nt {
+            Interpolated(nt, _) => match **nt {
                 NtIdent(ident, is_raw) => {
                     Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span))
                 }

@@ -523,7 +532,7 @@ impl Token {

     /// Returns `true` if the token is an interpolated path.
     fn is_path(&self) -> bool {
-        if let Interpolated(ref nt) = self.kind {
+        if let Interpolated(ref nt, _) = self.kind {
             if let NtPath(..) = **nt {
                 return true;
             }

@@ -535,7 +544,7 @@ impl Token {
     /// That is, is this a pre-parsed expression dropped into the token stream
     /// (which happens while parsing the result of macro expansion)?
     pub fn is_whole_expr(&self) -> bool {
-        if let Interpolated(ref nt) = self.kind {
+        if let Interpolated(ref nt, _) = self.kind {
             if let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtIdent(..) | NtBlock(_) = **nt {
                 return true;
             }

@@ -546,7 +555,7 @@ impl Token {

     // Is the token an interpolated block (`$b:block`)?
     pub fn is_whole_block(&self) -> bool {
-        if let Interpolated(ref nt) = self.kind {
+        if let Interpolated(ref nt, _) = self.kind {
             if let NtBlock(..) = **nt {
                 return true;
             }

@@ -724,7 +733,7 @@ impl Token {
                 b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
             }

-            (&Interpolated(_), &Interpolated(_)) => false,
+            (&Interpolated(..), &Interpolated(..)) => false,

             _ => panic!("forgot to add a token?"),
         }
@@ -205,7 +205,7 @@ impl Lit {
                 token::Lit::new(token::Bool, name, None)
             }
             token::Literal(lit) => lit,
-            token::Interpolated(ref nt) => {
+            token::Interpolated(ref nt, _) => {
                 if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
                     if let ast::ExprKind::Lit(lit) = &expr.kind {
                         return Ok(lit.clone());

@@ -1027,7 +1027,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {

     fn lower_token(&mut self, token: Token) -> TokenStream {
         match token.kind {
-            token::Interpolated(nt) => {
+            token::Interpolated(nt, _) => {
                 let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
                 self.lower_token_stream(tts)
             }

@@ -266,7 +266,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
         token::Shebang(s) => format!("/* shebang: {}*/", s),
         token::Unknown(s) => s.to_string(),

-        token::Interpolated(ref nt) => nonterminal_to_string(nt),
+        token::Interpolated(ref nt, _) => nonterminal_to_string(nt),
     }
 }

@@ -4,7 +4,7 @@ use crate::module::DirectoryOwnership;
 use rustc_ast::ast::{self, Attribute, NodeId, PatKind};
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::ptr::P;
-use rustc_ast::token;
+use rustc_ast::token::{self, FlattenGroup};
 use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
 use rustc_ast::visit::{AssocCtxt, Visitor};
 use rustc_attr::{self as attr, Deprecation, HasAttrs, Stability};

@@ -142,7 +142,7 @@ impl Annotatable {
             | Annotatable::StructField(..)
             | Annotatable::Variant(..) => panic!("unexpected annotatable"),
         };
-        TokenTree::token(token::Interpolated(Lrc::new(nt)), DUMMY_SP).into()
+        TokenTree::token(token::Interpolated(Lrc::new(nt), FlattenGroup::Yes), DUMMY_SP).into()
     }

     pub fn expect_item(self) -> P<ast::Item> {

@@ -374,7 +374,7 @@ where
 impl MutVisitor for AvoidInterpolatedIdents {
     fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
         if let tokenstream::TokenTree::Token(token) = tt {
-            if let token::Interpolated(nt) = &token.kind {
+            if let token::Interpolated(nt, _) = &token.kind {
                 if let token::NtIdent(ident, is_raw) = **nt {
                     *tt = tokenstream::TokenTree::token(
                         token::Ident(ident.name, is_raw),
@@ -785,12 +785,12 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
         sym::literal => token.can_begin_literal_maybe_minus(),
         sym::vis => match token.kind {
             // The follow-set of :vis + "priv" keyword + interpolated
-            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
+            token::Comma | token::Ident(..) | token::Interpolated(..) => true,
             _ => token.can_begin_type(),
         },
         sym::block => match token.kind {
             token::OpenDelim(token::Brace) => true,
-            token::Interpolated(ref nt) => match **nt {
+            token::Interpolated(ref nt, _) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)

@@ -804,7 +804,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
         },
         sym::path | sym::meta => match token.kind {
             token::ModSep | token::Ident(..) => true,
-            token::Interpolated(ref nt) => match **nt {
+            token::Interpolated(ref nt, _) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
                 _ => may_be_ident(&nt),
             },

@@ -823,12 +823,12 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
             token::ModSep | // path
             token::Lt | // path (UFCS constant)
             token::BinOp(token::Shl) => true, // path (double UFCS)
-            token::Interpolated(ref nt) => may_be_ident(nt),
+            token::Interpolated(ref nt, _) => may_be_ident(nt),
             _ => false,
         },
         sym::lifetime => match token.kind {
             token::Lifetime(_) => true,
-            token::Interpolated(ref nt) => match **nt {
+            token::Interpolated(ref nt, _) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },
@@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};

 use rustc_ast::ast::MacCall;
 use rustc_ast::mut_visit::{self, MutVisitor};
-use rustc_ast::token::{self, NtTT, Token};
+use rustc_ast::token::{self, FlattenGroup, NtTT, Token};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;

@@ -240,7 +240,10 @@ pub(super) fn transcribe<'a>(
                         result.push(tt.clone().into());
                     } else {
                         marker.visit_span(&mut sp);
-                        let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
+                        let token = TokenTree::token(
+                            token::Interpolated(nt.clone(), FlattenGroup::No),
+                            sp,
+                        );
                         result.push(token.into());
                     }
                 } else {
@@ -2,7 +2,7 @@ use crate::base::{self, *};
 use crate::proc_macro_server;

 use rustc_ast::ast::{self, ItemKind, MetaItemKind, NestedMetaItem};
-use rustc_ast::token;
+use rustc_ast::token::{self, FlattenGroup};
 use rustc_ast::tokenstream::{self, TokenStream};
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, ErrorReported};

@@ -102,7 +102,7 @@ impl MultiItemModifier for ProcMacroDerive {
             }
         }

-        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
+        let token = token::Interpolated(Lrc::new(token::NtItem(item)), FlattenGroup::Yes);
         let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();

         let server = proc_macro_server::Rustc::new(ecx);
@@ -1,7 +1,7 @@
 use crate::base::ExtCtxt;

 use rustc_ast::ast;
-use rustc_ast::token;
+use rustc_ast::token::{self, FlattenGroup};
 use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
 use rustc_ast::util::comments;
 use rustc_ast_pretty::pprust;

@@ -60,7 +60,12 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
         let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
-                return TokenTree::Group(Group { delimiter, stream: tts, span });
+                return TokenTree::Group(Group {
+                    delimiter,
+                    stream: tts,
+                    span,
+                    flatten: FlattenGroup::No,
+                });
             }
             tokenstream::TokenTree::Token(token) => token,
         };
@@ -167,6 +172,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                     delimiter: Delimiter::Bracket,
                     stream,
                     span: DelimSpan::from_single(span),
+                    flatten: FlattenGroup::No,
                 }));
                 if style == ast::AttrStyle::Inner {
                     stack.push(tt!(Punct::new('!', false)));

@@ -174,12 +180,13 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                 tt!(Punct::new('#', false))
             }

-            Interpolated(nt) => {
+            Interpolated(nt, flatten) => {
                 let stream = nt_to_tokenstream(&nt, sess, span);
                 TokenTree::Group(Group {
                     delimiter: Delimiter::None,
                     stream,
                     span: DelimSpan::from_single(span),
+                    flatten,
                 })
             }

@@ -195,7 +202,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {

         let (ch, joint, span) = match self {
             TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
-            TokenTree::Group(Group { delimiter, stream, span }) => {
+            TokenTree::Group(Group { delimiter, stream, span, .. }) => {
                 return tokenstream::TokenTree::Delimited(span, delimiter.to_internal(), stream)
                     .into();
             }

@@ -283,6 +290,10 @@ pub struct Group {
     delimiter: Delimiter,
     stream: TokenStream,
     span: DelimSpan,
+    /// A hack used to pass AST fragments to attribute and derive macros
+    /// as a single nonterminal token instead of a token stream.
+    /// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
+    flatten: FlattenGroup,
 }

 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
@@ -437,14 +448,12 @@ impl server::TokenStreamIter for Rustc<'_> {
                 let next = iter.cursor.next_with_joint()?;
                 Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
             })?;
-            // HACK: The condition "dummy span + group with empty delimiter" represents an AST
-            // fragment approximately converted into a token stream. This may happen, for
-            // example, with inputs to proc macro attributes, including derives. Such "groups"
-            // need to flattened during iteration over stream's token trees.
-            // Eventually this needs to be removed in favor of keeping original token trees
-            // and not doing the roundtrip through AST.
+            // A hack used to pass AST fragments to attribute and derive macros
+            // as a single nonterminal token instead of a token stream.
+            // Such token needs to be "unwrapped" and not represented as a delimited group.
+            // FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
             if let TokenTree::Group(ref group) = tree {
-                if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
+                if matches!(group.flatten, FlattenGroup::Yes) {
                     iter.cursor.append(group.stream.clone());
                     continue;
                 }
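
This iterator is the consumer of the marker: flattening now happens only for groups the compiler explicitly tagged `FlattenGroup::Yes`, so a `Delimiter::None` group that merely happens to carry a dummy span should no longer be unwrapped by accident. The old and new conditions, side by side as in the hunk above:

```rust
// Before: flattening inferred from the delimiter/dummy-span heuristic.
if group.delimiter == Delimiter::None && group.span.entire().is_dummy() { /* flatten */ }

// After: flattening driven by the explicit marker set where the group was created.
if matches!(group.flatten, FlattenGroup::Yes) { /* flatten */ }
```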
@@ -456,7 +465,12 @@ impl server::TokenStreamIter for Rustc<'_> {

 impl server::Group for Rustc<'_> {
     fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
-        Group { delimiter, stream, span: DelimSpan::from_single(server::Span::call_site(self)) }
+        Group {
+            delimiter,
+            stream,
+            span: DelimSpan::from_single(server::Span::call_site(self)),
+            flatten: FlattenGroup::No,
+        }
     }
     fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
         group.delimiter
@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
     /// The delimiters or `=` are still put into the resulting token stream.
     pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
         let item = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
+            token::Interpolated(ref nt, _) => match **nt {
                 Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
                 _ => None,
             },

@@ -254,7 +254,7 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
+            token::Interpolated(ref nt, _) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
                 _ => None,
             },
@@ -26,7 +26,7 @@ use std::mem;
 /// `token::Interpolated` tokens.
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
-        if let token::Interpolated(nt) = &$p.token.kind {
+        if let token::Interpolated(nt, _) = &$p.token.kind {
             match &**nt {
                 token::NtExpr(e) | token::NtLiteral(e) => {
                     let e = e.clone();

@@ -1780,7 +1780,7 @@ impl<'a> Parser<'a> {

     fn is_named_param(&self) -> bool {
         let offset = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
+            token::Interpolated(ref nt, _) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             },
@@ -54,7 +54,7 @@ enum BlockMode {
 #[macro_export]
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-        if let token::Interpolated(nt) = &$p.token.kind {
+        if let token::Interpolated(nt, _) = &$p.token.kind {
             if let token::$constructor(x) = &**nt {
                 let $x = x.clone();
                 $p.bump();

@@ -69,7 +69,7 @@ macro_rules! maybe_whole {
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
     ($self: expr, $allow_qpath_recovery: expr) => {
         if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
-            if let token::Interpolated(nt) = &$self.token.kind {
+            if let token::Interpolated(nt, _) = &$self.token.kind {
                 if let token::NtTy(ty) = &**nt {
                     let ty = ty.clone();
                     $self.bump();
@@ -922,7 +922,7 @@ impl<'a> Parser<'a> {
         if self.eat(&token::Eq) {
             let eq_span = self.prev_token.span;
             let mut is_interpolated_expr = false;
-            if let token::Interpolated(nt) = &self.token.kind {
+            if let token::Interpolated(nt, _) = &self.token.kind {
                 if let token::NtExpr(..) = **nt {
                     is_interpolated_expr = true;
                 }

@@ -515,7 +515,7 @@ impl<'a> Parser<'a> {
         self.recover_additional_muts();

         // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
-        if let token::Interpolated(ref nt) = self.token.kind {
+        if let token::Interpolated(ref nt, _) = self.token.kind {
             if let token::NtPat(_) = **nt {
                 self.expected_ident_found().emit();
             }
@@ -1325,7 +1325,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
     }

     fn visit_token(&mut self, t: Token) {
-        if let token::Interpolated(nt) = t.kind {
+        if let token::Interpolated(nt, _) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::MacCall(..) = expr.kind {
                     self.visit_invoc(expr.id);

@@ -256,7 +256,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
     }

     fn visit_token(&mut self, t: Token) {
-        if let token::Interpolated(nt) = t.kind {
+        if let token::Interpolated(nt, _) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::MacCall(..) = expr.kind {
                     self.visit_macro_invoc(expr.id);