Rollup merge of #66935 - petrochenkov:attrtok2, r=Centril

syntax: Unify macro and attribute arguments in AST

The unified form (`ast::MacArgs`) represents the arguments in parsed form, rather than as the unstructured token stream previously used for attributes.
It also tracks spans and delimiter kinds more precisely for fn-like macros and macro definitions.

I've been talking about implementing this with @nnethercote in https://github.com/rust-lang/rust/pull/65750#issuecomment-546517322.
The parsed representation is closer to `MetaItem` and requires less token juggling during conversions, so it may potentially be faster.
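
For a quick overview, here is a condensed sketch of the change to the AST types, assembled from the `syntax/ast.rs` hunks further down (field lists are abbreviated; `DelimSpan`, `MacDelimiter`, and `TokenStream` are the existing `syntax` types):

```rust
// Before: attributes carried a raw token stream, and fn-like macros
// carried separate `delim`/`tts`/`span` fields.
pub struct AttrItem { pub path: Path, pub tokens: TokenStream }
pub struct Mac { pub path: Path, pub delim: MacDelimiter, pub tts: TokenStream, pub span: Span, /* ... */ }

// After: both share a single parsed argument type.
pub struct AttrItem { pub path: Path, pub args: MacArgs }
pub struct Mac { pub path: Path, pub args: P<MacArgs>, /* ... */ }

/// Arguments passed to an attribute or a function-like macro.
pub enum MacArgs {
    /// No arguments - `#[attr]`.
    Empty,
    /// Delimited arguments - `#[attr()/[]/{}]` or `mac!()/[]/{}`.
    Delimited(DelimSpan, MacDelimiter, TokenStream),
    /// Arguments of a key-value attribute - `#[attr = "value"]`.
    Eq(/* span of the `=` token */ Span, /* token stream of the "value" */ TokenStream),
}
```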

r? @Centril
Commit cf937fa84d by Mazdak Farrokhzad, 2019-12-03 11:07:05 +01:00 (committed via GitHub)
40 changed files with 395 additions and 327 deletions


@ -1003,7 +1003,7 @@ impl<'a> LoweringContext<'a> {
AttrKind::Normal(ref item) => {
AttrKind::Normal(AttrItem {
path: item.path.clone(),
tokens: self.lower_token_stream(item.tokens.clone()),
args: self.lower_mac_args(&item.args),
})
}
AttrKind::DocComment(comment) => AttrKind::DocComment(comment)
@ -1017,6 +1017,16 @@ impl<'a> LoweringContext<'a> {
}
}
fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs {
match *args {
MacArgs::Empty => MacArgs::Empty,
MacArgs::Delimited(dspan, delim, ref tokens) =>
MacArgs::Delimited(dspan, delim, self.lower_token_stream(tokens.clone())),
MacArgs::Eq(eq_span, ref tokens) =>
MacArgs::Eq(eq_span, self.lower_token_stream(tokens.clone())),
}
}
fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
tokens
.into_trees()


@ -233,7 +233,7 @@ impl LoweringContext<'_> {
if let ItemKind::MacroDef(ref def) = i.kind {
if !def.legacy || attr::contains_name(&i.attrs, sym::macro_export) {
let body = self.lower_token_stream(def.stream());
let body = self.lower_token_stream(def.body.inner_tokens());
let hir_id = self.lower_node_id(i.id);
self.exported_macros.push(hir::MacroDef {
name: ident.name,


@ -1450,10 +1450,10 @@ impl KeywordIdents {
impl EarlyLintPass for KeywordIdents {
fn check_mac_def(&mut self, cx: &EarlyContext<'_>, mac_def: &ast::MacroDef, _id: ast::NodeId) {
self.check_tokens(cx, mac_def.stream());
self.check_tokens(cx, mac_def.body.inner_tokens());
}
fn check_mac(&mut self, cx: &EarlyContext<'_>, mac: &ast::Mac) {
self.check_tokens(cx, mac.tts.clone().into());
self.check_tokens(cx, mac.args.inner_tokens());
}
fn check_ident(&mut self, cx: &EarlyContext<'_>, ident: ast::Ident) {
self.check_ident_token(cx, UnderMacro(false), ident);


@ -32,6 +32,8 @@ use syntax::source_map;
use syntax::source_map::Spanned;
use syntax::symbol::Symbol;
use syntax::expand::allocator::AllocatorKind;
use syntax::ptr::P;
use syntax::tokenstream::DelimSpan;
use syntax_pos::{Span, FileName};
macro_rules! provide {
@ -427,6 +429,7 @@ impl CStore {
let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
let local_span = Span::with_root_ctxt(source_file.start_pos, source_file.end_pos);
let dspan = DelimSpan::from_single(local_span);
let (body, mut errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
emit_unclosed_delims(&mut errors, &sess.parse_sess);
@ -448,7 +451,7 @@ impl CStore {
span: local_span,
attrs: attrs.iter().cloned().collect(),
kind: ast::ItemKind::MacroDef(ast::MacroDef {
tokens: body.into(),
body: P(ast::MacArgs::Delimited(dspan, ast::MacDelimiter::Brace, body)),
legacy: def.legacy,
}),
vis: source_map::respan(local_span.shrink_to_lo(), ast::VisibilityKind::Inherited),


@ -101,7 +101,7 @@ impl<'a> StripUnconfigured<'a> {
if !attr.has_name(sym::cfg_attr) {
return vec![attr];
}
if attr.get_normal_item().tokens.is_empty() {
if let ast::MacArgs::Empty = attr.get_normal_item().args {
self.sess.span_diagnostic
.struct_span_err(
attr.span,


@ -277,7 +277,9 @@ pub fn parse_in_attr<'a, T>(
) -> PResult<'a, T> {
let mut parser = Parser::new(
sess,
attr.get_normal_item().tokens.clone(),
// FIXME(#66940, Centril | petrochenkov): refactor this function so it doesn't
// require reconstructing and immediately re-parsing delimiters.
attr.get_normal_item().args.outer_tokens(),
None,
false,
false,
@ -409,7 +411,7 @@ fn prepend_attrs(
brackets.push(stream);
}
brackets.push(item.tokens.clone());
brackets.push(item.args.outer_tokens());
// The span we list here for `#` and for `[ ... ]` are both wrong in
// that it encompasses more than each token, but it hopefully is "good


@ -2,8 +2,7 @@ use super::{SeqSep, Parser, TokenType, PathStyle};
use syntax::attr;
use syntax::ast;
use syntax::util::comments;
use syntax::token::{self, Nonterminal, DelimToken};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax::token::{self, Nonterminal};
use syntax_pos::{Span, Symbol};
use errors::PResult;
@ -181,31 +180,8 @@ impl<'a> Parser<'a> {
item
} else {
let path = self.parse_path(PathStyle::Mod)?;
let tokens = if self.check(&token::OpenDelim(DelimToken::Paren)) ||
self.check(&token::OpenDelim(DelimToken::Bracket)) ||
self.check(&token::OpenDelim(DelimToken::Brace)) {
self.parse_token_tree().into()
} else if self.eat(&token::Eq) {
let eq = TokenTree::token(token::Eq, self.prev_span);
let mut is_interpolated_expr = false;
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtExpr(..) = **nt {
is_interpolated_expr = true;
}
}
let token_tree = if is_interpolated_expr {
// We need to accept arbitrary interpolated expressions to continue
// supporting things like `doc = $expr` that work on stable.
// Non-literal interpolated expressions are rejected after expansion.
self.parse_token_tree()
} else {
self.parse_unsuffixed_lit()?.token_tree()
};
TokenStream::new(vec![eq.into(), token_tree.into()])
} else {
TokenStream::default()
};
ast::AttrItem { path, tokens }
let args = self.parse_attr_args()?;
ast::AttrItem { path, args }
})
}
@ -244,7 +220,7 @@ impl<'a> Parser<'a> {
Ok(attrs)
}
fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
pub(super) fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
let lit = self.parse_lit()?;
debug!("checking if {:?} is unusuffixed", lit);


@ -922,13 +922,11 @@ impl<'a> Parser<'a> {
// `!`, as an operator, is prefix, so we know this isn't that.
if self.eat(&token::Not) {
// MACRO INVOCATION expression
let (delim, tts) = self.expect_delimited_token_tree()?;
let args = self.parse_mac_args()?;
hi = self.prev_span;
ex = ExprKind::Mac(Mac {
path,
tts,
delim,
span: lo.to(hi),
args,
prior_type_ascription: self.last_type_ascription,
});
} else if self.check(&token::OpenDelim(token::Brace)) {


@ -8,12 +8,12 @@ use syntax::ast::{ItemKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, Us
use syntax::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness, Extern, StrLit};
use syntax::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind};
use syntax::ast::{Ty, TyKind, Generics, TraitRef, EnumDef, Variant, VariantData, StructField};
use syntax::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, FnSig, SelfKind, Param};
use syntax::ast::{Mac, MacArgs, MacDelimiter, Block, BindingMode, FnDecl, FnSig, SelfKind, Param};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::ThinVec;
use syntax::token;
use syntax::tokenstream::{TokenTree, TokenStream};
use syntax::tokenstream::{DelimSpan, TokenTree, TokenStream};
use syntax::source_map::{self, respan, Span};
use syntax::struct_span_err;
use syntax_pos::BytePos;
@ -432,22 +432,18 @@ impl<'a> Parser<'a> {
let prev_span = self.prev_span;
self.complain_if_pub_macro(&visibility.node, prev_span);
let mac_lo = self.token.span;
// Item macro
let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::Not)?;
let (delim, tts) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
let args = self.parse_mac_args()?;
if args.need_semicolon() && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item();
}
let hi = self.prev_span;
let mac = Mac {
path,
tts,
delim,
span: mac_lo.to(hi),
args,
prior_type_ascription: self.last_type_ascription,
};
let item =
@ -500,7 +496,6 @@ impl<'a> Parser<'a> {
if self.token.is_path_start() &&
!(self.is_async_fn() && self.token.span.rust_2015()) {
let prev_span = self.prev_span;
let lo = self.token.span;
let path = self.parse_path(PathStyle::Mod)?;
if path.segments.len() == 1 {
@ -518,16 +513,14 @@ impl<'a> Parser<'a> {
*at_end = true;
// eat a matched-delimiter token tree:
let (delim, tts) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace {
let args = self.parse_mac_args()?;
if args.need_semicolon() {
self.expect_semi()?;
}
Ok(Some(Mac {
path,
tts,
delim,
span: lo.to(self.prev_span),
args,
prior_type_ascription: self.last_type_ascription,
}))
} else {
@ -1624,33 +1617,31 @@ impl<'a> Parser<'a> {
vis: &Visibility,
lo: Span
) -> PResult<'a, Option<P<Item>>> {
let token_lo = self.token.span;
let (ident, def) = if self.eat_keyword(kw::Macro) {
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
match self.parse_token_tree() {
TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
}
let body = if self.check(&token::OpenDelim(token::Brace)) {
self.parse_mac_args()?
} else if self.check(&token::OpenDelim(token::Paren)) {
let args = self.parse_token_tree();
let params = self.parse_token_tree();
let pspan = params.span();
let body = if self.check(&token::OpenDelim(token::Brace)) {
self.parse_token_tree()
} else {
self.unexpected()?;
unreachable!()
return self.unexpected();
};
TokenStream::new(vec![
args.into(),
TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
let bspan = body.span();
let tokens = TokenStream::new(vec![
params.into(),
TokenTree::token(token::FatArrow, pspan.between(bspan)).into(),
body.into(),
])
]);
let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
P(MacArgs::Delimited(dspan, MacDelimiter::Brace, tokens))
} else {
self.unexpected()?;
unreachable!()
return self.unexpected();
};
(ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
(ident, ast::MacroDef { body, legacy: false })
} else if self.check_keyword(sym::macro_rules) &&
self.look_ahead(1, |t| *t == token::Not) &&
self.look_ahead(2, |t| t.is_ident()) {
@ -1660,12 +1651,12 @@ impl<'a> Parser<'a> {
self.bump();
let ident = self.parse_ident()?;
let (delim, tokens) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
let body = self.parse_mac_args()?;
if body.need_semicolon() && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item();
}
(ident, ast::MacroDef { tokens, legacy: true })
(ident, ast::MacroDef { body, legacy: true })
} else {
return Ok(None);
};


@ -16,7 +16,7 @@ use crate::lexer::UnmatchedBrace;
use syntax::ast::{
self, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Extern, Ident, StrLit,
IsAsync, MacDelimiter, Mutability, Visibility, VisibilityKind, Unsafety,
IsAsync, MacArgs, MacDelimiter, Mutability, Visibility, VisibilityKind, Unsafety,
};
use syntax::print::pprust;
@ -1010,27 +1010,49 @@ impl<'a> Parser<'a> {
}
}
fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
let delim = match self.token.kind {
token::OpenDelim(delim) => delim,
_ => {
let msg = "expected open delimiter";
let mut err = self.fatal(msg);
err.span_label(self.token.span, msg);
return Err(err)
fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
self.parse_mac_args_common(true).map(P)
}
fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
self.parse_mac_args_common(false)
}
fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
Ok(if self.check(&token::OpenDelim(DelimToken::Paren)) ||
self.check(&token::OpenDelim(DelimToken::Bracket)) ||
self.check(&token::OpenDelim(DelimToken::Brace)) {
match self.parse_token_tree() {
TokenTree::Delimited(dspan, delim, tokens) =>
// We've confirmed above that there is a delimiter so unwrapping is OK.
MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens),
_ => unreachable!(),
}
};
let tts = match self.parse_token_tree() {
TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
};
let delim = match delim {
token::Paren => MacDelimiter::Parenthesis,
token::Bracket => MacDelimiter::Bracket,
token::Brace => MacDelimiter::Brace,
token::NoDelim => self.bug("unexpected no delimiter"),
};
Ok((delim, tts.into()))
} else if !delimited_only {
if self.eat(&token::Eq) {
let eq_span = self.prev_span;
let mut is_interpolated_expr = false;
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtExpr(..) = **nt {
is_interpolated_expr = true;
}
}
let token_tree = if is_interpolated_expr {
// We need to accept arbitrary interpolated expressions to continue
// supporting things like `doc = $expr` that work on stable.
// Non-literal interpolated expressions are rejected after expansion.
self.parse_token_tree()
} else {
self.parse_unsuffixed_lit()?.token_tree()
};
MacArgs::Eq(eq_span, token_tree.into())
} else {
MacArgs::Empty
}
} else {
return self.unexpected();
})
}
fn parse_or_use_outer_attributes(


@ -338,7 +338,7 @@ impl<'a> Parser<'a> {
(None, self.parse_path(PathStyle::Expr)?)
};
match self.token.kind {
token::Not if qself.is_none() => self.parse_pat_mac_invoc(lo, path)?,
token::Not if qself.is_none() => self.parse_pat_mac_invoc(path)?,
token::DotDotDot | token::DotDotEq | token::DotDot => {
self.parse_pat_range_starting_with_path(lo, qself, path)?
}
@ -593,14 +593,12 @@ impl<'a> Parser<'a> {
}
/// Parse macro invocation
fn parse_pat_mac_invoc(&mut self, lo: Span, path: Path) -> PResult<'a, PatKind> {
fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
self.bump();
let (delim, tts) = self.expect_delimited_token_tree()?;
let args = self.parse_mac_args()?;
let mac = Mac {
path,
tts,
delim,
span: lo.to(self.prev_span),
args,
prior_type_ascription: self.last_type_ascription,
};
Ok(PatKind::Mac(mac))


@ -2,6 +2,7 @@ use super::{Parser, TokenType};
use crate::maybe_whole;
use syntax::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs};
use syntax::ast::{AnonConst, GenericArg, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
use syntax::ast::MacArgs;
use syntax::ThinVec;
use syntax::token::{self, Token};
use syntax::source_map::{Span, BytePos};
@ -114,9 +115,9 @@ impl<'a> Parser<'a> {
fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> {
let meta_ident = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
token::NtMeta(ref item) => match item.tokens.is_empty() {
true => Some(item.path.clone()),
false => None,
token::NtMeta(ref item) => match item.args {
MacArgs::Empty => Some(item.path.clone()),
_ => None,
},
_ => None,
},


@ -10,7 +10,7 @@ use syntax::ThinVec;
use syntax::ptr::P;
use syntax::ast;
use syntax::ast::{DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
use syntax::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
use syntax::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac};
use syntax::util::classify;
use syntax::token;
use syntax::source_map::{respan, Span};
@ -93,10 +93,11 @@ impl<'a> Parser<'a> {
}));
}
let (delim, tts) = self.expect_delimited_token_tree()?;
let args = self.parse_mac_args()?;
let delim = args.delim();
let hi = self.prev_span;
let style = if delim == MacDelimiter::Brace {
let style = if delim == token::Brace {
MacStmtStyle::Braces
} else {
MacStmtStyle::NoBraces
@ -104,12 +105,10 @@ impl<'a> Parser<'a> {
let mac = Mac {
path,
tts,
delim,
span: lo.to(hi),
args,
prior_type_ascription: self.last_type_ascription,
};
let kind = if delim == MacDelimiter::Brace ||
let kind = if delim == token::Brace ||
self.token == token::Semi || self.token == token::Eof {
StmtKind::Mac(P((mac, style, attrs.into())))
}
@ -130,7 +129,7 @@ impl<'a> Parser<'a> {
self.warn_missing_semicolon();
StmtKind::Mac(P((mac, style, attrs.into())))
} else {
let e = self.mk_expr(mac.span, ExprKind::Mac(mac), ThinVec::new());
let e = self.mk_expr(lo.to(hi), ExprKind::Mac(mac), ThinVec::new());
let e = self.maybe_recover_from_bad_qpath(e, true)?;
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;


@ -177,12 +177,10 @@ impl<'a> Parser<'a> {
let path = self.parse_path(PathStyle::Type)?;
if self.eat(&token::Not) {
// Macro invocation in type position
let (delim, tts) = self.expect_delimited_token_tree()?;
let args = self.parse_mac_args()?;
let mac = Mac {
path,
tts,
delim,
span: lo.to(self.prev_span),
args,
prior_type_ascription: self.last_type_ascription,
};
TyKind::Mac(mac)


@ -2,11 +2,9 @@
use errors::{PResult, Applicability};
use rustc_feature::{AttributeTemplate, BUILTIN_ATTRIBUTE_MAP};
use syntax::ast::{self, Attribute, AttrKind, Ident, MetaItem, MetaItemKind};
use syntax::ast::{self, Attribute, AttrKind, Ident, MacArgs, MetaItem, MetaItemKind};
use syntax::attr::mk_name_value_item_str;
use syntax::early_buffered_lints::BufferedEarlyLintId;
use syntax::token;
use syntax::tokenstream::TokenTree;
use syntax::sess::ParseSess;
use syntax_pos::{Symbol, sym};
@ -19,11 +17,9 @@ pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
// `rustc_dummy` doesn't have any restrictions specific to built-in attributes.
Some((name, _, template, _)) if name != sym::rustc_dummy =>
check_builtin_attribute(sess, attr, name, template),
_ => if let Some(TokenTree::Token(token)) = attr.get_normal_item().tokens.trees().next() {
if token == token::Eq {
// All key-value attributes are restricted to meta-item syntax.
parse_meta(sess, attr).map_err(|mut err| err.emit()).ok();
}
_ => if let MacArgs::Eq(..) = attr.get_normal_item().args {
// All key-value attributes are restricted to meta-item syntax.
parse_meta(sess, attr).map_err(|mut err| err.emit()).ok();
}
}
}


@ -737,14 +737,6 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|this| visit::walk_enum_def(this, enum_definition, generics, item_id))
}
fn visit_mac(&mut self, mac: &Mac) {
// when a new macro kind is added but the author forgets to set it up for expansion
// because that's the only part that won't cause a compiler error
self.session.diagnostic()
.span_bug(mac.span, "macro invocation missed in expansion; did you forget to override \
the relevant `fold_*()` method in `PlaceholderExpander`?");
}
fn visit_impl_item(&mut self, ii: &'a ImplItem) {
if let ImplItemKind::Method(ref sig, _) = ii.kind {
self.check_fn_decl(&sig.decl);


@ -1515,14 +1515,6 @@ impl<'l, 'tcx> Visitor<'l> for DumpVisitor<'l, 'tcx> {
}
}
fn visit_mac(&mut self, mac: &'l ast::Mac) {
// These shouldn't exist in the AST at this point, log a span bug.
span_bug!(
mac.span,
"macro invocation should have been expanded out of AST"
);
}
fn visit_pat(&mut self, p: &'l ast::Pat) {
self.process_macro_use(p.span);
self.process_pat(p);


@ -482,7 +482,7 @@ fn build_macro(cx: &DocContext<'_>, did: DefId, name: ast::Name) -> clean::ItemE
match cx.enter_resolver(|r| r.cstore().load_macro_untracked(did, cx.sess())) {
LoadedMacro::MacroDef(def, _) => {
let matchers: hir::HirVec<Span> = if let ast::ItemKind::MacroDef(ref def) = def.kind {
let tts: Vec<_> = def.stream().into_trees().collect();
let tts: Vec<_> = def.body.inner_tokens().into_trees().collect();
tts.chunks(4).map(|arm| arm[0].span()).collect()
} else {
unreachable!()


@ -27,7 +27,7 @@ pub use syntax_pos::symbol::{Ident, Symbol as Name};
use crate::ptr::P;
use crate::source_map::{dummy_spanned, respan, Spanned};
use crate::token::{self, DelimToken};
use crate::tokenstream::TokenStream;
use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
use syntax_pos::symbol::{kw, sym, Symbol};
use syntax_pos::{Span, DUMMY_SP, ExpnId};
@ -40,6 +40,7 @@ use rustc_index::vec::Idx;
use rustc_serialize::{self, Decoder, Encoder};
use rustc_macros::HashStable_Generic;
use std::iter;
use std::fmt;
#[cfg(test)]
@ -1372,34 +1373,89 @@ pub enum Movability {
Movable,
}
/// Represents a macro invocation. The `Path` indicates which macro
/// is being invoked, and the vector of token-trees contains the source
/// of the macro invocation.
///
/// N.B., the additional ident for a `macro_rules`-style macro is actually
/// stored in the enclosing item.
/// Represents a macro invocation. The `path` indicates which macro
/// is being invoked, and the `args` are arguments passed to it.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Mac {
pub path: Path,
pub delim: MacDelimiter,
pub tts: TokenStream,
pub span: Span,
pub args: P<MacArgs>,
pub prior_type_ascription: Option<(Span, bool)>,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
impl Mac {
pub fn span(&self) -> Span {
self.path.span.to(self.args.span().unwrap_or(self.path.span))
}
}
/// Arguments passed to an attribute or a function-like macro.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum MacArgs {
/// No arguments - `#[attr]`.
Empty,
/// Delimited arguments - `#[attr()/[]/{}]` or `mac!()/[]/{}`.
Delimited(DelimSpan, MacDelimiter, TokenStream),
/// Arguments of a key-value attribute - `#[attr = "value"]`.
Eq(
/// Span of the `=` token.
Span,
/// Token stream of the "value".
TokenStream,
),
}
impl MacArgs {
pub fn delim(&self) -> DelimToken {
match self {
MacArgs::Delimited(_, delim, _) => delim.to_token(),
MacArgs::Empty | MacArgs::Eq(..) => token::NoDelim,
}
}
pub fn span(&self) -> Option<Span> {
match *self {
MacArgs::Empty => None,
MacArgs::Delimited(dspan, ..) => Some(dspan.entire()),
MacArgs::Eq(eq_span, ref tokens) => Some(eq_span.to(tokens.span().unwrap_or(eq_span))),
}
}
/// Tokens inside the delimiters or after `=`.
/// Proc macros see these tokens, for example.
pub fn inner_tokens(&self) -> TokenStream {
match self {
MacArgs::Empty => TokenStream::default(),
MacArgs::Delimited(.., tokens) |
MacArgs::Eq(.., tokens) => tokens.clone(),
}
}
/// Tokens together with the delimiters or `=`.
/// Use of this method generally means that something suboptimal or hacky is happening.
pub fn outer_tokens(&self) -> TokenStream {
match *self {
MacArgs::Empty => TokenStream::default(),
MacArgs::Delimited(dspan, delim, ref tokens) =>
TokenTree::Delimited(dspan, delim.to_token(), tokens.clone()).into(),
MacArgs::Eq(eq_span, ref tokens) => iter::once(TokenTree::token(token::Eq, eq_span))
.chain(tokens.trees()).collect(),
}
}
/// Whether a macro with these arguments needs a semicolon
/// when used as a standalone item or statement.
pub fn need_semicolon(&self) -> bool {
!matches!(self, MacArgs::Delimited(_, MacDelimiter::Brace, _))
}
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum MacDelimiter {
Parenthesis,
Bracket,
Brace,
}
impl Mac {
pub fn stream(&self) -> TokenStream {
self.tts.clone()
}
}
impl MacDelimiter {
crate fn to_token(self) -> DelimToken {
match self {
@ -1408,22 +1464,25 @@ impl MacDelimiter {
MacDelimiter::Brace => DelimToken::Brace,
}
}
pub fn from_token(delim: DelimToken) -> Option<MacDelimiter> {
match delim {
token::Paren => Some(MacDelimiter::Parenthesis),
token::Bracket => Some(MacDelimiter::Bracket),
token::Brace => Some(MacDelimiter::Brace),
token::NoDelim => None,
}
}
}
/// Represents a macro definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct MacroDef {
pub tokens: TokenStream,
pub body: P<MacArgs>,
/// `true` if macro was defined with `macro_rules`.
pub legacy: bool,
}
impl MacroDef {
pub fn stream(&self) -> TokenStream {
self.tokens.clone().into()
}
}
// Clippy uses Hash and PartialEq
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq, HashStable_Generic)]
pub enum StrStyle {
@ -2323,7 +2382,7 @@ impl rustc_serialize::Decodable for AttrId {
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub struct AttrItem {
pub path: Path,
pub tokens: TokenStream,
pub args: MacArgs,
}
/// Metadata associated with an item.


@ -10,7 +10,7 @@ pub use crate::ast::Attribute;
use crate::ast;
use crate::ast::{AttrItem, AttrId, AttrKind, AttrStyle, Name, Ident, Path, PathSegment};
use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam};
use crate::mut_visit::visit_clobber;
use crate::source_map::{BytePos, Spanned};
@ -198,7 +198,7 @@ impl Attribute {
pub fn is_word(&self) -> bool {
if let AttrKind::Normal(item) = &self.kind {
item.tokens.is_empty()
matches!(item.args, MacArgs::Empty)
} else {
false
}
@ -278,17 +278,9 @@ impl MetaItem {
impl AttrItem {
pub fn meta(&self, span: Span) -> Option<MetaItem> {
let mut tokens = self.tokens.trees().peekable();
Some(MetaItem {
path: self.path.clone(),
kind: if let Some(kind) = MetaItemKind::from_tokens(&mut tokens) {
if tokens.peek().is_some() {
return None;
}
kind
} else {
return None;
},
kind: MetaItemKind::from_mac_args(&self.args)?,
span,
})
}
@ -362,8 +354,8 @@ crate fn mk_attr_id() -> AttrId {
AttrId(id)
}
pub fn mk_attr(style: AttrStyle, path: Path, tokens: TokenStream, span: Span) -> Attribute {
mk_attr_from_item(style, AttrItem { path, tokens }, span)
pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attribute {
mk_attr_from_item(style, AttrItem { path, args }, span)
}
pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute {
@ -377,12 +369,12 @@ pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attrib
/// Returns an inner attribute with the given value and span.
pub fn mk_attr_inner(item: MetaItem) -> Attribute {
mk_attr(AttrStyle::Inner, item.path, item.kind.tokens(item.span), item.span)
mk_attr(AttrStyle::Inner, item.path, item.kind.mac_args(item.span), item.span)
}
/// Returns an outer attribute with the given value and span.
pub fn mk_attr_outer(item: MetaItem) -> Attribute {
mk_attr(AttrStyle::Outer, item.path, item.kind.tokens(item.span), item.span)
mk_attr(AttrStyle::Outer, item.path, item.kind.mac_args(item.span), item.span)
}
pub fn mk_doc_comment(style: AttrStyle, comment: Symbol, span: Span) -> Attribute {
@ -520,7 +512,26 @@ impl MetaItem {
}
impl MetaItemKind {
pub fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> {
pub fn mac_args(&self, span: Span) -> MacArgs {
match self {
MetaItemKind::Word => MacArgs::Empty,
MetaItemKind::NameValue(lit) => MacArgs::Eq(span, lit.token_tree().into()),
MetaItemKind::List(list) => {
let mut tts = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tts.push(TokenTree::token(token::Comma, span).into());
}
tts.extend(item.token_trees_and_joints())
}
MacArgs::Delimited(
DelimSpan::from_single(span), MacDelimiter::Parenthesis, TokenStream::new(tts)
)
}
}
}
fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> {
match *self {
MetaItemKind::Word => vec![],
MetaItemKind::NameValue(ref lit) => {
@ -548,33 +559,8 @@ impl MetaItemKind {
}
}
// Premature conversions of `TokenTree`s to `TokenStream`s can hurt
// performance. Do not use this function if `token_trees_and_joints()` can
// be used instead.
pub fn tokens(&self, span: Span) -> TokenStream {
TokenStream::new(self.token_trees_and_joints(span))
}
fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
where I: Iterator<Item = TokenTree>,
{
let delimited = match tokens.peek().cloned() {
Some(TokenTree::Token(token)) if token == token::Eq => {
tokens.next();
return if let Some(TokenTree::Token(token)) = tokens.next() {
Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
} else {
None
};
}
Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
tokens.next();
tts.clone()
}
_ => return Some(MetaItemKind::Word),
};
let mut tokens = delimited.into_trees().peekable();
fn list_from_tokens(tokens: TokenStream) -> Option<MetaItemKind> {
let mut tokens = tokens.into_trees().peekable();
let mut result = Vec::new();
while let Some(..) = tokens.peek() {
let item = NestedMetaItem::from_tokens(&mut tokens)?;
@ -586,6 +572,47 @@ impl MetaItemKind {
}
Some(MetaItemKind::List(result))
}
fn name_value_from_tokens(
tokens: &mut impl Iterator<Item = TokenTree>,
) -> Option<MetaItemKind> {
match tokens.next() {
Some(TokenTree::Token(token)) =>
Lit::from_token(&token).ok().map(MetaItemKind::NameValue),
_ => None,
}
}
fn from_mac_args(args: &MacArgs) -> Option<MetaItemKind> {
match args {
MacArgs::Delimited(_, MacDelimiter::Parenthesis, tokens) =>
MetaItemKind::list_from_tokens(tokens.clone()),
MacArgs::Delimited(..) => None,
MacArgs::Eq(_, tokens) => {
assert!(tokens.len() == 1);
MetaItemKind::name_value_from_tokens(&mut tokens.trees())
}
MacArgs::Empty => Some(MetaItemKind::Word),
}
}
fn from_tokens(
tokens: &mut iter::Peekable<impl Iterator<Item = TokenTree>>,
) -> Option<MetaItemKind> {
match tokens.peek() {
Some(TokenTree::Delimited(_, token::Paren, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();
tokens.next();
MetaItemKind::list_from_tokens(inner_tokens)
}
Some(TokenTree::Delimited(..)) => None,
Some(TokenTree::Token(Token { kind: token::Eq, .. })) => {
tokens.next();
MetaItemKind::name_value_from_tokens(tokens)
}
_ => Some(MetaItemKind::Word),
}
}
}
impl NestedMetaItem {


@ -12,6 +12,7 @@
#![feature(const_transmute)]
#![feature(crate_visibility_modifier)]
#![feature(label_break_value)]
#![feature(matches_macro)]
#![feature(nll)]
#![feature(try_trait)]
#![feature(slice_patterns)]


@ -359,6 +359,26 @@ pub fn visit_fn_sig<T: MutVisitor>(FnSig { header, decl }: &mut FnSig, vis: &mut
vis.visit_fn_decl(decl);
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_mac_args<T: MutVisitor>(args: &mut MacArgs, vis: &mut T) {
match args {
MacArgs::Empty => {}
MacArgs::Delimited(dspan, _delim, tokens) => {
visit_delim_span(dspan, vis);
vis.visit_tts(tokens);
}
MacArgs::Eq(eq_span, tokens) => {
vis.visit_span(eq_span);
vis.visit_tts(tokens);
}
}
}
pub fn visit_delim_span<T: MutVisitor>(dspan: &mut DelimSpan, vis: &mut T) {
vis.visit_span(&mut dspan.open);
vis.visit_span(&mut dspan.close);
}
pub fn noop_flat_map_field_pattern<T: MutVisitor>(
mut fp: FieldPat,
vis: &mut T,
@ -550,9 +570,9 @@ pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
let Attribute { kind, id: _, style: _, span } = attr;
match kind {
AttrKind::Normal(AttrItem { path, tokens }) => {
AttrKind::Normal(AttrItem { path, args }) => {
vis.visit_path(path);
vis.visit_tts(tokens);
visit_mac_args(args, vis);
}
AttrKind::DocComment(_) => {}
}
@ -560,15 +580,14 @@ pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
}
pub fn noop_visit_mac<T: MutVisitor>(mac: &mut Mac, vis: &mut T) {
let Mac { path, delim: _, tts, span, prior_type_ascription: _ } = mac;
let Mac { path, args, prior_type_ascription: _ } = mac;
vis.visit_path(path);
vis.visit_tts(tts);
vis.visit_span(span);
visit_mac_args(args, vis);
}
pub fn noop_visit_macro_def<T: MutVisitor>(macro_def: &mut MacroDef, vis: &mut T) {
let MacroDef { tokens, legacy: _ } = macro_def;
vis.visit_tts(tokens);
let MacroDef { body, legacy: _ } = macro_def;
visit_mac_args(body, vis);
}
pub fn noop_visit_meta_list_item<T: MutVisitor>(li: &mut NestedMetaItem, vis: &mut T) {
@ -682,9 +701,9 @@ pub fn noop_visit_interpolated<T: MutVisitor>(nt: &mut token::Nonterminal, vis:
token::NtIdent(ident, _is_raw) => vis.visit_ident(ident),
token::NtLifetime(ident) => vis.visit_ident(ident),
token::NtLiteral(expr) => vis.visit_expr(expr),
token::NtMeta(AttrItem { path, tokens }) => {
token::NtMeta(AttrItem { path, args }) => {
vis.visit_path(path);
vis.visit_tts(tokens);
visit_mac_args(args, vis);
}
token::NtPath(path) => vis.visit_path(path),
token::NtTT(tt) => vis.visit_tt(tt),


@ -1,6 +1,6 @@
use crate::ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
use crate::ast::{SelfKind, GenericBound, TraitBoundModifier};
use crate::ast::{Attribute, MacDelimiter, GenericArg};
use crate::ast::{Attribute, GenericArg, MacArgs};
use crate::util::parser::{self, AssocOp, Fixity};
use crate::util::comments;
use crate::attr;
@ -639,17 +639,22 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_attr_item(&mut self, item: &ast::AttrItem, span: Span) {
self.ibox(0);
match item.tokens.trees().next() {
Some(TokenTree::Delimited(_, delim, tts)) => {
self.print_mac_common(
Some(MacHeader::Path(&item.path)), false, None, delim, tts, true, span
);
}
tree => {
match &item.args {
MacArgs::Delimited(_, delim, tokens) => self.print_mac_common(
Some(MacHeader::Path(&item.path)),
false,
None,
delim.to_token(),
tokens.clone(),
true,
span,
),
MacArgs::Empty | MacArgs::Eq(..) => {
self.print_path(&item.path, false, 0);
if tree.is_some() {
if let MacArgs::Eq(_, tokens) = &item.args {
self.space();
self.print_tts(item.tokens.clone(), true);
self.word_space("=");
self.print_tts(tokens.clone(), true);
}
}
}
@ -1097,9 +1102,8 @@ impl<'a> State<'a> {
}
ast::ForeignItemKind::Macro(ref m) => {
self.print_mac(m);
match m.delim {
MacDelimiter::Brace => {},
_ => self.s.word(";")
if m.args.need_semicolon() {
self.s.word(";");
}
}
}
@ -1361,9 +1365,8 @@ impl<'a> State<'a> {
}
ast::ItemKind::Mac(ref mac) => {
self.print_mac(mac);
match mac.delim {
MacDelimiter::Brace => {}
_ => self.s.word(";"),
if mac.args.need_semicolon() {
self.s.word(";");
}
}
ast::ItemKind::MacroDef(ref macro_def) => {
@ -1377,8 +1380,8 @@ impl<'a> State<'a> {
Some(MacHeader::Keyword(kw)),
has_bang,
Some(item.ident),
DelimToken::Brace,
macro_def.stream(),
macro_def.body.delim(),
macro_def.body.inner_tokens(),
true,
item.span,
);
@ -1578,9 +1581,8 @@ impl<'a> State<'a> {
}
ast::TraitItemKind::Macro(ref mac) => {
self.print_mac(mac);
match mac.delim {
MacDelimiter::Brace => {}
_ => self.s.word(";"),
if mac.args.need_semicolon() {
self.s.word(";");
}
}
}
@ -1608,9 +1610,8 @@ impl<'a> State<'a> {
}
ast::ImplItemKind::Macro(ref mac) => {
self.print_mac(mac);
match mac.delim {
MacDelimiter::Brace => {}
_ => self.s.word(";"),
if mac.args.need_semicolon() {
self.s.word(";");
}
}
}
@ -1775,10 +1776,10 @@ impl<'a> State<'a> {
Some(MacHeader::Path(&m.path)),
true,
None,
m.delim.to_token(),
m.stream(),
m.args.delim(),
m.args.inner_tokens(),
true,
m.span,
m.span(),
);
}


@ -225,6 +225,14 @@ impl TokenStream {
self.0.len()
}
pub fn span(&self) -> Option<Span> {
match &**self.0 {
[] => None,
[(tt, _)] => Some(tt.span()),
[(tt_start, _), .., (tt_end, _)] => Some(tt_start.span().to(tt_end.span())),
}
}
pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
match streams.len() {
0 => TokenStream::default(),


@ -841,11 +841,19 @@ pub fn walk_vis<'a, V: Visitor<'a>>(visitor: &mut V, vis: &'a Visibility) {
pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) {
match attr.kind {
AttrKind::Normal(ref item) => visitor.visit_tts(item.tokens.clone()),
AttrKind::Normal(ref item) => walk_mac_args(visitor, &item.args),
AttrKind::DocComment(_) => {}
}
}
pub fn walk_mac_args<'a, V: Visitor<'a>>(visitor: &mut V, args: &'a MacArgs) {
match args {
MacArgs::Empty => {}
MacArgs::Delimited(_dspan, _delim, tokens) => visitor.visit_tts(tokens.clone()),
MacArgs::Eq(_eq_span, tokens) => visitor.visit_tts(tokens.clone()),
}
}
pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
match tt {
TokenTree::Token(token) => visitor.visit_token(token),


@ -11,7 +11,7 @@ use rustc_parse::DirectoryOwnership;
use rustc_parse::parser::Parser;
use rustc_parse::validate_attr;
use syntax::ast::{self, AttrItem, Block, Ident, LitKind, NodeId, PatKind, Path};
use syntax::ast::{MacStmtStyle, StmtKind, ItemKind};
use syntax::ast::{MacArgs, MacStmtStyle, StmtKind, ItemKind};
use syntax::attr::{self, HasAttrs, is_builtin_attr};
use syntax::source_map::respan;
use syntax::feature_gate::{self, feature_err};
@ -597,13 +597,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
InvocationKind::Bang { mac, .. } => match ext {
SyntaxExtensionKind::Bang(expander) => {
self.gate_proc_macro_expansion_kind(span, fragment_kind);
let tok_result = expander.expand(self.cx, span, mac.stream());
let tok_result = expander.expand(self.cx, span, mac.args.inner_tokens());
self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
}
SyntaxExtensionKind::LegacyBang(expander) => {
let prev = self.cx.current_expansion.prior_type_ascription;
self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription;
let tok_result = expander.expand(self.cx, span, mac.stream());
let tok_result = expander.expand(self.cx, span, mac.args.inner_tokens());
let result = if let Some(result) = fragment_kind.make_from(tok_result) {
result
} else {
@ -642,8 +642,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
=> panic!("unexpected annotatable"),
})), DUMMY_SP).into();
let item = attr.unwrap_normal_item();
let input = self.extract_proc_macro_attr_input(item.tokens, span);
let tok_result = expander.expand(self.cx, span, input, item_tok);
if let MacArgs::Eq(..) = item.args {
self.cx.span_err(span, "key-value macro attributes are not supported");
}
let tok_result =
expander.expand(self.cx, span, item.args.inner_tokens(), item_tok);
self.parse_ast_fragment(tok_result, fragment_kind, &item.path, span)
}
SyntaxExtensionKind::LegacyAttr(expander) => {
@ -687,23 +690,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
}
fn extract_proc_macro_attr_input(&self, tokens: TokenStream, span: Span) -> TokenStream {
let mut trees = tokens.trees();
match trees.next() {
Some(TokenTree::Delimited(_, _, tts)) => {
if trees.next().is_none() {
return tts.into()
}
}
Some(TokenTree::Token(..)) => {}
None => return TokenStream::default(),
}
self.cx.span_err(span, "custom attribute invocations must be \
of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
followed by a delimiter token");
TokenStream::default()
}
fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {
let kind = match item {
Annotatable::Item(item) => match &item.kind {
@ -1560,7 +1546,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items);
*at = attr::Attribute {
kind: ast::AttrKind::Normal(
AttrItem { path: meta.path, tokens: meta.kind.tokens(meta.span) },
AttrItem { path: meta.path, args: meta.kind.mac_args(meta.span) },
),
span: at.span,
id: at.id,


@ -318,8 +318,8 @@ pub fn compile_declarative_macro(
let tt_spec = ast::Ident::new(sym::tt, def.span);
// Parse the macro_rules! invocation
let body = match def.kind {
ast::ItemKind::MacroDef(ref body) => body,
let (is_legacy, body) = match &def.kind {
ast::ItemKind::MacroDef(macro_def) => (macro_def.legacy, macro_def.body.inner_tokens()),
_ => unreachable!(),
};
@ -338,7 +338,7 @@ pub fn compile_declarative_macro(
mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
],
separator: Some(Token::new(
if body.legacy { token::Semi } else { token::Comma },
if is_legacy { token::Semi } else { token::Comma },
def.span,
)),
kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
@ -350,7 +350,7 @@ pub fn compile_declarative_macro(
DelimSpan::dummy(),
Lrc::new(mbe::SequenceRepetition {
tts: vec![mbe::TokenTree::token(
if body.legacy { token::Semi } else { token::Comma },
if is_legacy { token::Semi } else { token::Comma },
def.span,
)],
separator: None,
@ -360,7 +360,7 @@ pub fn compile_declarative_macro(
),
];
let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
let argument_map = match parse(sess, body, &argument_gram, None, true) {
Success(m) => m,
Failure(token, msg) => {
let s = parse_failure_msg(&token);
@ -435,7 +435,7 @@ pub fn compile_declarative_macro(
// that is not lint-checked and trigger the "failed to process buffered lint here" bug.
valid &= macro_check::check_meta_variables(sess, ast::CRATE_NODE_ID, def.span, &lhses, &rhses);
let (transparency, transparency_error) = attr::find_transparency(&def.attrs, body.legacy);
let (transparency, transparency_error) = attr::find_transparency(&def.attrs, is_legacy);
match transparency_error {
Some(TransparencyError::UnknownTransparency(value, span)) =>
diag.span_err(span, &format!("unknown macro transparency: `{}`", value)),


@ -30,13 +30,6 @@ impl MutVisitor for Marker {
}
}
impl Marker {
fn visit_delim_span(&mut self, dspan: &mut DelimSpan) {
self.visit_span(&mut dspan.open);
self.visit_span(&mut dspan.close);
}
}
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame {
Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
@ -271,7 +264,7 @@ pub(super) fn transcribe(
// jump back out of the Delimited, pop the result_stack and add the new results back to
// the previous results (from outside the Delimited).
mbe::TokenTree::Delimited(mut span, delimited) => {
marker.visit_delim_span(&mut span);
mut_visit::visit_delim_span(&mut span, &mut marker);
stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
result_stack.push(mem::take(&mut result));
}


@ -272,7 +272,7 @@ fn ttdelim_span() {
"foo!( fn main() { body } )".to_string(), &sess).unwrap();
let tts: Vec<_> = match expr.kind {
ast::ExprKind::Mac(ref mac) => mac.stream().trees().collect(),
ast::ExprKind::Mac(ref mac) => mac.args.inner_tokens().trees().collect(),
_ => panic!("not a macro"),
};


@ -3,7 +3,6 @@ use crate::expand::{AstFragment, AstFragmentKind};
use syntax::ast;
use syntax::source_map::{DUMMY_SP, dummy_spanned};
use syntax::tokenstream::TokenStream;
use syntax::mut_visit::*;
use syntax::ptr::P;
use syntax::ThinVec;
@ -17,9 +16,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId, vis: Option<ast::Visi
fn mac_placeholder() -> ast::Mac {
ast::Mac {
path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
tts: TokenStream::default().into(),
delim: ast::MacDelimiter::Brace,
span: DUMMY_SP,
args: P(ast::MacArgs::Empty),
prior_type_ascription: None,
}
}


@ -1,7 +1,7 @@
use crate::base::{self, *};
use crate::proc_macro_server;
use syntax::ast::{self, ItemKind};
use syntax::ast::{self, ItemKind, MacArgs};
use syntax::errors::{Applicability, FatalError};
use syntax::symbol::sym;
use syntax::token;
@ -183,7 +183,7 @@ crate fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>)
}
let parse_derive_paths = |attr: &ast::Attribute| {
if attr.get_normal_item().tokens.is_empty() {
if let MacArgs::Empty = attr.get_normal_item().args {
return Ok(Vec::new());
}
rustc_parse::parse_in_attr(cx.parse_sess, attr, |p| p.parse_derive_paths())


@ -6,7 +6,7 @@ use syntax::token::{self, TokenKind};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::symbol::{sym, Symbol};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
use syntax_expand::base::*;
use syntax_pos::{Span, DUMMY_SP};
@ -26,19 +26,19 @@ pub fn expand_assert<'cx>(
// `core::panic` and `std::panic` are different macros, so we use call-site
// context to pick up whichever is currently in scope.
let sp = cx.with_call_site_ctxt(sp);
let tokens = custom_message.unwrap_or_else(|| {
TokenStream::from(TokenTree::token(
TokenKind::lit(token::Str, Symbol::intern(&format!(
"assertion failed: {}",
pprust::expr_to_string(&cond_expr).escape_debug()
)), None),
DUMMY_SP,
))
});
let args = P(MacArgs::Delimited(DelimSpan::from_single(sp), MacDelimiter::Parenthesis, tokens));
let panic_call = Mac {
path: Path::from_ident(Ident::new(sym::panic, sp)),
tts: custom_message.unwrap_or_else(|| {
TokenStream::from(TokenTree::token(
TokenKind::lit(token::Str, Symbol::intern(&format!(
"assertion failed: {}",
pprust::expr_to_string(&cond_expr).escape_debug()
)), None),
DUMMY_SP,
))
}).into(),
delim: MacDelimiter::Parenthesis,
span: sp,
args,
prior_type_ascription: None,
};
let if_expr = cx.expr_if(


@ -16,7 +16,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
);
let start_span = parser.token.span;
let AttrItem { path, tokens } = panictry!(parser.parse_attr_item());
let AttrItem { path, args } = panictry!(parser.parse_attr_item());
let end_span = parser.token.span;
if parser.token != token::Eof {
parse_sess.span_diagnostic
@ -24,7 +24,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
continue;
}
krate.attrs.push(mk_attr(AttrStyle::Inner, path, tokens, start_span.to(end_span)));
krate.attrs.push(mk_attr(AttrStyle::Inner, path, args, start_span.to(end_span)));
}
krate


@ -340,14 +340,12 @@ pub fn combine_substructure(f: CombineSubstructureFunc<'_>)
fn find_type_parameters(
ty: &ast::Ty,
ty_param_names: &[ast::Name],
span: Span,
cx: &ExtCtxt<'_>,
) -> Vec<P<ast::Ty>> {
use syntax::visit;
struct Visitor<'a, 'b> {
cx: &'a ExtCtxt<'b>,
span: Span,
ty_param_names: &'a [ast::Name],
types: Vec<P<ast::Ty>>,
}
@ -366,18 +364,11 @@ fn find_type_parameters(
}
fn visit_mac(&mut self, mac: &ast::Mac) {
let span = mac.span.with_ctxt(self.span.ctxt());
self.cx.span_err(span, "`derive` cannot be used on items with type macros");
self.cx.span_err(mac.span(), "`derive` cannot be used on items with type macros");
}
}
let mut visitor = Visitor {
ty_param_names,
types: Vec::new(),
span,
cx,
};
let mut visitor = Visitor { cx, ty_param_names, types: Vec::new() };
visit::Visitor::visit_ty(&mut visitor, ty);
visitor.types
@ -605,7 +596,7 @@ impl<'a> TraitDef<'a> {
.collect();
for field_ty in field_tys {
let tys = find_type_parameters(&field_ty, &ty_param_names, self.span, cx);
let tys = find_type_parameters(&field_ty, &ty_param_names, cx);
for ty in tys {
// if we have already handled this type, skip it


@ -11,9 +11,9 @@ macro_rules! foo{
pub fn main() {
foo!();
assert!({one! two()}); //~ ERROR expected open delimiter
assert!({one! two()}); //~ ERROR expected one of `(`, `[`, or `{`, found `two`
// regardless of whether nested macro_rules works, the following should at
// least throw a conventional error.
assert!({one! two}); //~ ERROR expected open delimiter
assert!({one! two}); //~ ERROR expected one of `(`, `[`, or `{`, found `two`
}


@ -1,14 +1,14 @@
error: expected open delimiter
error: expected one of `(`, `[`, or `{`, found `two`
--> $DIR/issue-10536.rs:14:19
|
LL | assert!({one! two()});
| ^^^ expected open delimiter
| ^^^ expected one of `(`, `[`, or `{`
error: expected open delimiter
error: expected one of `(`, `[`, or `{`, found `two`
--> $DIR/issue-10536.rs:18:19
|
LL | assert!({one! two});
| ^^^ expected open delimiter
| ^^^ expected one of `(`, `[`, or `{`
error: aborting due to 2 previous errors


@ -1,3 +1,3 @@
fn main() {
foo! bar < //~ ERROR expected open delimiter
foo! bar < //~ ERROR expected one of `(`, `[`, or `{`, found `bar`
}


@ -1,8 +1,8 @@
error: expected open delimiter
error: expected one of `(`, `[`, or `{`, found `bar`
--> $DIR/macro-bad-delimiter-ident.rs:2:10
|
LL | foo! bar <
| ^^^ expected open delimiter
| ^^^ expected one of `(`, `[`, or `{`
error: aborting due to previous error


@ -18,7 +18,7 @@ mod _test2_inner {
//~| ERROR: non-builtin inner attributes are unstable
}
#[empty_attr = "y"] //~ ERROR: must only be followed by a delimiter token
#[empty_attr = "y"] //~ ERROR: key-value macro attributes are not supported
fn _test3() {}
fn attrs() {


@ -34,7 +34,7 @@ LL | #![empty_attr]
= note: for more information, see https://github.com/rust-lang/rust/issues/54727
= help: add `#![feature(proc_macro_hygiene)]` to the crate attributes to enable
error: custom attribute invocations must be of the form `#[foo]` or `#[foo(..)]`, the macro name must only be followed by a delimiter token
error: key-value macro attributes are not supported
--> $DIR/proc-macro-gates.rs:21:1
|
LL | #[empty_attr = "y"]