Rollup merge of #124607 - nnethercote:rustc_expand-cleanups, r=compiler-errors
`rustc_expand` cleanups

Some cleanups I made while looking through this code. Nothing that requires any real domain-specific knowledge about this crate.

r? `@michaelwoerister`
commit ad0be151af
@@ -21,7 +21,7 @@
 use rustc_session::{parse::ParseSess, Limit, Session};
 use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
 use rustc_span::edition::Edition;
-use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId};
+use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId, MacroKind};
 use rustc_span::source_map::SourceMap;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{FileName, Span, DUMMY_SP};
@@ -32,8 +32,6 @@
 use std::rc::Rc;
 use thin_vec::ThinVec;
 
-pub(crate) use rustc_span::hygiene::MacroKind;
-
 // When adding new variants, make sure to
 // adjust the `visit_*` / `flat_map_*` calls in `InvocationCollector`
 // to use `assign_id!`
@@ -573,35 +571,6 @@ pub fn raw_expr(sp: Span, guar: Option<ErrorGuaranteed>) -> P<ast::Expr> {
             tokens: None,
         })
     }
-
-    /// A plain dummy pattern.
-    pub fn raw_pat(sp: Span) -> ast::Pat {
-        ast::Pat { id: ast::DUMMY_NODE_ID, kind: PatKind::Wild, span: sp, tokens: None }
-    }
-
-    /// A plain dummy type.
-    pub fn raw_ty(sp: Span) -> P<ast::Ty> {
-        // FIXME(nnethercote): you might expect `ast::TyKind::Dummy` to be used here, but some
-        // values produced here end up being lowered to HIR, which `ast::TyKind::Dummy` does not
-        // support, so we use an empty tuple instead.
-        P(ast::Ty {
-            id: ast::DUMMY_NODE_ID,
-            kind: ast::TyKind::Tup(ThinVec::new()),
-            span: sp,
-            tokens: None,
-        })
-    }
-
-    /// A plain dummy crate.
-    pub fn raw_crate() -> ast::Crate {
-        ast::Crate {
-            attrs: Default::default(),
-            items: Default::default(),
-            spans: Default::default(),
-            id: ast::DUMMY_NODE_ID,
-            is_placeholder: Default::default(),
-        }
-    }
 }
 
 impl MacResult for DummyResult {
@@ -610,7 +579,12 @@ fn make_expr(self: Box<DummyResult>) -> Option<P<ast::Expr>> {
     }
 
     fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
-        Some(P(DummyResult::raw_pat(self.span)))
+        Some(P(ast::Pat {
+            id: ast::DUMMY_NODE_ID,
+            kind: PatKind::Wild,
+            span: self.span,
+            tokens: None,
+        }))
     }
 
     fn make_items(self: Box<DummyResult>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
@@ -638,7 +612,15 @@ fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
     }
 
     fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> {
-        Some(DummyResult::raw_ty(self.span))
+        // FIXME(nnethercote): you might expect `ast::TyKind::Dummy` to be used here, but some
+        // values produced here end up being lowered to HIR, which `ast::TyKind::Dummy` does not
+        // support, so we use an empty tuple instead.
+        Some(P(ast::Ty {
+            id: ast::DUMMY_NODE_ID,
+            kind: ast::TyKind::Tup(ThinVec::new()),
+            span: self.span,
+            tokens: None,
+        }))
     }
 
     fn make_arms(self: Box<DummyResult>) -> Option<SmallVec<[ast::Arm; 1]>> {
@@ -670,7 +652,13 @@ fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> {
     }
 
     fn make_crate(self: Box<DummyResult>) -> Option<ast::Crate> {
-        Some(DummyResult::raw_crate())
+        Some(ast::Crate {
+            attrs: Default::default(),
+            items: Default::default(),
+            spans: Default::default(),
+            id: ast::DUMMY_NODE_ID,
+            is_placeholder: Default::default(),
+        })
     }
 }
 
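
The four hunks above all apply the same cleanup: `DummyResult`'s single-use `raw_pat`/`raw_ty`/`raw_crate` helpers are inlined into the `MacResult` methods that were their only callers. A minimal standalone sketch of that pattern, using simplified stand-in types rather than the real rustc AST:

```rust
// Simplified stand-in types; not the real rustc_ast definitions.
struct Pat {
    wild: bool,
    span: u32,
}

struct DummyResult {
    span: u32,
}

trait MacResult {
    fn make_pat(self: Box<Self>) -> Option<Pat>;
}

impl MacResult for DummyResult {
    fn make_pat(self: Box<DummyResult>) -> Option<Pat> {
        // Where a separate `raw_pat(span)` constructor used to be called,
        // the dummy value is now built directly at its only use site.
        Some(Pat { wild: true, span: self.span })
    }
}

fn main() {
    let result: Box<dyn MacResult> = Box::new(DummyResult { span: 0 });
    assert!(result.make_pat().is_some());
}
```
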
@@ -175,20 +175,6 @@ pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
         ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) }
     }
 
-    pub fn stmt_let_pat(&self, sp: Span, pat: P<ast::Pat>, ex: P<ast::Expr>) -> ast::Stmt {
-        let local = P(ast::Local {
-            pat,
-            ty: None,
-            id: ast::DUMMY_NODE_ID,
-            kind: LocalKind::Init(ex),
-            span: sp,
-            colon_sp: None,
-            attrs: AttrVec::new(),
-            tokens: None,
-        });
-        self.stmt_local(local, sp)
-    }
-
     pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt {
         self.stmt_let_ty(sp, mutbl, ident, None, ex)
     }
@@ -278,10 +264,6 @@ pub fn expr_self(&self, span: Span) -> P<ast::Expr> {
         self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower))
     }
 
-    pub fn expr_field(&self, span: Span, expr: P<Expr>, field: Ident) -> P<ast::Expr> {
-        self.expr(span, ast::ExprKind::Field(expr, field))
-    }
-
     pub fn expr_macro_call(&self, span: Span, call: P<ast::MacCall>) -> P<ast::Expr> {
         self.expr(span, ast::ExprKind::MacCall(call))
     }
@@ -394,11 +376,6 @@ pub fn expr_str(&self, span: Span, s: Symbol) -> P<ast::Expr> {
         self.expr(span, ast::ExprKind::Lit(lit))
     }
 
-    pub fn expr_char(&self, span: Span, ch: char) -> P<ast::Expr> {
-        let lit = token::Lit::new(token::Char, literal::escape_char_symbol(ch), None);
-        self.expr(span, ast::ExprKind::Lit(lit))
-    }
-
     pub fn expr_byte_str(&self, span: Span, bytes: Vec<u8>) -> P<ast::Expr> {
         let lit = token::Lit::new(token::ByteStr, literal::escape_byte_str_symbol(&bytes), None);
         self.expr(span, ast::ExprKind::Lit(lit))
@@ -414,10 +391,6 @@ pub fn expr_array_ref(&self, sp: Span, exprs: ThinVec<P<ast::Expr>>) -> P<ast::E
         self.expr_addr_of(sp, self.expr_array(sp, exprs))
     }
 
-    pub fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Cast(expr, ty))
-    }
-
     pub fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
         let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
         self.expr_call_global(sp, some, thin_vec![expr])
@@ -99,10 +99,11 @@ fn feature_list(attr: &Attribute) -> ThinVec<ast::NestedMetaItem> {
         // If the declared feature is unstable, record it.
         if let Some(f) = UNSTABLE_FEATURES.iter().find(|f| name == f.feature.name) {
             (f.set_enabled)(&mut features);
-            // When the ICE comes from core, alloc or std (approximation of the standard library), there's a chance
-            // that the person hitting the ICE may be using -Zbuild-std or similar with an untested target.
-            // The bug is probably in the standard library and not the compiler in that case, but that doesn't
-            // really matter - we want a bug report.
+            // When the ICE comes from core, alloc or std (approximation of the standard
+            // library), there's a chance that the person hitting the ICE may be using
+            // -Zbuild-std or similar with an untested target. The bug is probably in the
+            // standard library and not the compiler in that case, but that doesn't really
+            // matter - we want a bug report.
             if features.internal(name)
                 && ![sym::core, sym::alloc, sym::std].contains(&crate_name)
             {
@@ -4,7 +4,6 @@
     IncompleteParse, RecursionLimitReached, RemoveExprNotSupported, RemoveNodeNotSupported,
     UnsupportedKeyValue, WrongFragmentKind,
 };
-use crate::hygiene::SyntaxContext;
 use crate::mbe::diagnostics::annotate_err_with_kind;
 use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod};
 use crate::placeholders::{placeholder, PlaceholderExpander};
@@ -32,6 +31,7 @@
 use rustc_session::lint::BuiltinLintDiag;
 use rustc_session::parse::feature_err;
 use rustc_session::{Limit, Session};
+use rustc_span::hygiene::SyntaxContext;
 use rustc_span::symbol::{sym, Ident};
 use rustc_span::{ErrorGuaranteed, FileName, LocalExpnId, Span};
 
@@ -87,7 +87,7 @@ fn make_from<'a>(self, result: Box<dyn MacResult + 'a>) -> Option<AstFragment> {
 }
 
 impl AstFragment {
-    pub fn add_placeholders(&mut self, placeholders: &[NodeId]) {
+    fn add_placeholders(&mut self, placeholders: &[NodeId]) {
         if placeholders.is_empty() {
             return;
         }
@@ -100,14 +100,14 @@ pub fn add_placeholders(&mut self, placeholders: &[NodeId]) {
         }
     }
 
-    pub fn make_opt_expr(self) -> Option<P<ast::Expr>> {
+    pub(crate) fn make_opt_expr(self) -> Option<P<ast::Expr>> {
         match self {
             AstFragment::OptExpr(expr) => expr,
             _ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
         }
     }
 
-    pub fn make_method_receiver_expr(self) -> P<ast::Expr> {
+    pub(crate) fn make_method_receiver_expr(self) -> P<ast::Expr> {
         match self {
             AstFragment::MethodReceiverExpr(expr) => expr,
             _ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
@@ -125,7 +125,7 @@ fn make_ast<T: InvocationCollectorNode>(self) -> T::OutputTy {
         T::fragment_to_output(self)
     }
 
-    pub fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
+    pub(crate) fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
         match self {
             AstFragment::OptExpr(opt_expr) => {
                 visit_clobber(opt_expr, |opt_expr| {
@@ -372,6 +372,14 @@ pub fn span(&self) -> Span {
             InvocationKind::Derive { path, .. } => path.span,
         }
     }
+
+    fn span_mut(&mut self) -> &mut Span {
+        match &mut self.kind {
+            InvocationKind::Bang { span, .. } => span,
+            InvocationKind::Attr { attr, .. } => &mut attr.span,
+            InvocationKind::Derive { path, .. } => &mut path.span,
+        }
+    }
 }
 
 pub struct MacroExpander<'a, 'b> {
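
A small self-contained sketch of the accessor pattern introduced above, with placeholder types standing in for `Invocation`, `InvocationKind`, and `Span`: matching once inside a `*_mut` method hands callers a mutable reference to the right field.

```rust
// Placeholder types; `u32` stands in for `Span`.
enum Kind {
    Bang { span: u32 },
    Attr { attr_span: u32 },
}

struct Invocation {
    kind: Kind,
}

impl Invocation {
    // Mirrors the shape of the new `span_mut` helper: the match over variants
    // lives in one place and returns a mutable borrow of the relevant field.
    fn span_mut(&mut self) -> &mut u32 {
        match &mut self.kind {
            Kind::Bang { span } => span,
            Kind::Attr { attr_span } => attr_span,
        }
    }
}

fn main() {
    let mut invocation = Invocation { kind: Kind::Bang { span: 1 } };
    *invocation.span_mut() = 7;
    assert!(matches!(invocation.kind, Kind::Bang { span: 7 }));
}
```
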
@@ -432,7 +440,8 @@ pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragm
                     break;
                 }
                 invocations = mem::take(&mut undetermined_invocations);
-                force = !mem::replace(&mut progress, false);
+                force = !progress;
+                progress = false;
                 if force && self.monotonic {
                     self.cx.dcx().span_delayed_bug(
                         invocations.last().unwrap().0.span(),
|
|||||||
self.cx.force_mode = force;
|
self.cx.force_mode = force;
|
||||||
|
|
||||||
let fragment_kind = invoc.fragment_kind;
|
let fragment_kind = invoc.fragment_kind;
|
||||||
let (expanded_fragment, new_invocations) = match self.expand_invoc(invoc, &ext.kind) {
|
match self.expand_invoc(invoc, &ext.kind) {
|
||||||
ExpandResult::Ready(fragment) => {
|
ExpandResult::Ready(fragment) => {
|
||||||
let mut derive_invocations = Vec::new();
|
let mut derive_invocations = Vec::new();
|
||||||
let derive_placeholders = self
|
let derive_placeholders = self
|
||||||
@@ -503,12 +512,19 @@ pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragm
                         })
                         .unwrap_or_default();
 
-                    let (fragment, collected_invocations) =
+                    let (expanded_fragment, collected_invocations) =
                         self.collect_invocations(fragment, &derive_placeholders);
-                    // We choose to expand any derive invocations associated with this macro invocation
-                    // *before* any macro invocations collected from the output fragment
+                    // We choose to expand any derive invocations associated with this macro
+                    // invocation *before* any macro invocations collected from the output
+                    // fragment.
                     derive_invocations.extend(collected_invocations);
-                    (fragment, derive_invocations)
+
+                    progress = true;
+                    if expanded_fragments.len() < depth {
+                        expanded_fragments.push(Vec::new());
+                    }
+                    expanded_fragments[depth - 1].push((expn_id, expanded_fragment));
+                    invocations.extend(derive_invocations.into_iter().rev());
                 }
                 ExpandResult::Retry(invoc) => {
                     if force {
@@ -519,17 +535,9 @@ pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragm
                     } else {
                         // Cannot expand, will retry this invocation later.
                         undetermined_invocations.push((invoc, Some(ext)));
-                        continue;
                     }
                 }
-            };
-
-            progress = true;
-            if expanded_fragments.len() < depth {
-                expanded_fragments.push(Vec::new());
             }
-            expanded_fragments[depth - 1].push((expn_id, expanded_fragment));
-            invocations.extend(new_invocations.into_iter().rev());
         }
 
         self.cx.current_expansion = orig_expansion_data;
@@ -590,11 +598,7 @@ fn collect_invocations(
             for (invoc, _) in invocations.iter_mut() {
                 let expn_id = invoc.expansion_data.id;
                 let parent_def = self.cx.resolver.invocation_parent(expn_id);
-                let span = match &mut invoc.kind {
-                    InvocationKind::Bang { span, .. } => span,
-                    InvocationKind::Attr { attr, .. } => &mut attr.span,
-                    InvocationKind::Derive { path, .. } => &mut path.span,
-                };
+                let span = invoc.span_mut();
                 *span = span.with_parent(Some(parent_def));
             }
         }
@@ -957,7 +961,7 @@ pub fn parse_ast_fragment<'a>(
     })
 }
 
-pub fn ensure_complete_parse<'a>(
+pub(crate) fn ensure_complete_parse<'a>(
     parser: &Parser<'a>,
     macro_path: &ast::Path,
     kind_name: &str,
@@ -1,43 +1,39 @@
+// tidy-alphabetical-start
+#![allow(internal_features)]
+#![allow(rustc::diagnostic_outside_of_impl)]
 #![doc(rust_logo)]
-#![feature(rustdoc_internals)]
 #![feature(array_windows)]
 #![feature(associated_type_defaults)]
 #![feature(if_let_guard)]
 #![feature(let_chains)]
-#![feature(lint_reasons)]
 #![feature(macro_metavar_expr)]
 #![feature(map_try_insert)]
 #![feature(proc_macro_diagnostic)]
 #![feature(proc_macro_internals)]
-#![feature(proc_macro_span)]
+#![feature(rustdoc_internals)]
 #![feature(try_blocks)]
 #![feature(yeet_expr)]
-#![allow(rustc::diagnostic_outside_of_impl)]
-#![allow(internal_features)]
+// tidy-alphabetical-end
 
 extern crate proc_macro as pm;
 
+mod build;
+mod errors;
+// FIXME(Nilstrieb) Translate macro_rules diagnostics
+#[allow(rustc::untranslatable_diagnostic)]
+mod mbe;
 mod placeholders;
 mod proc_macro_server;
 
 pub use mbe::macro_rules::compile_declarative_macro;
-pub(crate) use rustc_span::hygiene;
 pub mod base;
-pub mod build;
-#[macro_use]
 pub mod config;
-pub mod errors;
 pub mod expand;
 pub mod module;
 
 // FIXME(Nilstrieb) Translate proc_macro diagnostics
 #[allow(rustc::untranslatable_diagnostic)]
 pub mod proc_macro;
-
-// FIXME(Nilstrieb) Translate macro_rules diagnostics
-#[allow(rustc::untranslatable_diagnostic)]
-pub(crate) mod mbe;
-
 // HACK(Centril, #64197): These shouldn't really be here.
 // Rather, they should be with their respective modules which are defined in other crates.
 // However, since for now constructing a `ParseSess` sorta requires `config` from this crate,
@@ -4,12 +4,13 @@
 //! official terminology: "declarative macros".
 
 pub(crate) mod diagnostics;
-pub(crate) mod macro_check;
-pub(crate) mod macro_parser;
 pub(crate) mod macro_rules;
-pub(crate) mod metavar_expr;
-pub(crate) mod quoted;
-pub(crate) mod transcribe;
+
+mod macro_check;
+mod macro_parser;
+mod metavar_expr;
+mod quoted;
+mod transcribe;
 
 use metavar_expr::MetaVarExpr;
 use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind};
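
Dropping `pub(crate)` on these submodules works because a private child module is still visible to its parent module and to sibling children through that parent. A minimal sketch of that visibility rule (hypothetical module names, not the rustc layout):

```rust
mod mbe {
    // Private to `mbe`, yet reachable from the sibling module below.
    mod quoted {
        pub(super) fn parse(input: &str) -> usize {
            input.len()
        }
    }

    mod transcribe {
        pub(super) fn expand(input: &str) -> usize {
            // Sibling access goes through the shared parent module.
            super::quoted::parse(input) * 2
        }
    }

    pub fn run(input: &str) -> usize {
        transcribe::expand(input)
    }
}

fn main() {
    assert_eq!(mbe::run("abc"), 6);
}
```
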
@@ -28,7 +28,8 @@ pub(super) fn failed_to_match_macro<'cx>(
 ) -> Box<dyn MacResult + 'cx> {
     let psess = &cx.sess.psess;
 
-    // An error occurred, try the expansion again, tracking the expansion closely for better diagnostics.
+    // An error occurred, try the expansion again, tracking the expansion closely for better
+    // diagnostics.
     let mut tracker = CollectTrackerAndEmitter::new(cx, sp);
 
     let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker);
@@ -157,8 +157,8 @@ pub(super) trait Tracker<'matcher> {
     /// This is called before trying to match next MatcherLoc on the current token.
     fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}
 
-    /// This is called after an arm has been parsed, either successfully or unsuccessfully. When this is called,
-    /// `before_match_loc` was called at least once (with a `MatcherLoc::Eof`).
+    /// This is called after an arm has been parsed, either successfully or unsuccessfully. When
+    /// this is called, `before_match_loc` was called at least once (with a `MatcherLoc::Eof`).
     fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {}
 
     /// For tracing.
@@ -169,7 +169,8 @@ fn recovery() -> Recovery {
     }
 }
 
-/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to monomorphization.
+/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to
+/// monomorphization.
 pub(super) struct NoopTracker;
 
 impl<'matcher> Tracker<'matcher> for NoopTracker {
@@ -492,7 +493,7 @@ pub fn compile_declarative_macro(
                     .pop()
                     .unwrap();
                     // We don't handle errors here, the driver will abort
-                    // after parsing/expansion. we can report every error in every macro this way.
+                    // after parsing/expansion. We can report every error in every macro this way.
                     check_emission(check_lhs_nt_follows(sess, def, &tt));
                     return tt;
                 }
@@ -528,7 +529,7 @@ pub fn compile_declarative_macro(
         check_emission(check_rhs(sess, rhs));
     }
 
-    // don't abort iteration early, so that errors for multiple lhses can be reported
+    // Don't abort iteration early, so that errors for multiple lhses can be reported.
     for lhs in &lhses {
        check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(lhs)));
    }
@@ -39,26 +39,32 @@ fn visit_span(&mut self, span: &mut Span) {
 }
 
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
-enum Frame<'a> {
-    Delimited {
-        tts: &'a [mbe::TokenTree],
-        idx: usize,
-        delim: Delimiter,
-        span: DelimSpan,
-        spacing: DelimSpacing,
-    },
-    Sequence {
-        tts: &'a [mbe::TokenTree],
-        idx: usize,
-        sep: Option<Token>,
-        kleene_op: KleeneOp,
-    },
+struct Frame<'a> {
+    tts: &'a [mbe::TokenTree],
+    idx: usize,
+    kind: FrameKind,
+}
+
+enum FrameKind {
+    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
+    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
 }
 
 impl<'a> Frame<'a> {
-    /// Construct a new frame around the delimited set of tokens.
-    fn new(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
-        Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span, spacing }
+    fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
+        Frame {
+            tts: &src.tts,
+            idx: 0,
+            kind: FrameKind::Delimited { delim: src.delim, span, spacing },
+        }
+    }
+
+    fn new_sequence(
+        src: &'a mbe::SequenceRepetition,
+        sep: Option<Token>,
+        kleene_op: KleeneOp,
+    ) -> Frame<'a> {
+        Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
     }
 }
 
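
A standalone sketch of the refactor above (a simplified element type instead of `mbe::TokenTree`): the fields shared by every variant move into a struct, the variant-specific data moves into a `kind` enum, and the iterator no longer has to match on the variant at all.

```rust
#[allow(dead_code)]
struct Frame<'a> {
    items: &'a [u32],
    idx: usize,
    kind: FrameKind,
}

#[allow(dead_code)]
enum FrameKind {
    Delimited { delim: char },
    Sequence { separator: Option<char> },
}

impl<'a> Iterator for Frame<'a> {
    type Item = &'a u32;

    fn next(&mut self) -> Option<&'a u32> {
        // The common fields are always present, so no match is needed here.
        let res = self.items.get(self.idx);
        self.idx += 1;
        res
    }
}

fn main() {
    let data = [1, 2, 3];
    let frame = Frame { items: &data, idx: 0, kind: FrameKind::Delimited { delim: '{' } };
    assert_eq!(frame.copied().collect::<Vec<u32>>(), vec![1, 2, 3]);
}
```
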
@@ -66,13 +72,9 @@ impl<'a> Iterator for Frame<'a> {
     type Item = &'a mbe::TokenTree;
 
     fn next(&mut self) -> Option<&'a mbe::TokenTree> {
-        match self {
-            Frame::Delimited { tts, idx, .. } | Frame::Sequence { tts, idx, .. } => {
-                let res = tts.get(*idx);
-                *idx += 1;
-                res
-            }
-        }
+        let res = self.tts.get(self.idx);
+        self.idx += 1;
+        res
     }
 }
 
@@ -111,13 +113,16 @@ pub(super) fn transcribe<'a>(
     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
     // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
    // choice of spacing values doesn't matter.
-    let mut stack: SmallVec<[Frame<'_>; 1]> =
-        smallvec![Frame::new(src, src_span, DelimSpacing::new(Spacing::Alone, Spacing::Alone))];
+    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited(
+        src,
+        src_span,
+        DelimSpacing::new(Spacing::Alone, Spacing::Alone)
+    )];
 
     // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
     // `repeats` keeps track of where we are in matching at each level, with the last element being
     // the most deeply nested sequence. This is used as a stack.
-    let mut repeats = Vec::new();
+    let mut repeats: Vec<(usize, usize)> = Vec::new();
 
     // `result` contains resulting token stream from the TokenTree we just finished processing. At
     // the end, this will contain the full result of transcription, but at arbitrary points during
@@ -142,11 +147,12 @@ pub(super) fn transcribe<'a>(
 
             // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
             // go back to the beginning of the sequence.
-            if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
+            let frame = stack.last_mut().unwrap();
+            if let FrameKind::Sequence { sep, .. } = &frame.kind {
                 let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
                 *repeat_idx += 1;
                 if repeat_idx < repeat_len {
-                    *idx = 0;
+                    frame.idx = 0;
                     if let Some(sep) = sep {
                         result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
                     }
@@ -157,16 +163,16 @@ pub(super) fn transcribe<'a>(
             // We are done with the top of the stack. Pop it. Depending on what it was, we do
             // different things. Note that the outermost item must be the delimited, wrapped RHS
             // that was passed in originally to `transcribe`.
-            match stack.pop().unwrap() {
+            match stack.pop().unwrap().kind {
                 // Done with a sequence. Pop from repeats.
-                Frame::Sequence { .. } => {
+                FrameKind::Sequence { .. } => {
                     repeats.pop();
                 }
 
                 // We are done processing a Delimited. If this is the top-level delimited, we are
                 // done. Otherwise, we unwind the result_stack to append what we have produced to
                 // any previous results.
-                Frame::Delimited { delim, span, mut spacing, .. } => {
+                FrameKind::Delimited { delim, span, mut spacing, .. } => {
                     // Hack to force-insert a space after `]` in certain case.
                     // See discussion of the `hex-literal` crate in #114571.
                     if delim == Delimiter::Bracket {
@@ -192,7 +198,7 @@ pub(super) fn transcribe<'a>(
             // We are descending into a sequence. We first make sure that the matchers in the RHS
             // and the matches in `interp` have the same shape. Otherwise, either the caller or the
             // macro writer has made a mistake.
-            seq @ mbe::TokenTree::Sequence(_, delimited) => {
+            seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
                 match lockstep_iter_size(seq, interp, &repeats) {
                     LockstepIterSize::Unconstrained => {
                         return Err(cx
@@ -233,12 +239,11 @@ pub(super) fn transcribe<'a>(
                         // The first time we encounter the sequence we push it to the stack. It
                         // then gets reused (see the beginning of the loop) until we are done
                         // repeating.
-                        stack.push(Frame::Sequence {
-                            idx: 0,
-                            sep: seq.separator.clone(),
-                            tts: &delimited.tts,
-                            kleene_op: seq.kleene.op,
-                        });
+                        stack.push(Frame::new_sequence(
+                            seq_rep,
+                            seq.separator.clone(),
+                            seq.kleene.op,
+                        ));
                     }
                 }
             }
@@ -294,13 +299,7 @@ pub(super) fn transcribe<'a>(
             // the previous results (from outside the Delimited).
             mbe::TokenTree::Delimited(mut span, spacing, delimited) => {
                 mut_visit::visit_delim_span(&mut span, &mut marker);
-                stack.push(Frame::Delimited {
-                    tts: &delimited.tts,
-                    delim: delimited.delim,
-                    idx: 0,
-                    span,
-                    spacing: *spacing,
-                });
+                stack.push(Frame::new_delimited(delimited, span, *spacing));
                 result_stack.push(mem::take(&mut result));
             }
 
@@ -358,10 +357,13 @@ fn maybe_use_metavar_location(
 ) -> TokenTree {
     let undelimited_seq = matches!(
         stack.last(),
-        Some(Frame::Sequence {
+        Some(Frame {
             tts: [_],
-            sep: None,
-            kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
+            kind: FrameKind::Sequence {
+                sep: None,
+                kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
+                ..
+            },
             ..
         })
     );
@@ -9,7 +9,7 @@
 use smallvec::{smallvec, SmallVec};
 use thin_vec::ThinVec;
 
-pub fn placeholder(
+pub(crate) fn placeholder(
     kind: AstFragmentKind,
     id: ast::NodeId,
     vis: Option<ast::Visibility>,