Auto merge of #37213 - jseyfried:refactor_crate_var, r=nrc
macros: improve `$crate`

This PR refactors the implementation of `$crate` so that
- `$crate` is only allowed at the start of a path (like `super`),
- we can make `$crate` work with inter-crate re-exports (groundwork for macro modularization), and
- we can support importing macros from an extern crate that is not declared at the crate root (also groundwork for macro modularization).

This is a [breaking-change]. For example, the following would break:
```rust
fn foo() {}

macro_rules! m { () => {
    $crate foo $crate () $crate $crate;
    //^ Today, `$crate` is allowed just about anywhere in unexported macros.
} }

fn main() { m!(); }
```
r? @nrc
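After this change, the only supported form is `$crate` at the start of a path, just like `super`. A minimal sketch of the usage that keeps working (my own example, not taken from the PR):

```rust
fn foo() {}

macro_rules! m {
    // `$crate::` may only appear as a path prefix; it resolves to the root
    // of the crate that defines the macro.
    () => { $crate::foo(); }
}

fn main() {
    m!();
}
```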
commit cfc9b5185b
@@ -37,6 +37,7 @@
use syntax::ast::{Mutability, StmtKind, TraitItem, TraitItemKind};
use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple};
use syntax::ext::base::{SyntaxExtension, Resolver as SyntaxResolver};
use syntax::ext::expand::mark_tts;
use syntax::ext::hygiene::Mark;
use syntax::feature_gate::{self, emit_feature_err};
use syntax::ext::tt::macro_rules;

@@ -95,14 +96,14 @@ fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) {
// Extract and intern the module part of the path. For
// globs and lists, the path is found directly in the AST;
// for simple paths we have to munge the path a little.
let module_path: Vec<Name> = match view_path.node {
let module_path: Vec<_> = match view_path.node {
ViewPathSimple(_, ref full_path) => {
full_path.segments
.split_last()
.unwrap()
.1
.iter()
.map(|seg| seg.identifier.name)
.map(|seg| seg.identifier)
.collect()
}

@@ -110,7 +111,7 @@ fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) {
ViewPathList(ref module_ident_path, _) => {
module_ident_path.segments
.iter()
.map(|seg| seg.identifier.name)
.map(|seg| seg.identifier)
.collect()
}
};

@@ -159,7 +160,7 @@ fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) {
(module_path.clone(), node.name.name, rename)
} else {
let name = match module_path.last() {
Some(name) => *name,
Some(ident) => ident.name,
None => {
resolve_error(
self,

@@ -207,11 +208,16 @@ fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) {
};
let mut custom_derive_crate = false;
// The mark of the expansion that generates the loaded macros.
let mut opt_mark = None;
for loaded_macro in self.crate_loader.load_macros(item, is_crate_root) {
let mark = opt_mark.unwrap_or_else(Mark::fresh);
opt_mark = Some(mark);
match loaded_macro.kind {
LoadedMacroKind::Def(mut def) => {
if def.use_locally {
self.macro_names.insert(def.ident.name);
def.body = mark_tts(&def.body, mark);
let ext = macro_rules::compile(&self.session.parse_sess, &def);
import_macro(self, def.ident.name, ext, loaded_macro.import_site);
}

@@ -249,6 +255,17 @@ fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) {
});
self.define(parent, name, TypeNS, (module, sp, vis));
if let Some(mark) = opt_mark {
let invocation = self.arenas.alloc_invocation_data(InvocationData {
module: Cell::new(module),
def_index: CRATE_DEF_INDEX,
const_integer: false,
legacy_scope: Cell::new(LegacyScope::Empty),
expansion: Cell::new(LegacyScope::Empty),
});
self.invocations.insert(mark, invocation);
}
self.populate_module_if_necessary(module);
} else if custom_derive_crate {
// Define an empty module
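The hunks above record, for each expansion mark of a loaded macro, the module it was loaded into (`self.invocations.insert(mark, invocation)`). A toy sketch of that bookkeeping, with hypothetical stand-in types rather than rustc's real ones: the map from expansion mark to originating module is what later lets `$crate` resolve to the defining crate's root.

```rust
// Toy model of the mark-to-module bookkeeping; all types here are
// hypothetical stand-ins, not rustc's real ones.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Mark(u32); // identifies one macro expansion

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ModuleId(u32); // stands in for Module<'a>

#[derive(Default)]
struct Invocations {
    map: HashMap<Mark, ModuleId>,
}

impl Invocations {
    // Called when macros are loaded: remember which module the expansion
    // identified by `mark` belongs to.
    fn record(&mut self, mark: Mark, module: ModuleId) {
        self.map.insert(mark, module);
    }

    // Later, `$crate` in tokens carrying `mark` can be resolved relative to
    // the recorded module (and from there to its crate root).
    fn module_of(&self, mark: Mark) -> Option<ModuleId> {
        self.map.get(&mark).copied()
    }
}

fn main() {
    let mut inv = Invocations::default();
    inv.record(Mark(1), ModuleId(42));
    assert_eq!(inv.module_of(Mark(1)), Some(ModuleId(42)));
}
```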
@@ -53,9 +53,9 @@
use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet};
use syntax::ext::hygiene::Mark;
use syntax::ext::hygiene::{Mark, SyntaxContext};
use syntax::ast::{self, FloatTy};
use syntax::ast::{CRATE_NODE_ID, Name, NodeId, IntTy, UintTy};
use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, IntTy, UintTy};
use syntax::ext::base::SyntaxExtension;
use syntax::parse::token::{self, keywords};
use syntax::util::lev_distance::find_best_match_for_name;

@@ -509,7 +509,7 @@ struct BindingInfo {
}
// Map from the name in a pattern to its binding mode.
type BindingMap = FnvHashMap<ast::Ident, BindingInfo>;
type BindingMap = FnvHashMap<Ident, BindingInfo>;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum PatternSource {

@@ -714,7 +714,7 @@ enum ModulePrefixResult<'a> {
/// One local scope.
#[derive(Debug)]
struct Rib<'a> {
bindings: FnvHashMap<ast::Ident, Def>,
bindings: FnvHashMap<Ident, Def>,
kind: RibKind<'a>,
}

@@ -1178,18 +1178,18 @@ fn definitions(&mut self) -> &mut Definitions {
}
trait Named {
fn name(&self) -> Name;
fn ident(&self) -> Ident;
}
impl Named for ast::PathSegment {
fn name(&self) -> Name {
self.identifier.name
fn ident(&self) -> Ident {
self.identifier
}
}
impl Named for hir::PathSegment {
fn name(&self) -> Name {
self.name
fn ident(&self) -> Ident {
Ident::with_empty_ctxt(self.name)
}
}

@@ -1364,7 +1364,7 @@ fn expect_module(&mut self, name: Name, binding: &'a NameBinding<'a>, span: Opti
/// Resolves the given module path from the given root `search_module`.
fn resolve_module_path_from_root(&mut self,
mut search_module: Module<'a>,
module_path: &[Name],
module_path: &[Ident],
index: usize,
span: Option<Span>)
-> ResolveResult<Module<'a>> {

@@ -1387,7 +1387,7 @@ fn search_parent_externals<'a>(this: &mut Resolver<'a>, needle: Name, module: Mo
// upward though scope chains; we simply resolve names directly in
// modules as we go.
while index < module_path_len {
let name = module_path[index];
let name = module_path[index].name;
match self.resolve_name_in_module(search_module, name, TypeNS, false, span) {
Failed(_) => {
let segment_name = name.as_str();

@@ -1441,7 +1441,7 @@ fn search_parent_externals<'a>(this: &mut Resolver<'a>, needle: Name, module: Mo
/// Attempts to resolve the module part of an import directive or path
/// rooted at the given module.
fn resolve_module_path(&mut self,
module_path: &[Name],
module_path: &[Ident],
use_lexical_scope: UseLexicalScopeFlag,
span: Option<Span>)
-> ResolveResult<Module<'a>> {

@@ -1479,7 +1479,7 @@ fn resolve_module_path(&mut self,
// This is not a crate-relative path. We resolve the
// first component of the path in the current lexical
// scope and then proceed to resolve below that.
let ident = ast::Ident::with_empty_ctxt(module_path[0]);
let ident = module_path[0];
let lexical_binding =
self.resolve_ident_in_lexical_scope(ident, TypeNS, span);
if let Some(binding) = lexical_binding.and_then(LexicalScopeBinding::item) {

@@ -1525,12 +1525,12 @@ fn resolve_module_path(&mut self,
/// Invariant: This must only be called during main resolution, not during
/// import resolution.
fn resolve_ident_in_lexical_scope(&mut self,
mut ident: ast::Ident,
mut ident: Ident,
ns: Namespace,
record_used: Option<Span>)
-> Option<LexicalScopeBinding<'a>> {
if ns == TypeNS {
ident = ast::Ident::with_empty_ctxt(ident.name);
ident = Ident::with_empty_ctxt(ident.name);
}
// Walk backwards up the ribs in scope.

@@ -1577,11 +1577,22 @@ fn resolve_ident_in_lexical_scope(&mut self,
/// Resolves a "module prefix". A module prefix is one or both of (a) `self::`;
/// (b) some chain of `super::`.
/// grammar: (SELF MOD_SEP ) ? (SUPER MOD_SEP) *
fn resolve_module_prefix(&mut self, module_path: &[Name], span: Option<Span>)
fn resolve_module_prefix(&mut self, module_path: &[Ident], span: Option<Span>)
-> ResolveResult<ModulePrefixResult<'a>> {
if &*module_path[0].name.as_str() == "$crate" {
let mut ctxt = module_path[0].ctxt;
while ctxt.source().0 != SyntaxContext::empty() {
ctxt = ctxt.source().0;
}
let module = self.invocations[&ctxt.source().1].module.get();
let crate_root =
if module.def_id().unwrap().is_local() { self.graph_root } else { module };
return Success(PrefixFound(crate_root, 1))
}
// Start at the current module if we see `self` or `super`, or at the
// top of the crate otherwise.
let mut i = match &*module_path[0].as_str() {
let mut i = match &*module_path[0].name.as_str() {
"self" => 1,
"super" => 0,
_ => return Success(NoPrefixFound),
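The `$crate` branch added above finds the expansion in which the `$crate` token originated by walking the identifier's syntax context back to the outermost mark, then maps that mark to a module (and hence a crate root) via `self.invocations`. A toy sketch of the walk, using hypothetical stand-in types rather than libsyntax's real hygiene data:

```rust
// Hypothetical stand-ins for hygiene data; the real compiler consults
// interned syntax-context tables instead.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SyntaxContext(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Mark(u32);

impl SyntaxContext {
    fn empty() -> Self { SyntaxContext(0) }

    // Returns the context and mark this context was created from.
    fn source(self) -> (SyntaxContext, Mark) {
        // Pretend every context is one step removed from its parent.
        (SyntaxContext(self.0.saturating_sub(1)), Mark(self.0))
    }
}

// Find the mark of the outermost expansion that produced `ctxt`,
// mirroring the loop in the diff above.
fn outermost_mark(mut ctxt: SyntaxContext) -> Mark {
    while ctxt.source().0 != SyntaxContext::empty() {
        ctxt = ctxt.source().0;
    }
    ctxt.source().1
}

fn main() {
    assert_eq!(outermost_mark(SyntaxContext(3)), Mark(1));
}
```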
@@ -1591,7 +1602,7 @@ fn resolve_module_prefix(&mut self, module_path: &[Name], span: Option<Span>)
self.module_map[&self.current_module.normal_ancestor_id.unwrap()];
// Now loop through all the `super`s we find.
while i < module_path.len() && "super" == module_path[i].as_str() {
while i < module_path.len() && "super" == module_path[i].name.as_str() {
debug!("(resolving module prefix) resolving `super` at {}",
module_to_string(&containing_module));
if let Some(parent) = containing_module.parent {

@@ -1649,7 +1660,7 @@ fn with_scope<F>(&mut self, id: NodeId, f: F)
/// Searches the current set of local scopes for labels.
/// Stops after meeting a closure.
fn search_label(&self, mut ident: ast::Ident) -> Option<Def> {
fn search_label(&self, mut ident: Ident) -> Option<Def> {
for rib in self.label_ribs.iter().rev() {
match rib.kind {
NormalRibKind => {

@@ -1813,7 +1824,7 @@ fn with_type_parameter_rib<'b, F>(&'b mut self, type_parameters: TypeParameters<
// plain insert (no renaming)
let def_id = self.definitions.local_def_id(type_parameter.id);
let def = Def::TyParam(def_id);
function_type_rib.bindings.insert(ast::Ident::with_empty_ctxt(name), def);
function_type_rib.bindings.insert(Ident::with_empty_ctxt(name), def);
self.record_def(type_parameter.id, PathResolution::new(def));
}
self.type_ribs.push(function_type_rib);

@@ -2271,7 +2282,7 @@ fn fresh_binding(&mut self,
pat_id: NodeId,
outer_pat_id: NodeId,
pat_src: PatternSource,
bindings: &mut FnvHashMap<ast::Ident, NodeId>)
bindings: &mut FnvHashMap<Ident, NodeId>)
-> PathResolution {
// Add the binding to the local ribs, if it
// doesn't already exist in the bindings map. (We

@@ -2372,7 +2383,7 @@ fn resolve_pattern(&mut self,
pat_src: PatternSource,
// Maps idents to the node ID for the
// outermost pattern that binds them.
bindings: &mut FnvHashMap<ast::Ident, NodeId>) {
bindings: &mut FnvHashMap<Ident, NodeId>) {
// Visit all direct subpatterns of this pattern.
let outer_pat_id = pat.id;
pat.walk(&mut |pat| {

@@ -2573,7 +2584,7 @@ fn resolve_path(&mut self, id: NodeId, path: &Path, path_depth: usize, namespace
// Resolve a single identifier
fn resolve_identifier(&mut self,
identifier: ast::Ident,
identifier: Ident,
namespace: Namespace,
record_used: Option<Span>)
-> Option<LocalDef> {

@@ -2681,12 +2692,8 @@ fn resolve_module_relative_path(&mut self,
namespace: Namespace)
-> Result<&'a NameBinding<'a>,
bool /* true if an error was reported */> {
let module_path = segments.split_last()
.unwrap()
.1
.iter()
.map(|ps| ps.identifier.name)
.collect::<Vec<_>>();
let module_path =
segments.split_last().unwrap().1.iter().map(|ps| ps.identifier).collect::<Vec<_>>();
let containing_module;
match self.resolve_module_path(&module_path, UseLexicalScope, Some(span)) {

@@ -2715,7 +2722,7 @@ fn resolve_crate_relative_path<T>(&mut self, span: Span, segments: &[T], namespa
bool /* true if an error was reported */>
where T: Named,
{
let module_path = segments.split_last().unwrap().1.iter().map(T::name).collect::<Vec<_>>();
let module_path = segments.split_last().unwrap().1.iter().map(T::ident).collect::<Vec<_>>();
let root_module = self.graph_root;
let containing_module;

@@ -2734,7 +2741,7 @@ fn resolve_crate_relative_path<T>(&mut self, span: Span, segments: &[T], namespa
}
}
let name = segments.last().unwrap().name();
let name = segments.last().unwrap().ident().name;
let result =
self.resolve_name_in_module(containing_module, name, namespace, false, Some(span));
result.success().ok_or(false)

@@ -2835,7 +2842,7 @@ fn find_best_match(&mut self, name: &str) -> SuggestionType {
} SuggestionType::NotFound
}
fn resolve_labeled_block(&mut self, label: Option<ast::Ident>, id: NodeId, block: &Block) {
fn resolve_labeled_block(&mut self, label: Option<Ident>, id: NodeId, block: &Block) {
if let Some(label) = label {
let def = Def::Label(id);
self.with_label_rib(|this| {

@@ -2976,9 +2983,8 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
msg = format!("did you mean {}?", msg);
} else {
// we display a help message if this is a module
let name_path = path.segments.iter()
.map(|seg| seg.identifier.name)
.collect::<Vec<_>>();
let name_path: Vec<_> =
path.segments.iter().map(|seg| seg.identifier).collect();
match self.resolve_module_path(&name_path[..],
UseLexicalScope,

@@ -3237,7 +3243,7 @@ fn lookup_candidates<FilterFn>(&mut self,
if name == lookup_name && ns == namespace {
if filter_fn(name_binding.def()) {
// create the path
let ident = ast::Ident::with_empty_ctxt(name);
let ident = Ident::with_empty_ctxt(name);
let params = PathParameters::none();
let segment = PathSegment {
identifier: ident,

@@ -3271,7 +3277,7 @@ fn lookup_candidates<FilterFn>(&mut self,
_ if module.parent.is_none() => path_segments.clone(),
ModuleKind::Def(_, name) => {
let mut paths = path_segments.clone();
let ident = ast::Ident::with_empty_ctxt(name);
let ident = Ident::with_empty_ctxt(name);
let params = PathParameters::none();
let segm = PathSegment {
identifier: ident,

@@ -3317,7 +3323,7 @@ fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility {
}
};
let segments: Vec<_> = path.segments.iter().map(|seg| seg.identifier.name).collect();
let segments: Vec<_> = path.segments.iter().map(|seg| seg.identifier).collect();
let mut path_resolution = err_path_resolution();
let vis = match self.resolve_module_path(&segments, DontUseLexicalScope, Some(path.span)) {
Success(module) => {

@@ -3469,26 +3475,24 @@ fn report_conflict(&self,
}
}
fn names_to_string(names: &[Name]) -> String {
fn names_to_string(names: &[Ident]) -> String {
let mut first = true;
let mut result = String::new();
for name in names {
for ident in names {
if first {
first = false
} else {
result.push_str("::")
}
result.push_str(&name.as_str());
result.push_str(&ident.name.as_str());
}
result
}
fn path_names_to_string(path: &Path, depth: usize) -> String {
let names: Vec<ast::Name> = path.segments[..path.segments.len() - depth]
.iter()
.map(|seg| seg.identifier.name)
.collect();
names_to_string(&names[..])
let names: Vec<_> =
path.segments[..path.segments.len() - depth].iter().map(|seg| seg.identifier).collect();
names_to_string(&names)
}

@@ -3551,15 +3555,15 @@ fn show_candidates(session: &mut DiagnosticBuilder,
fn module_to_string(module: Module) -> String {
let mut names = Vec::new();
fn collect_mod(names: &mut Vec<ast::Name>, module: Module) {
fn collect_mod(names: &mut Vec<Ident>, module: Module) {
if let ModuleKind::Def(_, name) = module.kind {
if let Some(parent) = module.parent {
names.push(name);
names.push(Ident::with_empty_ctxt(name));
collect_mod(names, parent);
}
} else {
// danger, shouldn't be ident?
names.push(token::intern("<opaque>"));
names.push(token::str_to_ident("<opaque>"));
collect_mod(names, module.parent.unwrap());
}
}

@@ -3568,7 +3572,7 @@ fn collect_mod(names: &mut Vec<ast::Name>, module: Module) {
if names.is_empty() {
return "???".to_string();
}
names_to_string(&names.into_iter().rev().collect::<Vec<ast::Name>>())
names_to_string(&names.into_iter().rev().collect::<Vec<_>>())
}
fn err_path_resolution() -> PathResolution {
@@ -29,10 +29,10 @@
#[derive(Clone)]
pub struct InvocationData<'a> {
pub module: Cell<Module<'a>>,
def_index: DefIndex,
pub def_index: DefIndex,
// True if this expansion is in a `const_integer` position, for example `[u32; m!()]`.
// c.f. `DefCollector::visit_ast_const_integer`.
const_integer: bool,
pub const_integer: bool,
// The scope in which the invocation path is resolved.
pub legacy_scope: Cell<LegacyScope<'a>>,
// The smallest scope that includes this invocation's expansion,
@@ -24,7 +24,7 @@
use rustc::lint::builtin::PRIVATE_IN_PUBLIC;
use rustc::hir::def::*;
use syntax::ast::{NodeId, Name};
use syntax::ast::{Ident, NodeId, Name};
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::Span;

@@ -69,7 +69,7 @@ pub fn single(target: Name, source: Name) -> Self {
pub struct ImportDirective<'a> {
pub id: NodeId,
parent: Module<'a>,
module_path: Vec<Name>,
module_path: Vec<Ident>,
imported_module: Cell<Option<Module<'a>>>, // the resolution of `module_path`
subclass: ImportDirectiveSubclass<'a>,
span: Span,

@@ -252,7 +252,7 @@ fn try_result(&mut self, resolution: &NameResolution<'a>, ns: Namespace)
// Add an import directive to the current module.
pub fn add_import_directive(&mut self,
module_path: Vec<Name>,
module_path: Vec<Ident>,
subclass: ImportDirectiveSubclass<'a>,
span: Span,
id: NodeId,

@@ -816,7 +816,7 @@ fn finalize_resolutions_in(&mut self, module: Module<'b>) {
}
}
fn import_path_to_string(names: &[Name], subclass: &ImportDirectiveSubclass) -> String {
fn import_path_to_string(names: &[Ident], subclass: &ImportDirectiveSubclass) -> String {
if names.is_empty() {
import_directive_subclass_to_string(subclass)
} else {
@@ -295,7 +295,9 @@ fn write_token<W: Writer>(&mut self,
"Option" | "Result" => Class::PreludeTy,
"Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
"$crate" => Class::KeyWord,
_ if tas.tok.is_any_keyword() => Class::KeyWord,
_ => {
if self.in_macro_nonterminal {
self.in_macro_nonterminal = false;

@@ -310,9 +312,6 @@ fn write_token<W: Writer>(&mut self,
}
}
// Special macro vars are like keywords.
token::SpecialVarNt(_) => Class::KeyWord,
token::Lifetime(..) => Class::Lifetime,
token::Underscore | token::Eof | token::Interpolated(..) |
@@ -939,6 +939,6 @@ fn new_span(&mut self, mut span: Span) -> Span {
}
// apply a given mark to the given token trees. Used prior to expansion of a macro.
fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
pub fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
}
@@ -58,7 +58,6 @@ pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: ExpansionKind) -> Expansion
struct MacroRulesMacroExpander {
name: ast::Ident,
imported_from: Option<ast::Ident>,
lhses: Vec<TokenTree>,
rhses: Vec<TokenTree>,
valid: bool,

@@ -76,7 +75,6 @@ fn expand<'cx>(&self,
generic_extension(cx,
sp,
self.name,
self.imported_from,
arg,
&self.lhses,
&self.rhses)

@@ -87,7 +85,6 @@ fn expand<'cx>(&self,
fn generic_extension<'cx>(cx: &'cx ExtCtxt,
sp: Span,
name: ast::Ident,
imported_from: Option<ast::Ident>,
arg: &[TokenTree],
lhses: &[TokenTree],
rhses: &[TokenTree])

@@ -116,10 +113,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
_ => cx.span_bug(sp, "malformed macro rhs"),
};
// rhs has holes ( `$id` and `$(...)` that need filled)
let trncbr = new_tt_reader(&cx.parse_sess.span_diagnostic,
Some(named_matches),
imported_from,
rhs);
let trncbr =
new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
let mut p = Parser::new(cx.parse_sess(), cx.cfg().clone(), Box::new(trncbr));
p.directory = cx.current_expansion.module.directory.clone();
p.restrictions = match cx.current_expansion.no_noninline_mod {

@@ -223,7 +218,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
];
// Parse the macro_rules! invocation (`none` is for no interpolations):
let arg_reader = new_tt_reader(&sess.span_diagnostic, None, None, def.body.clone());
let arg_reader = new_tt_reader(&sess.span_diagnostic, None, def.body.clone());
let argument_map = match parse(sess, &Vec::new(), arg_reader, &argument_gram) {
Success(m) => m,

@@ -269,7 +264,6 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
let exp: Box<_> = Box::new(MacroRulesMacroExpander {
name: def.ident,
imported_from: def.imported_from,
lhses: lhses,
rhses: rhses,
valid: valid,
@@ -14,7 +14,7 @@
use errors::{Handler, DiagnosticBuilder};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{DocComment, MatchNt, SubstNt};
use parse::token::{Token, Interpolated, NtIdent, NtTT, SpecialMacroVar};
use parse::token::{Token, Interpolated, NtIdent, NtTT};
use parse::token;
use parse::lexer::TokenAndSpan;
use tokenstream::{self, TokenTree};

@@ -39,10 +39,7 @@ pub struct TtReader<'a> {
stack: Vec<TtFrame>,
/* for MBE-style macro transcription */
interpolations: HashMap<Ident, Rc<NamedMatch>>,
imported_from: Option<Ident>,
// Some => return imported_from as the next token
crate_name_next: Option<Span>,
repeat_idx: Vec<usize>,
repeat_len: Vec<usize>,
/* cached: */

@@ -59,10 +56,9 @@ pub struct TtReader<'a> {
/// (and should) be None.
pub fn new_tt_reader(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
imported_from: Option<Ident>,
src: Vec<tokenstream::TokenTree>)
-> TtReader {
new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
new_tt_reader_with_doc_flag(sp_diag, interp, src, false)
}
/// The extra `desugar_doc_comments` flag enables reading doc comments

@@ -73,7 +69,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
/// (and should) be None.
pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
imported_from: Option<Ident>,
src: Vec<tokenstream::TokenTree>,
desugar_doc_comments: bool)
-> TtReader {

@@ -93,8 +88,6 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
None => HashMap::new(),
Some(x) => x,
},
imported_from: imported_from,
crate_name_next: None,
repeat_idx: Vec::new(),
repeat_len: Vec::new(),
desugar_doc_comments: desugar_doc_comments,

@@ -189,14 +182,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
sp: r.cur_span.clone(),
};
loop {
match r.crate_name_next.take() {
None => (),
Some(sp) => {
r.cur_span = sp;
r.cur_tok = token::Ident(r.imported_from.unwrap());
return ret_val;
},
}
let should_pop = match r.stack.last() {
None => {
assert_eq!(ret_val.tok, token::Eof);

@@ -346,18 +331,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
sep: None
});
}
TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
r.stack.last_mut().unwrap().idx += 1;
if r.imported_from.is_some() {
r.cur_span = sp;
r.cur_tok = token::ModSep;
r.crate_name_next = Some(sp);
return ret_val;
}
// otherwise emit nothing and proceed to the next token
}
TokenTree::Token(sp, tok) => {
r.cur_span = sp;
r.cur_tok = tok;
@@ -276,7 +276,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
pub fn tts_to_parser<'a>(sess: &'a ParseSess,
tts: Vec<tokenstream::TokenTree>,
cfg: ast::CrateConfig) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
let mut p = Parser::new(sess, cfg, Box::new(trdr));
p.check_unknown_macro_variable();
p
@@ -48,8 +48,7 @@
use parse::common::SeqSep;
use parse::lexer::{Reader, TokenAndSpan};
use parse::obsolete::ObsoleteSyntax;
use parse::token::{self, intern, MatchNt, SubstNt, SpecialVarNt, InternedString};
use parse::token::{keywords, SpecialMacroVar};
use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString};
use parse::{new_sub_parser_from_file, ParseSess};
use util::parser::{AssocOp, Fixity};
use print::pprust;

@@ -2653,8 +2652,12 @@ fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
num_captures: name_num
})));
} else if self.token.is_keyword(keywords::Crate) {
let ident = match self.token {
token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id },
_ => unreachable!(),
};
self.bump();
return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
return Ok(TokenTree::Token(sp, token::Ident(ident)));
} else {
sp = mk_sp(sp.lo, self.span.hi);
self.parse_ident().unwrap_or_else(|mut e| {
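The parser hunk above no longer emits a special `SpecialVarNt(CrateMacroVar)` token for `$crate`; it produces an ordinary identifier whose name is the reserved string `$crate`, and crucially it keeps the original token's hygiene information via `..id`. A toy sketch of that idea, with hypothetical stand-in types rather than libsyntax's real `ast::Ident`:

```rust
// Hypothetical stand-ins: only the name changes, the syntax context is kept,
// so the resolver can still tell which expansion the token came from.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SyntaxContext(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Ident {
    name: &'static str,
    ctxt: SyntaxContext,
}

// Rewrite the `crate` keyword that followed `$` into the reserved name
// "$crate", preserving its hygiene context.
fn dollar_crate_ident(id: Ident) -> Ident {
    Ident { name: "$crate", ..id }
}

fn main() {
    let original = Ident { name: "crate", ctxt: SyntaxContext(7) };
    let rewritten = dollar_crate_ident(original);
    assert_eq!(rewritten.name, "$crate");
    assert_eq!(rewritten.ctxt, original.ctxt); // context is untouched
}
```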
@@ -52,21 +52,6 @@ pub enum DelimToken {
NoDelim,
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum SpecialMacroVar {
/// `$crate` will be filled in with the name of the crate a macro was
/// imported from, if any.
CrateMacroVar,
}
impl SpecialMacroVar {
pub fn as_str(self) -> &'static str {
match self {
SpecialMacroVar::CrateMacroVar => "crate",
}
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum Lit {
Byte(ast::Name),

@@ -148,8 +133,6 @@ pub enum Token {
// In right-hand-sides of MBE macros:
/// A syntactic variable that will be filled in by macro expansion.
SubstNt(ast::Ident),
/// A macro variable with special meaning.
SpecialVarNt(SpecialMacroVar),
// Junk. These carry no data because we don't really care about the data
// they *would* carry, and don't really want to allocate a new ident for
@@ -285,8 +285,6 @@ pub fn token_to_string(tok: &Token) -> String {
token::Comment => "/* */".to_string(),
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::SpecialVarNt(var) => format!("${}", var.as_str()),
token::Interpolated(ref nt) => match *nt {
token::NtExpr(ref e) => expr_to_string(&e),
token::NtMeta(ref e) => meta_item_to_string(&e),
@@ -134,7 +134,6 @@ pub fn len(&self) -> usize {
AttrStyle::Inner => 3,
}
}
TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
TokenTree::Token(_, token::MatchNt(..)) => 3,
TokenTree::Token(_, token::Interpolated(Nonterminal::NtTT(..))) => 1,
TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,

@@ -188,11 +187,6 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
}
delimed.tts[index - 1].clone()
}
(&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
let v = [TokenTree::Token(sp, token::Dollar),
TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
v[index].clone()
}
(&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
let v = [TokenTree::Token(sp, token::SubstNt(name)),
TokenTree::Token(sp, token::Colon),

@@ -223,7 +217,6 @@ pub fn parse(cx: &base::ExtCtxt,
-> macro_parser::NamedParseResult {
// `None` is because we're not interpolating
let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
None,
None,
tts.iter().cloned().collect(),
true);
@@ -38,7 +38,7 @@ pub fn bar() {
((::std::fmt::format as
(($crate::fmt::format as
fn(std::fmt::Arguments<'_>) -> std::string::String {std::fmt::format})(((::std::fmt::Arguments::new_v1
as
fn(&[&str], &[std::fmt::ArgumentV1<'_>]) -> std::fmt::Arguments<'_> {std::fmt::Arguments<'_>::new_v1})(({