Hygiene serialization implementation
parent 6c8927b0cf
commit 0caebfabe6
@@ -9,7 +9,8 @@
 use rustc_errors::struct_span_err;
 use rustc_hir as hir;
 use rustc_hir::def::Res;
-use rustc_span::source_map::{respan, DesugaringKind, ForLoopLoc, Span, Spanned};
+use rustc_span::hygiene::ForLoopLoc;
+use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
 use rustc_span::symbol::{sym, Ident, Symbol};
 use rustc_target::asm;
 use std::collections::hash_map::Entry;

@@ -13,7 +13,7 @@
 use rustc_errors::{DiagnosticBuilder, ErrorReported};
 use rustc_parse::{self, nt_to_tokenstream, parser, MACRO_ARGUMENTS};
 use rustc_session::{parse::ParseSess, Limit};
-use rustc_span::def_id::DefId;
+use rustc_span::def_id::{DefId, LOCAL_CRATE};
 use rustc_span::edition::Edition;
 use rustc_span::hygiene::{AstPass, ExpnData, ExpnId, ExpnKind};
 use rustc_span::source_map::SourceMap;

@@ -873,6 +873,8 @@ pub fn expn_data(
             local_inner_macros: self.local_inner_macros,
             edition: self.edition,
             macro_def_id,
+            krate: LOCAL_CRATE,
+            orig_id: None,
         }
     }
 }

@@ -1,7 +1,7 @@
 use crate::base::*;
 use crate::config::StripUnconfigured;
 use crate::configure;
-use crate::hygiene::{ExpnData, ExpnId, ExpnKind, SyntaxContext};
+use crate::hygiene::{ExpnData, ExpnKind, SyntaxContext};
 use crate::mbe::macro_rules::annotate_err_with_kind;
 use crate::module::{parse_external_mod, push_directory, Directory, DirectoryOwnership};
 use crate::placeholders::{placeholder, PlaceholderExpander};

@@ -27,7 +27,7 @@
 use rustc_session::Limit;
 use rustc_span::source_map::respan;
 use rustc_span::symbol::{sym, Ident, Symbol};
-use rustc_span::{FileName, Span, DUMMY_SP};
+use rustc_span::{ExpnId, FileName, Span, DUMMY_SP};
 
 use smallvec::{smallvec, SmallVec};
 use std::io::ErrorKind;

@@ -307,11 +307,16 @@ fn register_crate(
         let private_dep =
             self.sess.opts.externs.get(&name.as_str()).map(|e| e.is_private_dep).unwrap_or(false);
 
-        info!("register crate `{}` (private_dep = {})", crate_root.name(), private_dep);
-
         // Claim this crate number and cache it
         let cnum = self.cstore.alloc_new_crate_num();
 
+        info!(
+            "register crate `{}` (cnum = {}. private_dep = {})",
+            crate_root.name(),
+            cnum,
+            private_dep
+        );
+
         // Maintain a reference to the top most crate.
         // Stash paths for top-most crate locally if necessary.
         let crate_paths;

@@ -339,22 +344,21 @@ fn register_crate(
             None
         };
 
-        self.cstore.set_crate_data(
+        let crate_metadata = CrateMetadata::new(
+            self.sess,
+            metadata,
+            crate_root,
+            raw_proc_macros,
             cnum,
-            CrateMetadata::new(
-                self.sess,
-                metadata,
-                crate_root,
-                raw_proc_macros,
-                cnum,
-                cnum_map,
-                dep_kind,
-                source,
-                private_dep,
-                host_hash,
-            ),
+            cnum_map,
+            dep_kind,
+            source,
+            private_dep,
+            host_hash,
         );
 
+        self.cstore.set_crate_data(cnum, crate_metadata);
+
         Ok(cnum)
     }

@@ -569,6 +573,8 @@ fn resolve_crate_deps(
             let cnum = self.maybe_resolve_crate(dep.name, dep_kind, Some((root, &dep)))?;
             crate_num_map.push(cnum);
         }
+
+        debug!("resolve_crate_deps: cnum_map for {:?} is {:?}", krate, crate_num_map);
         Ok(crate_num_map)
     }

@@ -9,6 +9,7 @@
 #![feature(proc_macro_internals)]
 #![feature(min_specialization)]
 #![feature(stmt_expr_attributes)]
+#![feature(never_type)]
 #![recursion_limit = "256"]
 
 extern crate proc_macro;

@@ -32,18 +32,21 @@
 use rustc_middle::util::common::record_time;
 use rustc_serialize::{opaque, Decodable, Decoder, SpecializedDecoder, UseSpecializedDecodable};
 use rustc_session::Session;
+use rustc_span::hygiene::ExpnDataDecodeMode;
 use rustc_span::source_map::{respan, Spanned};
 use rustc_span::symbol::{sym, Ident, Symbol};
-use rustc_span::{self, hygiene::MacroKind, BytePos, Pos, Span, DUMMY_SP};
+use rustc_span::{self, hygiene::MacroKind, BytePos, ExpnId, Pos, Span, SyntaxContext, DUMMY_SP};
 
 use log::debug;
 use proc_macro::bridge::client::ProcMacro;
+use std::cell::Cell;
 use std::io;
 use std::mem;
 use std::num::NonZeroUsize;
 use std::path::Path;
 
 pub use cstore_impl::{provide, provide_extern};
+use rustc_span::hygiene::HygieneContext;
 
 mod cstore_impl;

@@ -106,6 +109,13 @@
     /// The hash for the host proc macro. Used to support `-Z dual-proc-macro`.
     host_hash: Option<Svh>,
 
+    /// Additional data used for decoding `HygieneData` (e.g. `SyntaxContext`
+    /// and `ExpnId`).
+    /// Note that we store a `HygieneContext` for each `CrateMetadata`. This is
+    /// because `SyntaxContext` ids are not globally unique, so we need
+    /// to track which ids we've decoded on a per-crate basis.
+    hygiene_context: HygieneContext,
+
     // --- Data used only for improving diagnostics ---
     /// Information about the `extern crate` item or path that caused this crate to be loaded.
     /// If this is `None`, then the crate was injected (e.g., by the allocator).

@@ -411,6 +421,7 @@ fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
 
         let lo = BytePos::decode(self)?;
         let len = BytePos::decode(self)?;
+        let ctxt = SyntaxContext::decode(self)?;
         let hi = lo + len;
 
         let sess = if let Some(sess) = self.sess {

@@ -524,7 +535,7 @@ fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
         let hi =
             (hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos;
 
-        Ok(Span::with_root_ctxt(lo, hi))
+        Ok(Span::new(lo, hi, ctxt))
     }
 }

@@ -1120,6 +1131,14 @@ fn is_item_mir_available(&self, id: DefIndex) -> bool {
         !self.is_proc_macro(id) && self.root.tables.mir.get(self, id).is_some()
     }
 
+    fn module_expansion(&self, id: DefIndex, sess: &Session) -> ExpnId {
+        if let EntryKind::Mod(m) = self.kind(id) {
+            m.decode((self, sess)).expansion
+        } else {
+            panic!("Expected module, found {:?}", self.local_def_id(id))
+        }
+    }
+
     fn get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> {
         self.root
             .tables

@@ -1652,6 +1671,7 @@ impl CrateMetadata {
             private_dep,
             host_hash,
             extern_crate: Lock::new(None),
+            hygiene_context: HygieneContext::new(),
         }
     }

@@ -1784,3 +1804,57 @@ fn macro_kind(raw: &ProcMacro) -> MacroKind {
         ProcMacro::Bang { .. } => MacroKind::Bang,
     }
 }
+
+impl<'a, 'tcx> SpecializedDecoder<SyntaxContext> for DecodeContext<'a, 'tcx> {
+    fn specialized_decode(&mut self) -> Result<SyntaxContext, Self::Error> {
+        let cdata = self.cdata();
+        let sess = self.sess.unwrap();
+        let cname = cdata.root.name;
+        rustc_span::hygiene::decode_syntax_context(self, &cdata.hygiene_context, |_, id| {
+            debug!("SpecializedDecoder<SyntaxContext>: decoding {}", id);
+            Ok(cdata
+                .root
+                .syntax_contexts
+                .get(&cdata, id)
+                .unwrap_or_else(|| panic!("Missing SyntaxContext {:?} for crate {:?}", id, cname))
+                .decode((&cdata, sess)))
+        })
+    }
+}
+
+impl<'a, 'tcx> SpecializedDecoder<ExpnId> for DecodeContext<'a, 'tcx> {
+    fn specialized_decode(&mut self) -> Result<ExpnId, Self::Error> {
+        let local_cdata = self.cdata();
+        let sess = self.sess.unwrap();
+        let expn_cnum = Cell::new(None);
+        let get_ctxt = |cnum| {
+            expn_cnum.set(Some(cnum));
+            if cnum == LOCAL_CRATE {
+                &local_cdata.hygiene_context
+            } else {
+                &local_cdata.cstore.get_crate_data(cnum).cdata.hygiene_context
+            }
+        };
+
+        rustc_span::hygiene::decode_expn_id(
+            self,
+            ExpnDataDecodeMode::Metadata(get_ctxt),
+            |_this, index| {
+                let cnum = expn_cnum.get().unwrap();
+                // Lookup local `ExpnData`s in our own crate data. Foreign `ExpnData`s
+                // are stored in the owning crate, to avoid duplication.
+                let crate_data = if cnum == LOCAL_CRATE {
+                    local_cdata
+                } else {
+                    local_cdata.cstore.get_crate_data(cnum)
+                };
+                Ok(crate_data
+                    .root
+                    .expn_data
+                    .get(&crate_data, index)
+                    .unwrap()
+                    .decode((&crate_data, sess)))
+            },
+        )
+    }
+}

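The two decoder impls above both funnel through the same pattern: a serialized id is only meaningful relative to the crate that wrote it, so the first time a given id is seen it is decoded once and remapped to a fresh id in the current session, and every later occurrence reuses that mapping. Below is a minimal, self-contained sketch of that pattern; the names (`RemapTable`, `decode_id`) are invented for illustration and are not rustc APIs.

    use std::collections::HashMap;

    struct RemapTable {
        remapped: HashMap<u32, u32>, // serialized id -> id in the current session
        next_local: u32,
    }

    impl RemapTable {
        fn decode_id(&mut self, serialized: u32, decode_data: impl FnOnce(u32) -> String) -> u32 {
            if let Some(&local) = self.remapped.get(&serialized) {
                return local; // already decoded once: reuse the same local id
            }
            // Allocate a fresh local id, record the mapping, then decode the
            // payload exactly once.
            let local = self.next_local;
            self.next_local += 1;
            self.remapped.insert(serialized, local);
            let _payload = decode_data(serialized);
            local
        }
    }

    fn main() {
        let mut table = RemapTable { remapped: HashMap::new(), next_local: 100 };
        let a = table.decode_id(3, |id| format!("payload for {}", id));
        let b = table.decode_id(3, |id| format!("payload for {}", id));
        assert_eq!(a, b); // one serialized id, one local id
    }
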
@@ -21,9 +21,10 @@
 use rustc_session::utils::NativeLibKind;
 use rustc_session::{CrateDisambiguator, Session};
 use rustc_span::source_map::{self, Span, Spanned};
-use rustc_span::symbol::{Ident, Symbol};
+use rustc_span::symbol::Symbol;
 
 use rustc_data_structures::sync::Lrc;
+use rustc_span::ExpnId;
 use smallvec::SmallVec;
 use std::any::Any;

@@ -417,13 +418,7 @@ pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro {
             attr::mark_used(attr);
         }
 
-        let ident = data
-            .def_key(id.index)
-            .disambiguated_data
-            .data
-            .get_opt_name()
-            .map(Ident::with_dummy_span) // FIXME: cross-crate hygiene
-            .expect("no name in load_macro");
+        let ident = data.item_ident(id.index, sess);
 
         LoadedMacro::MacroDef(
             ast::Item {

@@ -454,6 +449,10 @@ pub fn get_span_untracked(&self, def_id: DefId, sess: &Session) -> Span {
     pub fn item_generics_num_lifetimes(&self, def_id: DefId, sess: &Session) -> usize {
         self.get_crate_data(def_id.krate).get_generics(def_id.index, sess).own_counts().lifetimes
     }
+
+    pub fn module_expansion_untracked(&self, def_id: DefId, sess: &Session) -> ExpnId {
+        self.get_crate_data(def_id.krate).module_expansion(def_id.index, sess)
+    }
 }
 
 impl CrateStore for CStore {

@@ -1,4 +1,4 @@
-use crate::rmeta::table::FixedSizeEncoding;
+use crate::rmeta::table::{FixedSizeEncoding, TableBuilder};
 use crate::rmeta::*;
 
 use log::{debug, trace};

@@ -30,9 +30,10 @@
 use rustc_middle::ty::{self, SymbolName, Ty, TyCtxt};
 use rustc_serialize::{opaque, Encodable, Encoder, SpecializedEncoder, UseSpecializedEncodable};
 use rustc_session::config::CrateType;
+use rustc_span::hygiene::ExpnDataEncodeMode;
 use rustc_span::source_map::Spanned;
 use rustc_span::symbol::{sym, Ident, Symbol};
-use rustc_span::{self, ExternalSource, FileName, SourceFile, Span};
+use rustc_span::{self, ExternalSource, FileName, SourceFile, Span, SyntaxContext};
 use rustc_target::abi::VariantIdx;
 use std::hash::Hash;
 use std::num::NonZeroUsize;

@@ -66,6 +67,15 @@ struct EncodeContext<'tcx> {
     // with a result containing a foreign `Span`.
     required_source_files: Option<GrowableBitSet<usize>>,
     is_proc_macro: bool,
+    /// All `SyntaxContexts` for which we have written `SyntaxContextData` into crate metadata.
+    /// This is `None` after we finish encoding `SyntaxContexts`, to ensure
+    /// that we don't accidentally try to encode any more `SyntaxContexts`
+    serialized_ctxts: Option<FxHashSet<SyntaxContext>>,
+    /// The `SyntaxContexts` that we have serialized (e.g. as a result of encoding `Spans`)
+    /// in the most recent 'round' of serializing. Serializing `SyntaxContextData`
+    /// may cause us to serialize more `SyntaxContext`s, so serialize in a loop
+    /// until we reach a fixed point.
+    latest_ctxts: Option<FxHashSet<SyntaxContext>>,
 }
 
 macro_rules! encoder_methods {

@@ -150,6 +160,21 @@ fn specialized_encode(&mut self, def_id: &DefId) -> Result<(), Self::Error> {
     }
 }
 
+impl<'tcx> SpecializedEncoder<SyntaxContext> for EncodeContext<'tcx> {
+    fn specialized_encode(&mut self, ctxt: &SyntaxContext) -> Result<(), Self::Error> {
+        if !self.serialized_ctxts.as_ref().unwrap().contains(ctxt) {
+            self.latest_ctxts.as_mut().unwrap().insert(*ctxt);
+        }
+        rustc_span::hygiene::raw_encode_syntax_context(*ctxt, self)
+    }
+}
+
+impl<'tcx> SpecializedEncoder<ExpnId> for EncodeContext<'tcx> {
+    fn specialized_encode(&mut self, expn: &ExpnId) -> Result<(), Self::Error> {
+        rustc_span::hygiene::raw_encode_expn_id(*expn, ExpnDataEncodeMode::Metadata, self)
+    }
+}
+
 impl<'tcx> SpecializedEncoder<DefIndex> for EncodeContext<'tcx> {
     #[inline]
     fn specialized_encode(&mut self, def_index: &DefIndex) -> Result<(), Self::Error> {

@@ -234,15 +259,24 @@ fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
         let len = hi - lo;
         len.encode(self)?;
 
+        // FIXME: Once #69976 is merged, treat proc-macros normally
+        // Currently, we can't encode `SyntaxContextData` for proc-macro crates,
+        // since the `SyntaxContextData`/`ExpnData` might reference `DefIds` from
+        // dependencies (which are not currently loaded during decoding).
+        if self.is_proc_macro {
+            SyntaxContext::root().encode(self)?;
+        } else {
+            span.ctxt.encode(self)?;
+        }
+
         if tag == TAG_VALID_SPAN_FOREIGN {
             // This needs to be two lines to avoid holding the `self.source_file_cache`
             // while calling `cnum.encode(self)`
             let cnum = self.source_file_cache.0.cnum;
             cnum.encode(self)?;
         }
-
-        // Don't encode the expansion context.
         Ok(())
     }
 }

@@ -478,6 +512,7 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
 
         let mut i = self.position();
 
+        // Encode the crate deps
         let crate_deps = self.encode_crate_deps();
         let dylib_dependency_formats = self.encode_dylib_dependency_formats();
         let dep_bytes = self.position() - i;

@@ -556,12 +591,23 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
         let proc_macro_data_bytes = self.position() - i;
 
         // Encode exported symbols info. This is prefetched in `encode_metadata` so we encode
-        // this late to give the prefetching as much time as possible to complete.
+        // this as late as possible to give the prefetching as much time as possible to complete.
         i = self.position();
         let exported_symbols = tcx.exported_symbols(LOCAL_CRATE);
         let exported_symbols = self.encode_exported_symbols(&exported_symbols);
         let exported_symbols_bytes = self.position() - i;
 
+        // Encode the hygiene data.
+        // IMPORTANT: this *must* be the last thing that we encode (other than `SourceMap`). The process
+        // of encoding other items (e.g. `optimized_mir`) may cause us to load
+        // data from the incremental cache. If this causes us to deserialize a `Span`,
+        // then we may load additional `SyntaxContext`s into the global `HygieneData`.
+        // Therefore, we need to encode the hygiene data last to ensure that we encode
+        // any `SyntaxContext`s that might be used.
+        i = self.position();
+        let (syntax_contexts, syntax_bytes, expn_data, expn_bytes) = self.encode_hygiene();
+        let hygiene_bytes = self.position() - i;
+
         // Encode source_map. This needs to be done last,
         // since encoding `Span`s tells us which `SourceFiles` we actually
         // need to encode.

@@ -618,6 +664,8 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
             exported_symbols,
             interpret_alloc_index,
             tables,
+            syntax_contexts,
+            expn_data,
         });
 
         let total_bytes = self.position();

@@ -643,6 +691,9 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
             println!("  proc-macro-data-bytes: {}", proc_macro_data_bytes);
             println!("            item bytes: {}", item_bytes);
             println!("           table bytes: {}", tables_bytes);
+            println!("         hygiene bytes: {}", hygiene_bytes);
+            println!("   SyntaxContext bytes: {}", syntax_bytes);
+            println!("          ExpnId bytes: {}", expn_bytes);
             println!("            zero bytes: {}", zero_bytes);
             println!("           total bytes: {}", total_bytes);
         }

@@ -752,11 +803,12 @@ fn encode_info_for_mod(
         vis: &hir::Visibility<'_>,
     ) {
         let tcx = self.tcx;
-        let def_id = tcx.hir().local_def_id(id);
+        let local_def_id = tcx.hir().local_def_id(id);
+        let def_id = local_def_id.to_def_id();
         debug!("EncodeContext::encode_info_for_mod({:?})", def_id);
 
         let data = ModData {
-            reexports: match tcx.module_exports(def_id) {
+            reexports: match tcx.module_exports(local_def_id) {
                 Some(exports) => {
                     let hir_map = self.tcx.hir();
                     self.lazy(

@@ -767,10 +819,9 @@ fn encode_info_for_mod(
                 }
                 _ => Lazy::empty(),
             },
+            expansion: tcx.hir().definitions().expansion_that_defined(local_def_id),
         };
 
-        let def_id = def_id.to_def_id();
-
         record!(self.tables.kind[def_id] <- EntryKind::Mod(self.lazy(data)));
         record!(self.tables.visibility[def_id] <- ty::Visibility::from_hir(vis, id, self.tcx));
         record!(self.tables.span[def_id] <- self.tcx.def_span(def_id));

@@ -1425,6 +1476,77 @@ fn encode_foreign_modules(&mut self) -> Lazy<[ForeignModule]> {
         self.lazy(foreign_modules.iter().cloned())
     }
 
+    fn encode_hygiene(&mut self) -> (SyntaxContextTable, usize, ExpnDataTable, usize) {
+        let mut syntax_contexts: TableBuilder<_, _> = Default::default();
+        let mut expn_data_table: TableBuilder<_, _> = Default::default();
+
+        let mut i = self.position();
+        // We need to encode the `ExpnData` *before* we encode
+        // the `SyntaxContextData`, since encoding `ExpnData` may cause
+        // us to use more `SyntaxContexts` when we encode the spans stored
+        // inside `ExpnData`
+        rustc_span::hygiene::for_all_expn_data(|index, expn_data| {
+            // Don't encode the ExpnData for ExpnIds from foreign crates.
+            // The crate that defines the ExpnId will store the ExpnData,
+            // and the metadata decoder will look it up from that crate via the CStore
+            if expn_data.krate == LOCAL_CRATE {
+                expn_data_table.set(index, self.lazy(expn_data));
+            }
+            Ok::<(), !>(())
+        })
+        .unwrap();
+
+        let expn_bytes = self.position() - i;
+
+        i = self.position();
+        let mut num_serialized = 0;
+
+        // When we serialize a `SyntaxContextData`, we may end up serializing
+        // a `SyntaxContext` that we haven't seen before. Therefore, we
+        // serialize in rounds until we reach a fixed point.
+        while !self.latest_ctxts.as_ref().unwrap().is_empty() {
+            debug!(
+                "encode_hygiene: Serializing a round of {:?} SyntaxContextDatas: {:?}",
+                self.latest_ctxts.as_ref().unwrap().len(),
+                self.latest_ctxts.as_ref().unwrap()
+            );
+
+            // Consume the current round of SyntaxContexts.
+            let latest = self.latest_ctxts.replace(FxHashSet::default()).unwrap();
+
+            // It's fine to iterate over a HashMap, because the serialization
+            // of the table that we insert data into doesn't depend on insertion
+            // order
+            rustc_span::hygiene::for_all_data_in(latest.into_iter(), |(index, ctxt, data)| {
+                if self.serialized_ctxts.as_mut().unwrap().insert(ctxt) {
+                    syntax_contexts.set(index, self.lazy(data));
+                    num_serialized += 1;
+                }
+                Ok::<_, !>(())
+            })
+            .unwrap();
+        }
+        debug!("encode_hygiene: Done serializing SyntaxContextData");
+        let syntax_bytes = self.position() - i;
+
+        let total = rustc_span::hygiene::num_syntax_ctxts();
+        debug!(
+            "encode_hygiene: stored {}/{} ({})",
+            num_serialized,
+            total,
+            (num_serialized as f32) / (total as f32)
+        );
+
+        self.serialized_ctxts.take();
+        self.latest_ctxts.take();
+
+        (
+            syntax_contexts.encode(&mut self.opaque),
+            syntax_bytes,
+            expn_data_table.encode(&mut self.opaque),
+            expn_bytes,
+        )
+    }
+
     fn encode_proc_macros(&mut self) -> Option<Lazy<[DefIndex]>> {
         let is_proc_macro = self.tcx.sess.crate_types().contains(&CrateType::ProcMacro);
         if is_proc_macro {

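The round-based loop in `encode_hygiene` above is a plain fixed-point iteration: writing one `SyntaxContextData` can mention spans whose contexts have not been written yet, so the pending set is drained round by round until it stays empty. Here is a minimal standalone sketch of that shape, with invented names (`encode_until_fixed_point`, and the "item references item - 1" rule standing in for the real encoding side effects):

    use std::collections::HashSet;

    fn encode_until_fixed_point(roots: impl IntoIterator<Item = u32>) -> HashSet<u32> {
        let mut serialized: HashSet<u32> = HashSet::new();
        let mut latest: HashSet<u32> = roots.into_iter().collect();
        while !latest.is_empty() {
            // Consume the current round; "encoding" an item may enqueue more.
            let round = std::mem::take(&mut latest);
            for item in round {
                if serialized.insert(item) && item > 0 && !serialized.contains(&(item - 1)) {
                    // Stand-in for "writing this record referenced another one".
                    latest.insert(item - 1);
                }
            }
        }
        serialized
    }

    fn main() {
        let reached = encode_until_fixed_point([3]);
        assert_eq!(reached.len(), 4); // 3 transitively pulled in 2, 1 and 0
        println!("{:?}", reached);
    }
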
@@ -1919,6 +2041,8 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata {
         interpret_allocs_inverse: Default::default(),
         required_source_files: Some(GrowableBitSet::with_capacity(source_map_files.len())),
         is_proc_macro: tcx.sess.crate_types().contains(&CrateType::ProcMacro),
+        serialized_ctxts: Some(Default::default()),
+        latest_ctxts: Some(Default::default()),
     };
     drop(source_map_files);

@@ -20,7 +20,7 @@
 use rustc_session::CrateDisambiguator;
 use rustc_span::edition::Edition;
 use rustc_span::symbol::{Ident, Symbol};
-use rustc_span::{self, Span};
+use rustc_span::{self, ExpnData, ExpnId, Span};
 use rustc_target::spec::{PanicStrategy, TargetTriple};
 
 use std::marker::PhantomData;

@@ -28,6 +28,7 @@
 
 pub use decoder::{provide, provide_extern};
 crate use decoder::{CrateMetadata, CrateNumMap, MetadataBlob};
+use rustc_span::hygiene::SyntaxContextData;
 
 mod decoder;
 mod encoder;

@@ -168,6 +169,9 @@ macro_rules! Lazy {
     ($T:ty) => {Lazy<$T, ()>};
 }
 
+type SyntaxContextTable = Lazy<Table<u32, Lazy<SyntaxContextData>>>;
+type ExpnDataTable = Lazy<Table<u32, Lazy<ExpnData>>>;
+
 #[derive(RustcEncodable, RustcDecodable)]
 crate struct CrateRoot<'tcx> {
     name: Symbol,

@@ -202,6 +206,10 @@ macro_rules! Lazy {
     proc_macro_data: Option<Lazy<[DefIndex]>>,
 
     exported_symbols: Lazy!([(ExportedSymbol<'tcx>, SymbolExportLevel)]),
 
+    syntax_contexts: SyntaxContextTable,
+    expn_data: ExpnDataTable,
+
     source_map: Lazy<[rustc_span::SourceFile]>,
 
     compiler_builtins: bool,

@@ -322,6 +330,7 @@ enum EntryKind {
 #[derive(RustcEncodable, RustcDecodable)]
 struct ModData {
     reexports: Lazy<[Export<hir::HirId>]>,
+    expansion: ExpnId,
 }
 
 #[derive(RustcEncodable, RustcDecodable)]

@@ -155,7 +155,7 @@ impl<I: Idx, T> TableBuilder<I, T>
 where
     Option<T>: FixedSizeEncoding,
 {
-    pub(super) fn set(&mut self, i: I, value: T) {
+    pub(crate) fn set(&mut self, i: I, value: T) {
         // FIXME(eddyb) investigate more compact encodings for sparse tables.
         // On the PR @michaelwoerister mentioned:
         // > Space requirements could perhaps be optimized by using the HAMT `popcnt`

@@ -170,7 +170,7 @@ pub(super) fn set(&mut self, i: I, value: T) {
         Some(value).write_to_bytes_at(&mut self.bytes, i);
     }
 
-    pub(super) fn encode(&self, buf: &mut Encoder) -> Lazy<Table<I, T>> {
+    pub(crate) fn encode(&self, buf: &mut Encoder) -> Lazy<Table<I, T>> {
         let pos = buf.position();
         buf.emit_raw_bytes(&self.bytes);
         Lazy::from_position_and_meta(NonZeroUsize::new(pos as usize).unwrap(), self.bytes.len())

@@ -14,6 +14,7 @@
 use rustc_span::symbol::Symbol;
 use rustc_span::{BytePos, CachingSourceMapView, SourceFile};
 
+use rustc_span::def_id::{CrateNum, CRATE_DEF_INDEX};
 use smallvec::SmallVec;
 use std::cmp::Ord;

@@ -229,6 +230,12 @@ fn hash_spans(&self) -> bool {
         self.hash_spans
     }
 
+    #[inline]
+    fn hash_crate_num(&mut self, cnum: CrateNum, hasher: &mut StableHasher) {
+        let hcx = self;
+        hcx.def_path_hash(DefId { krate: cnum, index: CRATE_DEF_INDEX }).hash_stable(hcx, hasher);
+    }
+
     #[inline]
     fn hash_def_id(&mut self, def_id: DefId, hasher: &mut StableHasher) {
         let hcx = self;

@@ -147,13 +147,6 @@ fn to_stable_hash_key(&self, hcx: &StableHashingContext<'a>) -> DefPathHash {
     }
 }
 
-impl<'a> HashStable<StableHashingContext<'a>> for CrateNum {
-    #[inline]
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        hcx.def_path_hash(DefId { krate: *self, index: CRATE_DEF_INDEX }).hash_stable(hcx, hasher);
-    }
-}
-
 impl<'a> ToStableHashKey<StableHashingContext<'a>> for CrateNum {
     type KeyType = DefPathHash;

@@ -346,6 +346,6 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool {
             // Dummy span for the `def_site` means it's an external macro.
             expn_data.def_site.is_dummy() || sess.source_map().is_imported(expn_data.def_site)
         }
-        ExpnKind::Macro(..) => true, // definitely a plugin
+        ExpnKind::Macro { .. } => true, // definitely a plugin
     }
 }

@@ -17,22 +17,24 @@
     UseSpecializedDecodable, UseSpecializedEncodable,
 };
 use rustc_session::{CrateDisambiguator, Session};
-use rustc_span::hygiene::{ExpnId, SyntaxContext};
+use rustc_span::hygiene::{
+    ExpnDataDecodeMode, ExpnDataEncodeMode, ExpnId, HygieneContext, SyntaxContext,
+    SyntaxContextData,
+};
 use rustc_span::source_map::{SourceMap, StableSourceFileId};
 use rustc_span::symbol::Ident;
 use rustc_span::CachingSourceMapView;
-use rustc_span::{BytePos, SourceFile, Span, DUMMY_SP};
+use rustc_span::{BytePos, ExpnData, SourceFile, Span, DUMMY_SP};
 use std::mem;
 
 const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
 
-const TAG_NO_EXPN_DATA: u8 = 0;
-const TAG_EXPN_DATA_SHORTHAND: u8 = 1;
-const TAG_EXPN_DATA_INLINE: u8 = 2;
-
 const TAG_VALID_SPAN: u8 = 0;
 const TAG_INVALID_SPAN: u8 = 1;
 
+const TAG_SYNTAX_CONTEXT: u8 = 0;
+const TAG_EXPN_DATA: u8 = 1;
+
 /// Provides an interface to incremental compilation data cached from the
 /// previous compilation session. This data will eventually include the results
 /// of a few selected queries (like `typeck` and `mir_optimized`) and

@@ -53,7 +55,6 @@ pub struct OnDiskCache<'sess> {
 
     // Caches that are populated lazily during decoding.
     file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
-    synthetic_syntax_contexts: Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
 
     // A map from dep-node to the position of the cached query result in
     // `serialized_data`.

@@ -64,9 +65,28 @@ pub struct OnDiskCache<'sess> {
     prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
 
     alloc_decoding_state: AllocDecodingState,
+
+    // A map from syntax context ids to the position of their associated
+    // `SyntaxContextData`. We use a `u32` instead of a `SyntaxContext`
+    // to represent the fact that we are storing *encoded* ids. When we decode
+    // a `SyntaxContext`, a new id will be allocated from the global `HygieneData`,
+    // which will almost certainly be different than the serialized id.
+    syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
+    // A map from the `DefPathHash` of an `ExpnId` to the position
+    // of their associated `ExpnData`. Ideally, we would store a `DefId`,
+    // but we need to decode this before we've constructed a `TyCtxt` (which
+    // makes it difficult to decode a `DefId`).
+
+    // Note that these `DefPathHashes` correspond to both local and foreign
+    // `ExpnData` (e.g. `ExpnData.krate` may not be `LOCAL_CRATE`). Alternatively,
+    // we could look up the `ExpnData` from the metadata of foreign crates,
+    // but it seemed easier to have `OnDiskCache` be independent of the `CStore`.
+    expn_data: FxHashMap<u32, AbsoluteBytePos>,
+    // Additional information used when decoding hygiene data.
+    hygiene_context: HygieneContext,
 }
 
-// This type is used only for (de-)serialization.
+// This type is used only for serialization and deserialization.
 #[derive(RustcEncodable, RustcDecodable)]
 struct Footer {
     file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,

@@ -75,6 +95,10 @@ struct Footer {
     diagnostics_index: EncodedQueryResultIndex,
     // The location of all allocations.
     interpret_alloc_index: Vec<u32>,
+    // See `OnDiskCache.syntax_contexts`
+    syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
+    // See `OnDiskCache.expn_data`
+    expn_data: FxHashMap<u32, AbsoluteBytePos>,
 }
 
 type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;

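The `Footer` maps above implement a simple position-index scheme: every `SyntaxContextData`/`ExpnData` record is written at some absolute byte position in the cache file, and the footer maps the *encoded* id to that position so a later session can seek straight to the record it needs. A minimal sketch of the idea, with invented names (the real code goes through rustc's opaque encoder and `AbsoluteBytePos`):

    use std::collections::HashMap;

    fn main() {
        let mut buf: Vec<u8> = Vec::new();
        let mut index: HashMap<u32, usize> = HashMap::new(); // encoded id -> byte position

        for (id, payload) in [(0u32, &b"root"[..]), (1, &b"ctxt_a"[..]), (2, &b"ctxt_b"[..])] {
            index.insert(id, buf.len()); // remember where this record starts
            buf.extend_from_slice(payload);
        }

        // A decoder holding the index can seek directly to record 2
        // without touching records 0 and 1.
        let pos = index[&2];
        assert_eq!(&buf[pos..pos + 6], b"ctxt_b");
        println!("record 2 starts at byte {}", pos);
    }
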
@@ -116,6 +140,7 @@ pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> Self {
 
             // Decode the file footer, which contains all the lookup tables, etc.
             decoder.set_position(footer_pos);
+
             decode_tagged(&mut decoder, TAG_FILE_FOOTER)
                 .expect("error while trying to decode footer position")
         };

|
||||
current_diagnostics: Default::default(),
|
||||
query_result_index: footer.query_result_index.into_iter().collect(),
|
||||
prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
|
||||
synthetic_syntax_contexts: Default::default(),
|
||||
alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
|
||||
syntax_contexts: footer.syntax_contexts,
|
||||
expn_data: footer.expn_data,
|
||||
hygiene_context: HygieneContext::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -146,8 +173,10 @@ pub fn new_empty(source_map: &'sess SourceMap) -> Self {
             current_diagnostics: Default::default(),
             query_result_index: Default::default(),
             prev_diagnostics_index: Default::default(),
-            synthetic_syntax_contexts: Default::default(),
             alloc_decoding_state: AllocDecodingState::new(Vec::new()),
+            syntax_contexts: FxHashMap::default(),
+            expn_data: FxHashMap::default(),
+            hygiene_context: HygieneContext::new(),
         }
     }

@@ -180,7 +209,6 @@ pub fn serialize<'tcx, E>(&self, tcx: TyCtxt<'tcx>, encoder: &mut E) -> Result<(
             encoder,
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
-            expn_data_shorthands: Default::default(),
             interpret_allocs: Default::default(),
             interpret_allocs_inverse: Vec::new(),
             source_map: CachingSourceMapView::new(tcx.sess.source_map()),

@@ -264,7 +292,32 @@ macro_rules! encode_queries {
                 })
                 .collect();
 
-            // Encode the file footer.
+            let mut syntax_contexts = FxHashMap::default();
+            let mut expn_data = FxHashMap::default();
+
+            // Encode all hygiene data (`SyntaxContextData` and `ExpnData`) from the current
+            // session.
+            // FIXME: Investigate tracking which `SyntaxContext`s and `ExpnId`s we actually
+            // need, to avoid serializing data that will never be used. This will require
+            // tracking which `SyntaxContext`s/`ExpnId`s are actually (transitively) referenced
+            // from any of the `Span`s that we serialize.
+
+            rustc_span::hygiene::for_all_data(|(index, _ctxt, data)| {
+                let pos = AbsoluteBytePos::new(encoder.position());
+                encoder.encode_tagged(TAG_SYNTAX_CONTEXT, data)?;
+                syntax_contexts.insert(index, pos);
+                Ok(())
+            })?;
+
+            rustc_span::hygiene::for_all_expn_data(|index, data| {
+                let pos = AbsoluteBytePos::new(encoder.position());
+                encoder.encode_tagged(TAG_EXPN_DATA, data)?;
+                //let hash = tcx.def_path_hash(data.def_id.unwrap());
+                expn_data.insert(index, pos);
+                Ok(())
+            })?;
+
+            // Encode the file footer.
             let footer_pos = encoder.position() as u64;
             encoder.encode_tagged(
                 TAG_FILE_FOOTER,

@@ -274,6 +327,8 @@ macro_rules! encode_queries {
                     query_result_index,
                     diagnostics_index,
                     interpret_alloc_index,
+                    syntax_contexts,
+                    expn_data,
                 },
             )?;

@@ -367,6 +422,21 @@ fn load_indexed<'tcx, T>(
     {
         let pos = index.get(&dep_node_index).cloned()?;
 
+        self.with_decoder(tcx, pos, |decoder| match decode_tagged(decoder, dep_node_index) {
+            Ok(v) => Some(v),
+            Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
+        })
+    }
+
+    fn with_decoder<'tcx, T, F: FnOnce(&mut CacheDecoder<'sess, 'tcx>) -> T>(
+        &'sess self,
+        tcx: TyCtxt<'tcx>,
+        pos: AbsoluteBytePos,
+        f: F,
+    ) -> T
+    where
+        T: Decodable,
+    {
         let cnum_map =
             self.cnum_map.get_or_init(|| Self::compute_cnum_map(tcx, &self.prev_cnums[..]));

@@ -375,16 +445,14 @@ fn load_indexed<'tcx, T>(
             opaque: opaque::Decoder::new(&self.serialized_data[..], pos.to_usize()),
             source_map: self.source_map,
             cnum_map,
-            synthetic_syntax_contexts: &self.synthetic_syntax_contexts,
             file_index_to_file: &self.file_index_to_file,
             file_index_to_stable_id: &self.file_index_to_stable_id,
             alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
+            syntax_contexts: &self.syntax_contexts,
+            expn_data: &self.expn_data,
+            hygiene_context: &self.hygiene_context,
         };
 
-        match decode_tagged(&mut decoder, dep_node_index) {
-            Ok(v) => Some(v),
-            Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
-        }
+        f(&mut decoder)
     }
 
     // This function builds mapping from previous-session-`CrateNum` to

@@ -430,10 +498,12 @@ struct CacheDecoder<'a, 'tcx> {
     opaque: opaque::Decoder<'a>,
     source_map: &'a SourceMap,
     cnum_map: &'a IndexVec<CrateNum, Option<CrateNum>>,
-    synthetic_syntax_contexts: &'a Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
    file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
     file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, StableSourceFileId>,
     alloc_decoding_session: AllocDecodingSession<'a>,
+    syntax_contexts: &'a FxHashMap<u32, AbsoluteBytePos>,
+    expn_data: &'a FxHashMap<u32, AbsoluteBytePos>,
+    hygiene_context: &'a HygieneContext,
 }
 
 impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {

@@ -577,6 +647,43 @@ fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum {
 
 implement_ty_decoder!(CacheDecoder<'a, 'tcx>);
 
+impl<'a, 'tcx> SpecializedDecoder<SyntaxContext> for CacheDecoder<'a, 'tcx> {
+    fn specialized_decode(&mut self) -> Result<SyntaxContext, Self::Error> {
+        let syntax_contexts = self.syntax_contexts;
+        rustc_span::hygiene::decode_syntax_context(self, self.hygiene_context, |this, id| {
+            // This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing.
+            // We look up the position of the associated `SyntaxContextData` and decode it.
+            let pos = syntax_contexts.get(&id).unwrap();
+            this.with_position(pos.to_usize(), |decoder| {
+                let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT)?;
+                Ok(data)
+            })
+        })
+    }
+}
+
+impl<'a, 'tcx> SpecializedDecoder<ExpnId> for CacheDecoder<'a, 'tcx> {
+    fn specialized_decode(&mut self) -> Result<ExpnId, Self::Error> {
+        let expn_data = self.expn_data;
+        rustc_span::hygiene::decode_expn_id(
+            self,
+            ExpnDataDecodeMode::incr_comp(self.hygiene_context),
+            |this, index| {
+                // This closure is invoked if we haven't already decoded the data for the `ExpnId` we are deserializing.
+                // We look up the position of the associated `ExpnData` and decode it.
+                let pos = expn_data
+                    .get(&index)
+                    .unwrap_or_else(|| panic!("Bad index {:?} (map {:?})", index, expn_data));
+
+                this.with_position(pos.to_usize(), |decoder| {
+                    let data: ExpnData = decode_tagged(decoder, TAG_EXPN_DATA)?;
+                    Ok(data)
+                })
+            },
+        )
+    }
+}
+
 impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
         let alloc_decoding_session = self.alloc_decoding_session;

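Both cache decoders above rely on `with_position` to jump to the record's saved byte offset, decode it, and jump back, so a `Span` can be decoded lazily without reading every hygiene record first. A standalone sketch of that seek-decode-restore pattern (names invented for the example):

    struct Cursor<'a> {
        bytes: &'a [u8],
        pos: usize,
    }

    impl<'a> Cursor<'a> {
        // Temporarily move to `pos`, run `f`, then restore the old position.
        fn with_position<T>(&mut self, pos: usize, f: impl FnOnce(&mut Self) -> T) -> T {
            let saved = self.pos;
            self.pos = pos;
            let result = f(self);
            self.pos = saved;
            result
        }

        fn read_byte(&mut self) -> u8 {
            let b = self.bytes[self.pos];
            self.pos += 1;
            b
        }
    }

    fn main() {
        let data = [10u8, 20, 30, 40];
        let mut cur = Cursor { bytes: &data, pos: 1 };
        let far = cur.with_position(3, |c| c.read_byte()); // detour to offset 3
        assert_eq!(far, 40);
        assert_eq!(cur.pos, 1); // the main decode position is untouched
    }
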
@@ -598,48 +705,13 @@ fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
         let line_lo = usize::decode(self)?;
         let col_lo = BytePos::decode(self)?;
         let len = BytePos::decode(self)?;
+        let ctxt = SyntaxContext::decode(self)?;
 
         let file_lo = self.file_index_to_file(file_lo_index);
         let lo = file_lo.lines[line_lo - 1] + col_lo;
         let hi = lo + len;
 
-        let expn_data_tag = u8::decode(self)?;
-
-        // FIXME(mw): This method does not restore `ExpnData::parent` or
-        // `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things
-        // don't seem to be used after HIR lowering, so everything should be fine
-        // until we want incremental compilation to serialize Spans that we need
-        // full hygiene information for.
-        let location = || Span::with_root_ctxt(lo, hi);
-        let recover_from_expn_data = |this: &Self, expn_data, transparency, pos| {
-            let span = location().fresh_expansion_with_transparency(expn_data, transparency);
-            this.synthetic_syntax_contexts.borrow_mut().insert(pos, span.ctxt());
-            span
-        };
-        Ok(match expn_data_tag {
-            TAG_NO_EXPN_DATA => location(),
-            TAG_EXPN_DATA_INLINE => {
-                let (expn_data, transparency) = Decodable::decode(self)?;
-                recover_from_expn_data(
-                    self,
-                    expn_data,
-                    transparency,
-                    AbsoluteBytePos::new(self.opaque.position()),
-                )
-            }
-            TAG_EXPN_DATA_SHORTHAND => {
-                let pos = AbsoluteBytePos::decode(self)?;
-                let cached_ctxt = self.synthetic_syntax_contexts.borrow().get(&pos).cloned();
-                if let Some(ctxt) = cached_ctxt {
-                    Span::new(lo, hi, ctxt)
-                } else {
-                    let (expn_data, transparency) =
-                        self.with_position(pos.to_usize(), |this| Decodable::decode(this))?;
-                    recover_from_expn_data(self, expn_data, transparency, pos)
-                }
-            }
-            _ => unreachable!(),
-        })
+        Ok(Span::new(lo, hi, ctxt))
     }
 }

@@ -695,7 +767,6 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
     encoder: &'a mut E,
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
-    expn_data_shorthands: FxHashMap<ExpnId, AbsoluteBytePos>,
     interpret_allocs: FxHashMap<interpret::AllocId, usize>,
     interpret_allocs_inverse: Vec<interpret::AllocId>,
     source_map: CachingSourceMapView<'tcx>,

@@ -750,6 +821,24 @@ fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Se
     }
 }
 
+impl<'a, 'tcx, E> SpecializedEncoder<SyntaxContext> for CacheEncoder<'a, 'tcx, E>
+where
+    E: 'a + TyEncoder,
+{
+    fn specialized_encode(&mut self, ctxt: &SyntaxContext) -> Result<(), Self::Error> {
+        rustc_span::hygiene::raw_encode_syntax_context(*ctxt, self)
+    }
+}
+
+impl<'a, 'tcx, E> SpecializedEncoder<ExpnId> for CacheEncoder<'a, 'tcx, E>
+where
+    E: 'a + TyEncoder,
+{
+    fn specialized_encode(&mut self, expn: &ExpnId) -> Result<(), Self::Error> {
+        rustc_span::hygiene::raw_encode_expn_id(*expn, ExpnDataEncodeMode::IncrComp, self)
+    }
+}
+
 impl<'a, 'tcx, E> SpecializedEncoder<Span> for CacheEncoder<'a, 'tcx, E>
 where
     E: 'a + TyEncoder,

@@ -779,21 +868,8 @@ fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
         line_lo.encode(self)?;
         col_lo.encode(self)?;
         len.encode(self)?;
-
-        if span_data.ctxt == SyntaxContext::root() {
-            TAG_NO_EXPN_DATA.encode(self)
-        } else {
-            let (expn_id, transparency, expn_data) = span_data.ctxt.outer_mark_with_data();
-            if let Some(pos) = self.expn_data_shorthands.get(&expn_id).cloned() {
-                TAG_EXPN_DATA_SHORTHAND.encode(self)?;
-                pos.encode(self)
-            } else {
-                TAG_EXPN_DATA_INLINE.encode(self)?;
-                let pos = AbsoluteBytePos::new(self.position());
-                self.expn_data_shorthands.insert(expn_id, pos);
-                (expn_data, transparency).encode(self)
-            }
-        }
+        span_data.ctxt.encode(self)?;
+        Ok(())
     }
 }

@@ -35,7 +35,7 @@
 use rustc_span::hygiene::{ExpnId, MacroKind};
 use rustc_span::source_map::{respan, Spanned};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::{Span, DUMMY_SP};
+use rustc_span::Span;
 
 use log::debug;
 use std::cell::Cell;

@@ -130,8 +130,8 @@ impl<'a> Resolver<'a> {
             parent,
             kind,
             def_id,
-            ExpnId::root(),
-            DUMMY_SP,
+            self.cstore().module_expansion_untracked(def_id, &self.session),
+            self.cstore().get_span_untracked(def_id, &self.session),
         ));
         self.extern_module_map.insert(def_id, module);
         module

@@ -888,7 +888,7 @@ fn build_reduced_graph_for_block(&mut self, block: &Block) {
     fn build_reduced_graph_for_external_crate_res(&mut self, child: Export<NodeId>) {
         let parent = self.parent_scope.module;
         let Export { ident, res, vis, span } = child;
-        let expansion = ExpnId::root(); // FIXME(jseyfried) intercrate hygiene
+        let expansion = self.parent_scope.expansion;
         // Record primary definitions.
         match res {
             Res::Def(kind @ (DefKind::Mod | DefKind::Enum | DefKind::Trait), def_id) => {

@@ -434,7 +434,7 @@ pub fn name(&self) -> Option<Symbol> {
 ///
 /// Multiple bindings in the same module can have the same key (in a valid
 /// program) if all but one of them come from glob imports.
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
 struct BindingKey {
     /// The identifier for the binding, always the `normalize_to_macros_2_0` version of the
     /// identifier.

@@ -1988,6 +1988,7 @@ fn resolve_ident_in_module_ext(
     }
 
     fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
+        debug!("resolve_crate_root({:?})", ident);
         let mut ctxt = ident.span.ctxt();
         let mark = if ident.name == kw::DollarCrate {
             // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,

@@ -1997,6 +1998,10 @@ fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
             // definitions actually produced by `macro` and `macro` definitions produced by
             // `macro_rules!`, but at least such configurations are not stable yet.
             ctxt = ctxt.normalize_to_macro_rules();
+            debug!(
+                "resolve_crate_root: marks={:?}",
+                ctxt.marks().into_iter().map(|(i, t)| (i.expn_data(), t)).collect::<Vec<_>>()
+            );
             let mut iter = ctxt.marks().into_iter().rev().peekable();
             let mut result = None;
             // Find the last opaque mark from the end if it exists.

@@ -2008,6 +2013,11 @@ fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
                     break;
                 }
             }
+            debug!(
+                "resolve_crate_root: found opaque mark {:?} {:?}",
+                result,
+                result.map(|r| r.expn_data())
+            );
             // Then find the last semi-transparent mark from the end if it exists.
             for (mark, transparency) in iter {
                 if transparency == Transparency::SemiTransparent {

@@ -2016,16 +2026,36 @@ fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
                     break;
                 }
             }
+            debug!(
+                "resolve_crate_root: found semi-transparent mark {:?} {:?}",
+                result,
+                result.map(|r| r.expn_data())
+            );
             result
         } else {
+            debug!("resolve_crate_root: not DollarCrate");
             ctxt = ctxt.normalize_to_macros_2_0();
             ctxt.adjust(ExpnId::root())
         };
         let module = match mark {
             Some(def) => self.macro_def_scope(def),
-            None => return self.graph_root,
+            None => {
+                debug!(
+                    "resolve_crate_root({:?}): found no mark (ident.span = {:?})",
+                    ident, ident.span
+                );
+                return self.graph_root;
+            }
         };
-        self.get_module(DefId { index: CRATE_DEF_INDEX, ..module.normal_ancestor_id })
+        let module = self.get_module(DefId { index: CRATE_DEF_INDEX, ..module.normal_ancestor_id });
+        debug!(
+            "resolve_crate_root({:?}): got module {:?} ({:?}) (ident.span = {:?})",
+            ident,
+            module,
+            module.kind.name(),
+            ident.span
+        );
+        module
     }
 
     fn resolve_self(&mut self, ctxt: &mut SyntaxContext, module: Module<'a>) -> Module<'a> {

@@ -789,7 +789,7 @@ pub fn get_macro_use_data(&self, span: Span) -> Option<MacroRef> {
         let callee = span.source_callee()?;
 
         let mac_name = match callee.kind {
-            ExpnKind::Macro(mac_kind, name) => match mac_kind {
+            ExpnKind::Macro(kind, name) => match kind {
                 MacroKind::Bang => name,
 
                 // Ignore attribute macros, their spans are usually mangled

@@ -247,3 +247,9 @@ fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
         hcx.hash_def_id(*self, hasher)
     }
 }
+
+impl<CTX: HashStableContext> HashStable<CTX> for CrateNum {
+    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
+        hcx.hash_crate_num(*self, hasher)
+    }
+}

@@ -24,24 +24,27 @@
 // because getting it wrong can lead to nested `HygieneData::with` calls that
 // trigger runtime aborts. (Fortunately these are obvious and easy to fix.)
 
-use crate::def_id::{DefId, CRATE_DEF_INDEX};
 use crate::edition::Edition;
 use crate::symbol::{kw, sym, Symbol};
 use crate::SESSION_GLOBALS;
 use crate::{Span, DUMMY_SP};
 
+use crate::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
+use log::*;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{Lock, Lrc};
 use rustc_macros::HashStable_Generic;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
+use rustc_serialize::{
+    Decodable, Decoder, Encodable, Encoder, UseSpecializedDecodable, UseSpecializedEncodable,
+};
 use std::fmt;
 
 /// A `SyntaxContext` represents a chain of pairs `(ExpnId, Transparency)` named "marks".
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct SyntaxContext(u32);
 
-#[derive(Debug)]
-struct SyntaxContextData {
+#[derive(Debug, RustcEncodable, RustcDecodable, Clone)]
+pub struct SyntaxContextData {
     outer_expn: ExpnId,
     outer_transparency: Transparency,
     parent: SyntaxContext,

@@ -77,6 +80,8 @@ pub enum Transparency {
     Opaque,
 }
 
+pub(crate) const NUM_TRANSPARENCIES: usize = 3;
+
 impl ExpnId {
     pub fn fresh(expn_data: Option<ExpnData>) -> Self {
         HygieneData::with(|data| data.fresh_expn(expn_data))

@@ -104,10 +109,11 @@ pub fn expn_data(self) -> ExpnData {
     }
 
     #[inline]
-    pub fn set_expn_data(self, expn_data: ExpnData) {
+    pub fn set_expn_data(self, mut expn_data: ExpnData) {
         HygieneData::with(|data| {
             let old_expn_data = &mut data.expn_data[self.0 as usize];
             assert!(old_expn_data.is_none(), "expansion data is reset for an expansion ID");
+            expn_data.orig_id.replace(self.as_u32()).expect_none("orig_id should be None");
             *old_expn_data = Some(expn_data);
         })
     }

@@ -143,7 +149,7 @@ pub fn expansion_cause(mut self) -> Option<Span> {
 }
 
 #[derive(Debug)]
-crate struct HygieneData {
+pub struct HygieneData {
     /// Each expansion should have an associated expansion data, but sometimes there's a delay
     /// between creation of an expansion ID and obtaining its data (e.g. macros are collected
     /// first and then resolved later), so we use an `Option` here.

@@ -154,13 +160,16 @@ pub fn expansion_cause(mut self) -> Option<Span> {
 
 impl HygieneData {
     crate fn new(edition: Edition) -> Self {
+        let mut root_data = ExpnData::default(
+            ExpnKind::Root,
+            DUMMY_SP,
+            edition,
+            Some(DefId::local(CRATE_DEF_INDEX)),
+        );
+        root_data.orig_id = Some(0);
+
         HygieneData {
-            expn_data: vec![Some(ExpnData::default(
-                ExpnKind::Root,
-                DUMMY_SP,
-                edition,
-                Some(DefId::local(CRATE_DEF_INDEX)),
-            ))],
+            expn_data: vec![Some(root_data)],
             syntax_context_data: vec![SyntaxContextData {
                 outer_expn: ExpnId::root(),
                 outer_transparency: Transparency::Opaque,

|
||||
}
|
||||
}
|
||||
|
||||
fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
|
||||
pub fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
|
||||
SESSION_GLOBALS.with(|session_globals| f(&mut *session_globals.hygiene_data.borrow_mut()))
|
||||
}
|
||||
|
||||
fn fresh_expn(&mut self, expn_data: Option<ExpnData>) -> ExpnId {
|
||||
fn fresh_expn(&mut self, mut expn_data: Option<ExpnData>) -> ExpnId {
|
||||
let raw_id = self.expn_data.len() as u32;
|
||||
if let Some(data) = expn_data.as_mut() {
|
||||
data.orig_id.replace(raw_id).expect_none("orig_id should be None");
|
||||
}
|
||||
self.expn_data.push(expn_data);
|
||||
ExpnId(self.expn_data.len() as u32 - 1)
|
||||
ExpnId(raw_id)
|
||||
}
|
||||
|
||||
fn expn_data(&self, expn_id: ExpnId) -> &ExpnData {
|
||||
@ -226,6 +239,7 @@ fn remove_mark(&self, ctxt: &mut SyntaxContext) -> (ExpnId, Transparency) {
|
||||
fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> {
|
||||
let mut marks = Vec::new();
|
||||
while ctxt != SyntaxContext::root() {
|
||||
debug!("marks: getting parent of {:?}", ctxt);
|
||||
marks.push(self.outer_mark(ctxt));
|
||||
ctxt = self.parent_ctxt(ctxt);
|
||||
}
|
||||
@ -234,8 +248,14 @@ fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> {
|
||||
}
|
||||
|
||||
fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span {
|
||||
debug!("walk_chain({:?}, {:?})", span, to);
|
||||
debug!("walk_chain: span ctxt = {:?}", span.ctxt());
|
||||
while span.from_expansion() && span.ctxt() != to {
|
||||
span = self.expn_data(self.outer_expn(span.ctxt())).call_site;
|
||||
let outer_expn = self.outer_expn(span.ctxt());
|
||||
debug!("walk_chain({:?}): outer_expn={:?}", span, outer_expn);
|
||||
let expn_data = self.expn_data(outer_expn);
|
||||
debug!("walk_chain({:?}): expn_data={:?}", span, expn_data);
|
||||
span = expn_data.call_site;
|
||||
}
|
||||
span
|
||||
}
|
||||
@ -682,6 +702,16 @@ pub struct ExpnData {
|
||||
/// The `DefId` of the macro being invoked,
|
||||
/// if this `ExpnData` corresponds to a macro invocation
|
||||
pub macro_def_id: Option<DefId>,
|
||||
/// The crate that originally created this `ExpnData. During
|
||||
/// metadata serialization, we only encode `ExpnData`s that were
|
||||
/// created locally - when our serialized metadata is decoded,
|
||||
/// foreign `ExpnId`s will have their `ExpnData` looked up
|
||||
/// from the crate specified by `Crate
|
||||
pub krate: CrateNum,
|
||||
/// The raw that this `ExpnData` had in its original crate.
|
||||
/// An `ExpnData` can be created before being assigned an `ExpnId`,
|
||||
/// so this might be `None` until `set_expn_data` is called
|
||||
pub orig_id: Option<u32>,
|
||||
}
|
||||
|
||||
impl ExpnData {
|
||||
@@ -702,6 +732,8 @@ pub fn default(
             local_inner_macros: false,
             edition,
             macro_def_id,
+            krate: LOCAL_CRATE,
+            orig_id: None,
         }
     }

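Taken together, the new `krate` and `orig_id` fields give every expansion a globally unique identity: the raw id is only unique within the crate that allocated it, so a cross-crate reference has to carry both parts. A toy model of that key (types invented for illustration, not the rustc definitions):

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct CrateNum(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct GlobalExpnKey {
        krate: CrateNum,
        orig_id: u32,
    }

    fn main() {
        // The same raw id 7 in two different crates names two different expansions.
        let a = GlobalExpnKey { krate: CrateNum(0), orig_id: 7 };
        let b = GlobalExpnKey { krate: CrateNum(5), orig_id: 7 };
        assert_ne!(a, b);
        println!("{:?} vs {:?}", a, b);
    }
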
@@ -789,7 +821,7 @@ pub fn article(self) -> &'static str {
 }
 
 /// The kind of AST transform.
-#[derive(Clone, Copy, PartialEq, Debug, RustcEncodable, RustcDecodable, HashStable_Generic)]
+#[derive(Clone, Copy, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable_Generic)]
 pub enum AstPass {
     StdImports,
     TestHarness,

@@ -847,14 +879,234 @@ fn descr(self) -> &'static str {
     }
 }
 
-impl Encodable for ExpnId {
-    fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
-        Ok(()) // FIXME(jseyfried) intercrate hygiene
-    }
-}
+impl UseSpecializedEncodable for ExpnId {}
+impl UseSpecializedDecodable for ExpnId {}
+
+/// Additional information used to assist in decoding hygiene data
+pub struct HygieneContext {
+    // Maps serialized `SyntaxContext` ids to a `SyntaxContext` in the current
+    // global `HygieneData`. When we deserialize a `SyntaxContext`, we need to create
+    // a new id in the global `HygieneData`. This map tracks the ID we end up picking,
+    // so that multiple occurrences of the same serialized id are decoded to the same
+    // `SyntaxContext`
+    remapped_ctxts: Lock<Vec<Option<SyntaxContext>>>,
+    // The same as `remapped_ctxts`, but for `ExpnId`s
+    remapped_expns: Lock<Vec<Option<ExpnId>>>,
+}
+
+impl HygieneContext {
+    pub fn new() -> HygieneContext {
+        HygieneContext {
+            remapped_ctxts: Lock::new(Vec::new()),
+            remapped_expns: Lock::new(Vec::new()),
+        }
+    }
+}
 
-impl Decodable for ExpnId {
-    fn decode<D: Decoder>(_: &mut D) -> Result<Self, D::Error> {
-        Ok(ExpnId::root()) // FIXME(jseyfried) intercrate hygiene
-    }
-}
+pub fn decode_expn_id<
+    'a,
+    D: Decoder,
+    F: FnOnce(&mut D, u32) -> Result<ExpnData, D::Error>,
+    G: FnOnce(CrateNum) -> &'a HygieneContext,
+>(
+    d: &mut D,
+    mode: ExpnDataDecodeMode<'a, G>,
+    decode_data: F,
+) -> Result<ExpnId, D::Error> {
+    let index = u32::decode(d)?;
+    let context = match mode {
+        ExpnDataDecodeMode::IncrComp(context) => context,
+        ExpnDataDecodeMode::Metadata(get_context) => {
+            let krate = CrateNum::decode(d)?;
+            get_context(krate)
+        }
+    };
+
+    // Do this after decoding, so that we decode a `CrateNum`
+    // if necessary
+    if index == ExpnId::root().as_u32() {
+        debug!("decode_expn_id: deserialized root");
+        return Ok(ExpnId::root());
+    }
+
+    let outer_expns = &context.remapped_expns;
+
+    // Ensure that the lock() temporary is dropped early
+    {
+        if let Some(expn_id) = outer_expns.lock().get(index as usize).copied().flatten() {
+            return Ok(expn_id);
+        }
+    }
+
+    // Don't decode the data inside `HygieneData::with`, since we need to recursively decode
+    // other ExpnIds
+    let expn_data = decode_data(d, index)?;
+
+    let expn_id = HygieneData::with(|hygiene_data| {
+        let expn_id = ExpnId(hygiene_data.expn_data.len() as u32);
+        hygiene_data.expn_data.push(Some(expn_data));
+
+        // Drop lock() temporary early
+        {
+            let mut expns = outer_expns.lock();
+            let new_len = index as usize + 1;
+            if expns.len() < new_len {
+                expns.resize(new_len, None);
+            }
+            expns[index as usize] = Some(expn_id);
+        }
+        expn_id
+    });
+    return Ok(expn_id);
+}

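`decode_expn_id` above can call `decode_data` recursively, which is why the fresh id is published to `remapped_expns` before returning. `decode_syntax_context` below goes one step further: it reserves a dummy entry under a fresh id *before* running the decode closure, so a record that (indirectly) references itself resolves to a valid id instead of recursing forever. A standalone sketch of that reserve-then-overwrite trick (names invented for the example):

    fn main() {
        let mut table: Vec<Option<String>> = Vec::new();

        // Reserve the slot first...
        let id = table.len();
        table.push(None); // placeholder meaning "currently being decoded"

        // ...then "decode" the payload, which may refer back to `id`.
        let payload = format!("record that points at id {}", id);

        // Finally overwrite the placeholder with the real data.
        let old = table[id].replace(payload);
        assert!(old.is_none()); // nothing else touched the slot in the meantime
        println!("{:?}", table[id]);
    }
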
+// Decodes `SyntaxContext`, using the provided `HygieneContext`
+// to track which `SyntaxContext`s we have already decoded.
+// The provided closure will be invoked to deserialize a `SyntaxContextData`
+// if we haven't already seen the id of the `SyntaxContext` we are deserializing.
+pub fn decode_syntax_context<
+    D: Decoder,
+    F: FnOnce(&mut D, u32) -> Result<SyntaxContextData, D::Error>,
+>(
+    d: &mut D,
+    context: &HygieneContext,
+    decode_data: F,
+) -> Result<SyntaxContext, D::Error> {
+    let raw_id: u32 = Decodable::decode(d)?;
+    if raw_id == 0 {
+        debug!("decode_syntax_context: deserialized root");
+        // The root is special
+        return Ok(SyntaxContext::root());
+    }
+
+    let outer_ctxts = &context.remapped_ctxts;
+
+    // Ensure that the lock() temporary is dropped early
+    {
+        if let Some(ctxt) = outer_ctxts.lock().get(raw_id as usize).copied().flatten() {
+            return Ok(ctxt);
+        }
+    }
+
+    // Allocate and store SyntaxContext id *before* calling the decoder function,
+    // as the SyntaxContextData may reference itself.
+    let new_ctxt = HygieneData::with(|hygiene_data| {
+        let new_ctxt = SyntaxContext(hygiene_data.syntax_context_data.len() as u32);
+        // Push a dummy SyntaxContextData to ensure that nobody else can get the
+        // same ID as us. This will be overwritten after calling `decode_data`
+        hygiene_data.syntax_context_data.push(SyntaxContextData {
+            outer_expn: ExpnId::root(),
+            outer_transparency: Transparency::Transparent,
+            parent: SyntaxContext::root(),
+            opaque: SyntaxContext::root(),
+            opaque_and_semitransparent: SyntaxContext::root(),
+            dollar_crate_name: kw::Invalid,
+        });
+        // Ensure that the lock() temporary is dropped early
+        {
+            let mut ctxts = outer_ctxts.lock();
+            let new_len = raw_id as usize + 1;
+            if ctxts.len() < new_len {
+                ctxts.resize(new_len, None);
+            }
+            ctxts[raw_id as usize] = Some(new_ctxt);
+        }
+        new_ctxt
+    });
+
+    // Don't try to decode data while holding the lock, since we need to
+    // be able to recursively decode a SyntaxContext
+    let mut ctxt_data = decode_data(d, raw_id)?;
+    // Reset `dollar_crate_name` so that it will be updated by `update_dollar_crate_names`
+    // We don't care what the encoding crate set this to - we want to resolve it
+    // from the perspective of the current compilation session
+    ctxt_data.dollar_crate_name = kw::DollarCrate;
+
+    // Overwrite the dummy data with our decoded SyntaxContextData
+    HygieneData::with(|hygiene_data| {
+        let dummy = std::mem::replace(
+            &mut hygiene_data.syntax_context_data[new_ctxt.as_u32() as usize],
+            ctxt_data,
+        );
+        // Make sure nothing weird happened while `decode_data` was running
+        assert_eq!(dummy.dollar_crate_name, kw::Invalid);
+    });
+
+    return Ok(new_ctxt);
+}
+
+pub fn num_syntax_ctxts() -> usize {
+    HygieneData::with(|data| data.syntax_context_data.len())
+}
+
+pub fn for_all_data_in<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
+    ctxts: impl Iterator<Item = SyntaxContext>,
+    mut f: F,
+) -> Result<(), E> {
+    let all_data: Vec<_> = HygieneData::with(|data| {
+        ctxts.map(|ctxt| (ctxt, data.syntax_context_data[ctxt.0 as usize].clone())).collect()
+    });
+    for (ctxt, data) in all_data.into_iter() {
+        f((ctxt.0, ctxt, &data))?;
+    }
+    Ok(())
+}
+
+pub fn for_all_data<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
+    mut f: F,
+) -> Result<(), E> {
+    let all_data = HygieneData::with(|data| data.syntax_context_data.clone());
+    for (i, data) in all_data.into_iter().enumerate() {
+        f((i as u32, SyntaxContext(i as u32), &data))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn for_all_expn_data<E, F: FnMut(u32, &ExpnData) -> Result<(), E>>(mut f: F) -> Result<(), E> {
|
||||
let all_data = HygieneData::with(|data| data.expn_data.clone());
|
||||
for (i, data) in all_data.into_iter().enumerate() {
|
||||
f(i as u32, &data.unwrap_or_else(|| panic!("Missing ExpnData!")))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn raw_encode_syntax_context<E: Encoder>(
|
||||
ctxt: SyntaxContext,
|
||||
e: &mut E,
|
||||
) -> Result<(), E::Error> {
|
||||
ctxt.0.encode(e)
|
||||
}
|
||||
|
||||
pub fn raw_encode_expn_id<E: Encoder>(
|
||||
expn: ExpnId,
|
||||
mode: ExpnDataEncodeMode,
|
||||
e: &mut E,
|
||||
) -> Result<(), E::Error> {
|
||||
match mode {
|
||||
ExpnDataEncodeMode::IncrComp => expn.0.encode(e),
|
||||
ExpnDataEncodeMode::Metadata => {
|
||||
let data = expn.expn_data();
|
||||
data.orig_id.expect("Missing orig_id").encode(e)?;
|
||||
data.krate.encode(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum ExpnDataEncodeMode {
|
||||
IncrComp,
|
||||
Metadata,
|
||||
}
|
||||
|
||||
pub enum ExpnDataDecodeMode<'a, F: FnOnce(CrateNum) -> &'a HygieneContext> {
|
||||
IncrComp(&'a HygieneContext),
|
||||
Metadata(F),
|
||||
}
|
||||
|
||||
impl<'a> ExpnDataDecodeMode<'a, Box<dyn FnOnce(CrateNum) -> &'a HygieneContext>> {
|
||||
pub fn incr_comp(ctxt: &'a HygieneContext) -> Self {
|
||||
ExpnDataDecodeMode::IncrComp(ctxt)
|
||||
}
|
||||
}
|
||||
|
||||
impl UseSpecializedEncodable for SyntaxContext {}
|
||||
impl UseSpecializedDecodable for SyntaxContext {}
|
||||
|
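`raw_encode_expn_id` and `decode_expn_id` form a matched pair. In `IncrComp` mode the raw index is written directly, since incremental compilation reloads ids from the same session's numbering; in `Metadata` mode the encoder writes the expansion's original index plus the `CrateNum` it came from, so a downstream crate can route the pair through the right crate's `HygieneContext`. A self-contained sketch of the wire format only (the `Mode` enum and bare `u32` payloads are illustrative stand-ins for the rustc types):

    // Sketch of the two on-disk shapes an ExpnId can take.
    enum Mode { IncrComp, Metadata }

    fn encode_expn(buf: &mut Vec<u32>, raw_index: u32, orig_id: u32, krate: u32, mode: Mode) {
        match mode {
            // Same-session numbering is stable: the raw index suffices.
            Mode::IncrComp => buf.push(raw_index),
            // Cross-crate: write (orig_id, krate) so the decoder knows
            // which crate's HygieneContext to remap through.
            Mode::Metadata => {
                buf.push(orig_id);
                buf.push(krate);
            }
        }
    }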
@ -12,6 +12,7 @@
#![feature(nll)]
#![feature(optin_builtin_traits)]
#![feature(min_specialization)]
#![feature(option_expect_none)]

// FIXME(#56935): Work around ICEs during cross-compilation.
#[allow(unused)]
@ -30,8 +31,8 @@
use edition::Edition;
pub mod hygiene;
pub use hygiene::SyntaxContext;
use hygiene::Transparency;
pub use hygiene::{DesugaringKind, ExpnData, ExpnId, ExpnKind, ForLoopLoc, MacroKind};
use hygiene::{Transparency, NUM_TRANSPARENCIES};
pub mod def_id;
use def_id::{CrateNum, DefId, LOCAL_CRATE};
mod span_encoding;
@ -44,7 +45,6 @@
pub mod fatal_error;

use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{Lock, Lrc};

@ -86,6 +86,9 @@ pub fn new(edition: Edition) -> SessionGlobals {
    }
}

// If this ever becomes non-thread-local, `decode_syntax_context`
// and `decode_expn_id` will need to be updated to handle concurrent
// deserialization.
scoped_tls::scoped_thread_local!(pub static SESSION_GLOBALS: SessionGlobals);

// FIXME: Perhaps this should not implement Rustc{Decodable, Encodable}
@ -1733,8 +1736,9 @@ fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize {
/// This is a hack to allow using the `HashStable_Generic` derive macro
/// instead of implementing everything in librustc_middle.
pub trait HashStableContext {
    fn hash_spans(&self) -> bool;
    fn hash_def_id(&mut self, _: DefId, hasher: &mut StableHasher);
    fn hash_crate_num(&mut self, _: CrateNum, hasher: &mut StableHasher);
    fn hash_spans(&self) -> bool;
    fn byte_pos_to_line_and_col(
        &mut self,
        byte: BytePos,
@ -1757,15 +1761,14 @@ impl<CTX> HashStable<CTX> for Span
    fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
        const TAG_VALID_SPAN: u8 = 0;
        const TAG_INVALID_SPAN: u8 = 1;
        const TAG_EXPANSION: u8 = 0;
        const TAG_NO_EXPANSION: u8 = 1;

        if !ctx.hash_spans() {
            return;
        }

        if *self == DUMMY_SP {
            return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
            std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
            return;
        }

        // If this is not an empty or invalid span, we want to hash the last
@ -1775,12 +1778,16 @@ fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
        let (file_lo, line_lo, col_lo) = match ctx.byte_pos_to_line_and_col(span.lo) {
            Some(pos) => pos,
            None => {
                return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
                std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
                span.ctxt.hash_stable(ctx, hasher);
                return;
            }
        };

        if !file_lo.contains(span.hi) {
            return std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
            std::hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
            span.ctxt.hash_stable(ctx, hasher);
            return;
        }

        std::hash::Hash::hash(&TAG_VALID_SPAN, hasher);
@ -1793,8 +1800,16 @@ fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
        let len = ((span.hi - span.lo).0 as u64) << 32;
        let line_col_len = col | line | len;
        std::hash::Hash::hash(&line_col_len, hasher);
        span.ctxt.hash_stable(ctx, hasher);
    }
}

        if span.ctxt == SyntaxContext::root() {
impl<CTX: HashStableContext> HashStable<CTX> for SyntaxContext {
    fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
        const TAG_EXPANSION: u8 = 0;
        const TAG_NO_EXPANSION: u8 = 1;

        if *self == SyntaxContext::root() {
            TAG_NO_EXPANSION.hash_stable(ctx, hasher);
        } else {
            TAG_EXPANSION.hash_stable(ctx, hasher);
@ -1803,21 +1818,39 @@ fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
            // times, we cache a stable hash of it and hash that instead of
            // recursing every time.
            thread_local! {
                static CACHE: RefCell<FxHashMap<hygiene::ExpnId, u64>> = Default::default();
                static CACHE: RefCell<Vec<Option<[Option<u64>; NUM_TRANSPARENCIES]>>> = Default::default();
            }

            let sub_hash: u64 = CACHE.with(|cache| {
                let expn_id = span.ctxt.outer_expn();
                let (expn_id, transparency, _) = self.outer_mark_with_data();
                let index = expn_id.as_u32() as usize;

                if let Some(&sub_hash) = cache.borrow().get(&expn_id) {
                    return sub_hash;
                if let Some(sub_hash_cache) = cache.borrow().get(index).copied().flatten() {
                    if let Some(sub_hash) = sub_hash_cache[transparency as usize] {
                        return sub_hash;
                    }
                }

                let new_len = index + 1;

                let mut hasher = StableHasher::new();
                expn_id.expn_data().hash_stable(ctx, &mut hasher);
                transparency.hash_stable(ctx, &mut hasher);

                let sub_hash: Fingerprint = hasher.finish();
                let sub_hash = sub_hash.to_smaller_hash();
                cache.borrow_mut().insert(expn_id, sub_hash);

                let mut cache = cache.borrow_mut();
                if cache.len() < new_len {
                    cache.resize(new_len, None);
                }
                if let Some(mut sub_hash_cache) = cache[index] {
                    sub_hash_cache[transparency as usize] = Some(sub_hash);
                } else {
                    let mut sub_hash_cache = [None; NUM_TRANSPARENCIES];
                    sub_hash_cache[transparency as usize] = Some(sub_hash);
                    cache[index] = Some(sub_hash_cache);
                }
                sub_hash
            });
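The cache rewrite above follows from the hashing change: a `SyntaxContext` now contributes its `(ExpnId, Transparency)` outer mark to the stable hash rather than the expansion alone, so the memoization must be keyed on both. The dense `Vec` indexed by expansion id replaces the old `FxHashMap`, with one slot per transparency level. A standalone sketch of the same two-level memoization (using `3` for the number of levels, matching `Transparency`'s three variants; the sketch writes through a mutable reference via `get_or_insert` so the computed hash actually lands in the cache):

    const NUM_LEVELS: usize = 3; // stand-in for NUM_TRANSPARENCIES

    fn memoized_hash(
        cache: &mut Vec<Option<[Option<u64>; NUM_LEVELS]>>,
        index: usize, // expansion id as an array index
        level: usize, // transparency as an array index
        compute: impl FnOnce() -> u64,
    ) -> u64 {
        // Fast path: both the expansion slot and its per-transparency entry exist.
        if let Some(slots) = cache.get(index).copied().flatten() {
            if let Some(hash) = slots[level] {
                return hash;
            }
        }
        let hash = compute();
        if cache.len() < index + 1 {
            cache.resize(index + 1, None);
        }
        // Fill the entry, creating the expansion's slot array on first use.
        cache[index].get_or_insert([None; NUM_LEVELS])[level] = Some(hash);
        hash
    }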
src/test/ui/hygiene/auxiliary/needs_hygiene.rs (new file, 5 lines)
@ -0,0 +1,5 @@
#![feature(decl_macro)]
macro x() { struct MyStruct; }

x!();
x!();
src/test/ui/hygiene/cross_crate_hygiene.rs (new file, 8 lines)
@ -0,0 +1,8 @@
// check-pass
// aux-build:needs_hygiene.rs

extern crate needs_hygiene;

use needs_hygiene::*;

fn main() {}
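These two new tests exercise the decoding path end to end: `needs_hygiene` defines a `decl_macro` whose expansion creates a hygienically named struct and invokes it twice, and `cross_crate_hygiene` glob-imports the result, forcing the foreign crate's syntax contexts to be deserialized. The same property is visible within a single crate; a hypothetical snippet (not part of the patch) showing why the double invocation is legal at all:

    #![feature(decl_macro)]

    macro x() {
        struct MyStruct;
    }

    // Each expansion gets its own SyntaxContext, so the two `MyStruct`
    // definitions do not collide even though they share a textual name.
    x!();
    x!();

    fn main() {}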
src/test/ui/hygiene/panic-location.rs (new file, 10 lines)
@ -0,0 +1,10 @@
// run-fail
// check-run-results
// exec-env:RUST_BACKTRACE=0
//
// Regression test for issue #70963
// The captured stderr from this test reports a location
// inside `VecDeque::with_capacity`, instead of `<::core::macros::panic macros>`
fn main() {
    std::collections::VecDeque::<String>::with_capacity(!0);
}
src/test/ui/hygiene/panic-location.run.stderr (new file, 2 lines)
@ -0,0 +1,2 @@
thread 'main' panicked at 'capacity overflow', $SRC_DIR/liballoc/collections/vec_deque.rs:LL:COL
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
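The expected stderr is the substance of this regression test: after hygiene serialization, the panic location must still point at the real liballoc source of `VecDeque::with_capacity`, not at an opaque `<::core::macros::panic macros>` span. The panic itself is easy to reproduce, since `!0` is `usize::MAX` and overflows any capacity computation:

    use std::collections::VecDeque;

    fn main() {
        // `!0` is usize::MAX; requesting that capacity panics with
        // "capacity overflow" from inside liballoc.
        let _q: VecDeque<String> = VecDeque::with_capacity(!0);
    }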
@ -2,79 +2,79 @@ PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
PRINT-BANG INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Ident {
        ident: "M",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
        ],
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
]
PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Ident {
        ident: "A",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
        ],
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
]
@ -2,109 +2,109 @@ PRINT-ATTR INPUT (DISPLAY): struct A(identity ! ($crate :: S)) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Ident {
        ident: "A",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "identity",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: '!',
                spacing: Alone,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Ident {
                        ident: "$crate",
                        span: #3 bytes(LO..HI),
                        span: #6 bytes(LO..HI),
                    },
                    Punct {
                        ch: ':',
                        spacing: Joint,
                        span: #3 bytes(LO..HI),
                        span: #6 bytes(LO..HI),
                    },
                    Punct {
                        ch: ':',
                        spacing: Alone,
                        span: #3 bytes(LO..HI),
                        span: #6 bytes(LO..HI),
                    },
                    Ident {
                        ident: "S",
                        span: #3 bytes(LO..HI),
                        span: #6 bytes(LO..HI),
                    },
                ],
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
        ],
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
]
PRINT-ATTR INPUT (DISPLAY): struct B(identity ! ($crate :: S)) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #10 bytes(LO..HI),
        span: #13 bytes(LO..HI),
    },
    Ident {
        ident: "B",
        span: #10 bytes(LO..HI),
        span: #13 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "identity",
                span: #10 bytes(LO..HI),
                span: #13 bytes(LO..HI),
            },
            Punct {
                ch: '!',
                spacing: Alone,
                span: #10 bytes(LO..HI),
                span: #13 bytes(LO..HI),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Ident {
                        ident: "$crate",
                        span: #10 bytes(LO..HI),
                        span: #13 bytes(LO..HI),
                    },
                    Punct {
                        ch: ':',
                        spacing: Joint,
                        span: #10 bytes(LO..HI),
                        span: #13 bytes(LO..HI),
                    },
                    Punct {
                        ch: ':',
                        spacing: Alone,
                        span: #10 bytes(LO..HI),
                        span: #13 bytes(LO..HI),
                    },
                    Ident {
                        ident: "S",
                        span: #10 bytes(LO..HI),
                        span: #13 bytes(LO..HI),
                    },
                ],
                span: #10 bytes(LO..HI),
                span: #13 bytes(LO..HI),
            },
        ],
        span: #10 bytes(LO..HI),
        span: #13 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #10 bytes(LO..HI),
        span: #13 bytes(LO..HI),
    },
]
@ -2,239 +2,239 @@ PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
PRINT-BANG INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Ident {
        ident: "M",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
        ],
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
]
PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Ident {
        ident: "A",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
        ],
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
]
PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
PRINT-DERIVE INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Ident {
        ident: "D",
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #3 bytes(LO..HI),
                span: #6 bytes(LO..HI),
            },
        ],
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #3 bytes(LO..HI),
        span: #6 bytes(LO..HI),
    },
]
PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
PRINT-BANG INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Ident {
        ident: "M",
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
        ],
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
]
PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Ident {
        ident: "A",
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
        ],
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
]
PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
PRINT-DERIVE INPUT (DEBUG): TokenStream [
    Ident {
        ident: "struct",
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Ident {
        ident: "D",
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Ident {
                ident: "$crate",
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Joint,
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Punct {
                ch: ':',
                spacing: Alone,
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
            Ident {
                ident: "S",
                span: #13 bytes(LO..HI),
                span: #16 bytes(LO..HI),
            },
        ],
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
    Punct {
        ch: ';',
        spacing: Alone,
        span: #13 bytes(LO..HI),
        span: #16 bytes(LO..HI),
    },
]
@ -8,7 +8,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
                span: #0 bytes(402..403),
            },
        ],
        span: #3 bytes(269..271),
        span: #6 bytes(269..271),
    },
]
PRINT-ATTR INPUT (DISPLAY): const A : u8 = 0 ;
@ -1,4 +1,4 @@
Def site: $DIR/auxiliary/make-macro.rs:7:9: 10:10 (#3)
Def site: $DIR/auxiliary/make-macro.rs:7:9: 10:10 (#6)
#![feature /* 0#0 */(prelude_import)]
#[prelude_import /* 0#1 */]
use std /* 0#1 */::prelude /* 0#1 */::v1 /* 0#1 */::*;
@ -21,12 +21,19 @@ Expansions:
0: parent: ExpnId(0), call_site_ctxt: #0, def_site_ctxt: #0, kind: Root
1: parent: ExpnId(0), call_site_ctxt: #0, def_site_ctxt: #0, kind: AstPass(StdImports)
2: parent: ExpnId(0), call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "meta_macro::print_def_site")
3: parent: ExpnId(0), call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "cfg_if")
4: parent: ExpnId(3), call_site_ctxt: #4, def_site_ctxt: #0, kind: Macro(Bang, "$crate::cfg_if")
5: parent: ExpnId(4), call_site_ctxt: #5, def_site_ctxt: #0, kind: Macro(Bang, "$crate::cfg_if")
6: parent: ExpnId(5), call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "features")

SyntaxContexts:
#0: parent: #0, outer_mark: (ExpnId(0), Opaque)
#1: parent: #0, outer_mark: (ExpnId(1), Opaque)
#2: parent: #0, outer_mark: (ExpnId(1), Transparent)
#3: parent: #0, outer_mark: (ExpnId(2), Opaque)
#4: parent: #0, outer_mark: (ExpnId(2), Transparent)
#5: parent: #0, outer_mark: (ExpnId(2), SemiTransparent)
#3: parent: #0, outer_mark: (ExpnId(6), SemiTransparent)
#4: parent: #0, outer_mark: (ExpnId(3), SemiTransparent)
#5: parent: #0, outer_mark: (ExpnId(4), SemiTransparent)
#6: parent: #0, outer_mark: (ExpnId(2), Opaque)
#7: parent: #0, outer_mark: (ExpnId(2), Transparent)
#8: parent: #0, outer_mark: (ExpnId(2), SemiTransparent)
*/
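The renumbering in the expected output above is mechanical: the deserialized standard-library expansions (`StdImports`, `cfg_if`, `features`) now occupy the low ids, shifting the test's own `meta_macro::print_def_site` contexts from #3..#5 to #6..#8. Each `SyntaxContexts` row records a parent context plus the `(ExpnId, Transparency)` mark that produced it; a toy sketch of reading such a table (illustrative types, not rustc's):

    // One row per SyntaxContext: `#n: parent: #p, outer_mark: (ExpnId(e), _)`.
    struct Row { parent: u32, outer_expn: u32 }

    // Collect the chain of expansion marks for a context by walking parents;
    // this chain is what hygiene consults when resolving a name.
    fn marks(rows: &[Row], mut ctxt: u32) -> Vec<u32> {
        let mut out = Vec::new();
        while ctxt != 0 {
            out.push(rows[ctxt as usize].outer_expn);
            ctxt = rows[ctxt as usize].parent;
        }
        out
    }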
@ -1 +1 @@
Def site: $DIR/auxiliary/make-macro.rs:7:9: 10:10 (#3)
Def site: $DIR/auxiliary/make-macro.rs:7:9: 10:10 (#6)
@ -5,10 +5,10 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
        stream: TokenStream [
            Ident {
                ident: "FirstStruct",
                span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#3),
                span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#8),
            },
        ],
        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#3),
        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#7),
    },
]
PRINT-BANG INPUT (DISPLAY): SecondStruct
@ -18,9 +18,9 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
        stream: TokenStream [
            Ident {
                ident: "SecondStruct",
                span: $DIR/nested-macro-rules.rs:18:38: 18:50 (#9),
                span: $DIR/nested-macro-rules.rs:18:38: 18:50 (#14),
            },
        ],
        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#8),
        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#13),
    },
]
@ -4,7 +4,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
        kind: Str,
        symbol: "hi",
        suffix: None,
        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#3),
        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#6),
    },
    Group {
        delimiter: None,
@ -44,7 +44,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
                span: $DIR/nodelim-groups.rs:17:27: 17:28 (#0),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#3),
        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#6),
    },
    Group {
        delimiter: Parenthesis,
@ -53,21 +53,21 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#3),
                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#6),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#3),
                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#6),
            },
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#3),
                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#6),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#3),
        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#6),
    },
]
PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1)
@ -77,7 +77,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
        kind: Str,
        symbol: "hi",
        suffix: None,
        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#8),
        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#11),
    },
    Group {
        delimiter: None,
@ -86,49 +86,49 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
                kind: Str,
                symbol: "hello",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Punct {
                ch: '.',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Ident {
                ident: "len",
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [],
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Literal {
                kind: Str,
                symbol: "world",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Punct {
                ch: '.',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Ident {
                ident: "len",
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [],
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#11),
    },
    Group {
        delimiter: Parenthesis,
@ -137,20 +137,20 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#8),
                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#11),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#8),
                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#11),
            },
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#8),
                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#11),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#8),
        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#11),
    },
]