Allow creating definitions inside the query system.

Camille GILLOT 2021-07-12 22:19:25 +02:00
parent 3dcb616888
commit 43bb31b954
23 changed files with 260 additions and 150 deletions
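
The central change is mechanical: everywhere the compiler previously materialized a StableHashingContext with `tcx.create_stable_hashing_context()` and kept it around, it now passes a closure to `tcx.with_stable_hashing_context(|hcx| ...)`, so the borrow of the (now RwLock-guarded) `Definitions` lives only for the duration of the hashing and a query can later take the write lock to create a new definition. Below is a minimal, self-contained sketch of that closure-scoped pattern; the types are stand-ins for illustration, not rustc's actual `TyCtxt`/`StableHashingContext` API.

    use std::sync::RwLock;

    // Stand-ins for rustc's `Definitions` and `StableHashingContext` (illustration only).
    struct Definitions { table: Vec<String> }
    struct HashingContext<'a> { definitions: &'a Definitions }

    struct GlobalCtxt { definitions: RwLock<Definitions> }

    impl GlobalCtxt {
        // Closure-scoped accessor: the read guard is taken here and released as soon as
        // `f` returns, so a later definition-creating call can still take the write lock.
        fn with_stable_hashing_context<R>(&self, f: impl FnOnce(HashingContext<'_>) -> R) -> R {
            let guard = self.definitions.read().unwrap();
            f(HashingContext { definitions: &*guard })
        }
    }

    fn main() {
        let gcx = GlobalCtxt {
            definitions: RwLock::new(Definitions { table: vec!["crate".to_owned()] }),
        };
        // All hashing-related work happens inside the closure; no long-lived context escapes.
        let count = gcx.with_stable_hashing_context(|hcx| hcx.definitions.table.len());
        println!("hashed {} definitions", count);
    }
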

View File

@ -93,8 +93,9 @@ pub fn for_vtable_ty(
/// Right now this takes the form of a hex-encoded opaque hash value.
pub fn generate_unique_id_string(self, tcx: TyCtxt<'tcx>) -> String {
let mut hasher = StableHasher::new();
let mut hcx = tcx.create_stable_hashing_context();
hcx.while_hashing_spans(false, |hcx| self.hash_stable(hcx, &mut hasher));
tcx.with_stable_hashing_context(|mut hcx| {
hcx.while_hashing_spans(false, |hcx| self.hash_stable(hcx, &mut hasher))
});
hasher.finish::<Fingerprint>().to_hex()
}

View File

@ -701,16 +701,20 @@ fn push_const_param<'tcx>(tcx: TyCtxt<'tcx>, ct: ty::Const<'tcx>, output: &mut S
// If we cannot evaluate the constant to a known type, we fall back
// to emitting a stable hash value of the constant. This isn't very pretty
// but we get a deterministic, virtually unique value for the constant.
let hcx = &mut tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
let ct = ct.eval(tcx, ty::ParamEnv::reveal_all());
hcx.while_hashing_spans(false, |hcx| ct.to_valtree().hash_stable(hcx, &mut hasher));
//
// Let's only emit 64 bits of the hash value. That should be plenty for
// avoiding collisions and will make the emitted type names shorter.
// Note: Don't use `StableHashResult` impl of `u64` here directly, since that
// would lead to endianness problems.
let hash: u128 = hasher.finish();
let hash_short = (hash.to_le() as u64).to_le();
let hash_short = tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
let ct = ct.eval(tcx, ty::ParamEnv::reveal_all());
hcx.while_hashing_spans(false, |hcx| {
ct.to_valtree().hash_stable(hcx, &mut hasher)
});
// Note: Don't use `StableHashResult` impl of `u64` here directly, since that
// would lead to endianness problems.
let hash: u128 = hasher.finish();
(hash.to_le() as u64).to_le()
});
if cpp_like_debuginfo(tcx) {
write!(output, "CONST${:x}", hash_short)

View File

@ -10,6 +10,7 @@
#![feature(array_windows)]
#![feature(associated_type_bounds)]
#![feature(auto_traits)]
#![feature(cell_leak)]
#![feature(control_flow_enum)]
#![feature(extend_one)]
#![feature(let_else)]

View File

@ -539,6 +539,33 @@ pub fn borrow(&self) -> ReadGuard<'_, T> {
pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
self.write()
}
#[cfg(not(parallel_compiler))]
#[inline(always)]
pub fn clone_guard<'a>(rg: &ReadGuard<'a, T>) -> ReadGuard<'a, T> {
ReadGuard::clone(rg)
}
#[cfg(parallel_compiler)]
#[inline(always)]
pub fn clone_guard<'a>(rg: &ReadGuard<'a, T>) -> ReadGuard<'a, T> {
ReadGuard::rwlock(&rg).read()
}
#[cfg(not(parallel_compiler))]
#[inline(always)]
pub fn leak(&self) -> &T {
ReadGuard::leak(self.read())
}
#[cfg(parallel_compiler)]
#[inline(always)]
pub fn leak(&self) -> &T {
let guard = self.read();
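// Forgetting the guard below keeps the lock read-locked for the rest of its lifetime,
// so no writer can ever invalidate the reference we are about to hand out.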
let ret = unsafe { &*(&*guard as *const T) };
std::mem::forget(guard);
ret
}
}
// FIXME: Probably a bad idea

View File

@ -423,7 +423,7 @@ fn encode_info_for_items(&mut self) {
}
fn encode_def_path_table(&mut self) {
let table = self.tcx.definitions_untracked().def_path_table();
let table = self.tcx.def_path_table();
if self.is_proc_macro {
for def_index in std::iter::once(CRATE_DEF_INDEX)
.chain(self.tcx.resolutions(()).proc_macros.iter().map(|p| p.local_def_index))
@ -443,9 +443,7 @@ fn encode_def_path_table(&mut self) {
}
fn encode_def_path_hash_map(&mut self) -> LazyValue<DefPathHashMapRef<'static>> {
self.lazy(DefPathHashMapRef::BorrowedFromTcx(
self.tcx.definitions_untracked().def_path_hash_to_def_index_map(),
))
self.lazy(DefPathHashMapRef::BorrowedFromTcx(self.tcx.def_path_hash_to_def_index_map()))
}
fn encode_source_map(&mut self) -> LazyArray<rustc_span::SourceFile> {
@ -614,7 +612,8 @@ fn encode_crate_root(&mut self) -> LazyValue<CrateRoot> {
let interpret_alloc_index_bytes = self.position() - i;
// Encode the proc macro data. This affects 'tables',
// so we need to do this before we encode the tables
// so we need to do this before we encode the tables.
// This overwrites def_keys, so it must happen after encode_def_path_table.
i = self.position();
let proc_macro_data = self.encode_proc_macros();
let proc_macro_data_bytes = self.position() - i;
@ -992,8 +991,7 @@ fn encode_def_ids(&mut self) {
return;
}
let tcx = self.tcx;
let hir = tcx.hir();
for local_id in hir.iter_local_def_id() {
for local_id in tcx.iter_local_def_id() {
let def_id = local_id.to_def_id();
let def_kind = tcx.opt_def_kind(local_id);
let Some(def_kind) = def_kind else { continue };
@ -1854,12 +1852,13 @@ fn encode_incoherent_impls(&mut self) -> LazyArray<IncoherentImpls> {
debug!("EncodeContext::encode_traits_and_impls()");
empty_proc_macro!(self);
let tcx = self.tcx;
let mut ctx = tcx.create_stable_hashing_context();
let mut all_impls: Vec<_> = tcx.crate_inherent_impls(()).incoherent_impls.iter().collect();
all_impls.sort_by_cached_key(|&(&simp, _)| {
let mut hasher = StableHasher::new();
simp.hash_stable(&mut ctx, &mut hasher);
hasher.finish::<Fingerprint>();
tcx.with_stable_hashing_context(|mut ctx| {
all_impls.sort_by_cached_key(|&(&simp, _)| {
let mut hasher = StableHasher::new();
simp.hash_stable(&mut ctx, &mut hasher);
hasher.finish::<Fingerprint>()
})
});
let all_impls: Vec<_> = all_impls
.into_iter()

View File

@ -71,8 +71,8 @@ impl<'tcx> DepContext for TyCtxt<'tcx> {
type DepKind = DepKind;
#[inline]
fn create_stable_hashing_context(&self) -> StableHashingContext<'_> {
TyCtxt::create_stable_hashing_context(*self)
fn with_stable_hashing_context<R>(&self, f: impl FnOnce(StableHashingContext<'_>) -> R) -> R {
TyCtxt::with_stable_hashing_context(*self, f)
}
#[inline]

View File

@ -218,13 +218,6 @@ pub fn local_def_id_to_hir_id(self, def_id: LocalDefId) -> HirId {
self.tcx.local_def_id_to_hir_id(def_id)
}
pub fn iter_local_def_id(self) -> impl Iterator<Item = LocalDefId> + 'hir {
// Create a dependency to the crate to be sure we re-execute this when the amount of
// definitions change.
self.tcx.ensure().hir_crate(());
self.tcx.definitions_untracked().iter_local_def_id()
}
/// Do not call this function directly. The query should be called.
pub(super) fn opt_def_kind(self, local_def_id: LocalDefId) -> Option<DefKind> {
let hir_id = self.local_def_id_to_hir_id(local_def_id);
@ -1141,34 +1134,35 @@ pub(super) fn crate_hash(tcx: TyCtxt<'_>, crate_num: CrateNum) -> Svh {
source_file_names.sort_unstable();
let mut hcx = tcx.create_stable_hashing_context();
let mut stable_hasher = StableHasher::new();
hir_body_hash.hash_stable(&mut hcx, &mut stable_hasher);
upstream_crates.hash_stable(&mut hcx, &mut stable_hasher);
source_file_names.hash_stable(&mut hcx, &mut stable_hasher);
if tcx.sess.opts.debugging_opts.incremental_relative_spans {
let definitions = &tcx.definitions_untracked();
let mut owner_spans: Vec<_> = krate
.owners
.iter_enumerated()
.filter_map(|(def_id, info)| {
let _ = info.as_owner()?;
let def_path_hash = definitions.def_path_hash(def_id);
let span = resolutions.source_span[def_id];
debug_assert_eq!(span.parent(), None);
Some((def_path_hash, span))
})
.collect();
owner_spans.sort_unstable_by_key(|bn| bn.0);
owner_spans.hash_stable(&mut hcx, &mut stable_hasher);
}
tcx.sess.opts.dep_tracking_hash(true).hash_stable(&mut hcx, &mut stable_hasher);
tcx.sess.local_stable_crate_id().hash_stable(&mut hcx, &mut stable_hasher);
// Hash visibility information since it does not appear in HIR.
resolutions.visibilities.hash_stable(&mut hcx, &mut stable_hasher);
resolutions.has_pub_restricted.hash_stable(&mut hcx, &mut stable_hasher);
let crate_hash: Fingerprint = tcx.with_stable_hashing_context(|mut hcx| {
let mut stable_hasher = StableHasher::new();
hir_body_hash.hash_stable(&mut hcx, &mut stable_hasher);
upstream_crates.hash_stable(&mut hcx, &mut stable_hasher);
source_file_names.hash_stable(&mut hcx, &mut stable_hasher);
if tcx.sess.opts.debugging_opts.incremental_relative_spans {
let definitions = tcx.definitions_untracked();
let mut owner_spans: Vec<_> = krate
.owners
.iter_enumerated()
.filter_map(|(def_id, info)| {
let _ = info.as_owner()?;
let def_path_hash = definitions.def_path_hash(def_id);
let span = resolutions.source_span[def_id];
debug_assert_eq!(span.parent(), None);
Some((def_path_hash, span))
})
.collect();
owner_spans.sort_unstable_by_key(|bn| bn.0);
owner_spans.hash_stable(&mut hcx, &mut stable_hasher);
}
tcx.sess.opts.dep_tracking_hash(true).hash_stable(&mut hcx, &mut stable_hasher);
tcx.sess.local_stable_crate_id().hash_stable(&mut hcx, &mut stable_hasher);
// Hash visibility information since it does not appear in HIR.
resolutions.visibilities.hash_stable(&mut hcx, &mut stable_hasher);
resolutions.has_pub_restricted.hash_stable(&mut hcx, &mut stable_hasher);
stable_hasher.finish()
});
let crate_hash: Fingerprint = stable_hasher.finish();
Svh::new(crate_hash.to_smaller_hash())
}

View File

@ -20,6 +20,12 @@
desc { "trigger a delay span bug" }
}
/// Create a new definition within the incr. comp. engine.
query register_def(_: ty::RawLocalDefId) -> LocalDefId {
eval_always
desc { "register a DefId with the incr. comp. engine" }
}
query resolutions(_: ()) -> &'tcx ty::ResolverOutputs {
eval_always
no_hash

View File

@ -32,12 +32,13 @@
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{self, Lock, Lrc, WorkerLocal};
use rustc_data_structures::sync::{self, Lock, Lrc, ReadGuard, RwLock, WorkerLocal};
use rustc_data_structures::vec_map::VecMap;
use rustc_errors::{DecorateLint, ErrorGuaranteed, LintDiagnosticBuilder, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LOCAL_CRATE};
use rustc_hir::definitions::Definitions;
use rustc_hir::intravisit::Visitor;
use rustc_hir::lang_items::LangItem;
use rustc_hir::{
@ -122,6 +123,9 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
type PlaceholderRegion = ty::PlaceholderRegion;
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
pub struct RawLocalDefId(LocalDefId);
/// A type that is not publicly constructable. This prevents people from making [`TyKind::Error`]s
/// except through the error-reporting functions on a [`tcx`][TyCtxt].
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
@ -1069,7 +1073,7 @@ pub struct GlobalCtxt<'tcx> {
/// Common consts, pre-interned for your convenience.
pub consts: CommonConsts<'tcx>,
definitions: rustc_hir::definitions::Definitions,
definitions: RwLock<Definitions>,
cstore: Box<CrateStoreDyn>,
/// Output of the resolver.
@ -1233,7 +1237,7 @@ pub fn create_global_ctxt(
s: &'tcx Session,
lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
arena: &'tcx WorkerLocal<Arena<'tcx>>,
definitions: rustc_hir::definitions::Definitions,
definitions: Definitions,
cstore: Box<CrateStoreDyn>,
untracked_resolutions: ty::ResolverOutputs,
krate: &'tcx hir::Crate<'tcx>,
@ -1265,7 +1269,7 @@ pub fn create_global_ctxt(
arena,
interners,
dep_graph,
definitions,
definitions: RwLock::new(definitions),
cstore,
untracked_resolutions,
prof: s.prof.clone(),
@ -1368,7 +1372,7 @@ pub fn features(self) -> &'tcx rustc_feature::Features {
pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey {
// Accessing the DefKey is ok, since it is part of DefPathHash.
if let Some(id) = id.as_local() {
self.definitions.def_key(id)
self.definitions_untracked().def_key(id)
} else {
self.cstore.def_key(id)
}
@ -1382,7 +1386,7 @@ pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey {
pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath {
// Accessing the DefPath is ok, since it is part of DefPathHash.
if let Some(id) = id.as_local() {
self.definitions.def_path(id)
self.definitions_untracked().def_path(id)
} else {
self.cstore.def_path(id)
}
@ -1392,7 +1396,7 @@ pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath {
pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash {
// Accessing the DefPathHash is ok, it is incr. comp. stable.
if let Some(def_id) = def_id.as_local() {
self.definitions.def_path_hash(def_id)
self.definitions_untracked().def_path_hash(def_id)
} else {
self.cstore.def_path_hash(def_id)
}
@ -1429,7 +1433,7 @@ pub fn def_path_hash_to_def_id(self, hash: DefPathHash, err: &mut dyn FnMut() ->
// If this is a DefPathHash from the local crate, we can look up the
// DefId in the tcx's `Definitions`.
if stable_crate_id == self.sess.local_stable_crate_id() {
self.definitions.local_def_path_hash_to_def_id(hash, err).to_def_id()
self.definitions.read().local_def_path_hash_to_def_id(hash, err).to_def_id()
} else {
// If this is a DefPathHash from an upstream crate, let the CrateStore map
// it to a DefId.
@ -1460,6 +1464,65 @@ pub fn def_path_debug_str(self, def_id: DefId) -> String {
)
}
/// Create a new definition within the incr. comp. engine.
pub fn create_def(self, parent: LocalDefId, data: hir::definitions::DefPathData) -> LocalDefId {
// The following call has the side effect of modifying the tables inside `definitions`.
// These very tables are relied on by the incr. comp. engine to decode DepNodes and to
// decode the on-disk cache.
let def_id = self.definitions.write().create_def(parent, data);
// We need to ensure that these side effects are re-run by the incr. comp. engine.
// When the incr. comp. engine considers marking this query green, `eval_always` forces it
// to be re-executed. Because the `RawLocalDefId` parameter cannot be reconstructed from the
// DepNode, the only way to re-execute it is to re-run the caller query. Luckily, we are
// inside that caller query, so the definition is properly re-created.
debug_assert!({
use rustc_query_system::dep_graph::{DepContext, DepNodeParams};
self.is_eval_always(crate::dep_graph::DepKind::register_def)
&& !<RawLocalDefId as DepNodeParams<TyCtxt<'_>>>::fingerprint_style()
.reconstructible()
});
// Any LocalDefId that is used within queries, whether as a key or in a result, either:
// - has been created before the construction of the TyCtxt, or
// - has been created by this very call to `register_def`.
// As a consequence, such a LocalDefId is always re-created before it is needed by the incr.
// comp. engine itself.
self.register_def(RawLocalDefId(def_id))
}
pub fn iter_local_def_id(self) -> impl Iterator<Item = LocalDefId> + 'tcx {
// Create a dependency on the crate to be sure we re-execute this when the number of
// definitions changes.
self.ensure().hir_crate(());
// Leak a read lock once we start iterating on definitions, to prevent adding new ones
// while iterating. If some query needs to add definitions, it should be `ensure`d above.
let definitions = self.definitions.leak();
definitions.iter_local_def_id()
}
pub fn def_path_table(self) -> &'tcx rustc_hir::definitions::DefPathTable {
// Create a dependency on the crate to be sure we re-execute this when the number of
// definitions changes.
self.ensure().hir_crate(());
// Leak a read lock once we start iterating on definitions, to prevent adding new ones
// while iterating. If some query needs to add definitions, it should be `ensure`d above.
let definitions = self.definitions.leak();
definitions.def_path_table()
}
pub fn def_path_hash_to_def_index_map(
self,
) -> &'tcx rustc_hir::def_path_hash_map::DefPathHashMap {
// Create a dependency on the crate to be sure we re-execute this when the number of
// definitions changes.
self.ensure().hir_crate(());
// Leak a read lock once we start iterating on definitions, to prevent adding new ones
// while iterating. If some query needs to add definitions, it should be `ensure`d above.
let definitions = self.definitions.leak();
definitions.def_path_hash_to_def_index_map()
}
/// Note that this is *untracked* and should only be used within the query
/// system if the result is otherwise tracked through queries
pub fn cstore_untracked(self) -> &'tcx CrateStoreDyn {
@ -1468,8 +1531,9 @@ pub fn cstore_untracked(self) -> &'tcx CrateStoreDyn {
/// Note that this is *untracked* and should only be used within the query
/// system if the result is otherwise tracked through queries
pub fn definitions_untracked(self) -> &'tcx hir::definitions::Definitions {
&self.definitions
#[inline]
pub fn definitions_untracked(self) -> ReadGuard<'tcx, Definitions> {
self.definitions.read()
}
/// Note that this is *untracked* and should only be used within the query
@ -1480,23 +1544,33 @@ pub fn source_span_untracked(self, def_id: LocalDefId) -> Span {
}
#[inline(always)]
pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> {
StableHashingContext::new(
pub fn with_stable_hashing_context<R>(
self,
f: impl FnOnce(StableHashingContext<'_>) -> R,
) -> R {
let definitions = self.definitions_untracked();
let hcx = StableHashingContext::new(
self.sess,
&self.definitions,
&*definitions,
&*self.cstore,
&self.untracked_resolutions.source_span,
)
);
f(hcx)
}
#[inline(always)]
pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> {
StableHashingContext::ignore_spans(
pub fn with_no_span_stable_hashing_context<R>(
self,
f: impl FnOnce(StableHashingContext<'_>) -> R,
) -> R {
let definitions = self.definitions_untracked();
let hcx = StableHashingContext::ignore_spans(
self.sess,
&self.definitions,
&*definitions,
&*self.cstore,
&self.untracked_resolutions.source_span,
)
);
f(hcx)
}
pub fn serialize_query_result_cache(self, encoder: FileEncoder) -> FileEncodeResult {
@ -2304,7 +2378,7 @@ pub fn mk_ty(self, st: TyKind<'tcx>) -> Ty<'tcx> {
self.interners.intern_ty(
st,
self.sess,
&self.definitions,
&self.definitions.read(),
&*self.cstore,
// This is only used to create a stable hashing context.
&self.untracked_resolutions.source_span,
@ -2953,4 +3027,5 @@ pub fn provide(providers: &mut ty::query::Providers) {
// We want to check if the panic handler was defined in this crate
tcx.lang_items().panic_impl().map_or(false, |did| did.is_local())
};
providers.register_def = |_, raw_id| raw_id.0;
}
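
For orientation, a hypothetical use of the new API might look like the sketch below. The provider name and the `DefPathData` variant are illustrative assumptions, not part of this commit; the point is that the definition is created from inside a running query, and the `register_def` indirection is what lets the incr. comp. engine replay that side effect by re-running the enclosing query.

    // Hypothetical query provider (illustration only, not added by this commit).
    fn hypothetical_provider<'tcx>(tcx: TyCtxt<'tcx>, owner: LocalDefId) -> LocalDefId {
        // Takes the `definitions` write lock to mint a fresh def, then routes the new
        // id through the eval_always `register_def` query so the dep graph records it.
        tcx.create_def(owner, hir::definitions::DefPathData::AnonConst)
    }
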

View File

@ -72,8 +72,8 @@
pub use self::context::{
tls, CanonicalUserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations,
CtxtInterners, DelaySpanBugEmitted, FreeRegionInfo, GeneratorDiagnosticData,
GeneratorInteriorTypeCause, GlobalCtxt, Lift, OnDiskCache, TyCtxt, TypeckResults, UserType,
UserTypeAnnotationIndex,
GeneratorInteriorTypeCause, GlobalCtxt, Lift, OnDiskCache, RawLocalDefId, TyCtxt,
TypeckResults, UserType, UserTypeAnnotationIndex,
};
pub use self::instance::{Instance, InstanceDef};
pub use self::list::List;

View File

@ -142,16 +142,16 @@ impl<'tcx> TyCtxt<'tcx> {
/// Creates a hash of the type `Ty` which will be the same no matter what crate
/// context it's calculated within. This is used by the `type_id` intrinsic.
pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
let mut hasher = StableHasher::new();
let mut hcx = self.create_stable_hashing_context();
// We want the type_id be independent of the types free regions, so we
// erase them. The erase_regions() call will also anonymize bound
// regions, which is desirable too.
let ty = self.erase_regions(ty);
hcx.while_hashing_spans(false, |hcx| ty.hash_stable(hcx, &mut hasher));
hasher.finish()
self.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
hcx.while_hashing_spans(false, |hcx| ty.hash_stable(hcx, &mut hasher));
hasher.finish()
})
}
pub fn res_generics_def_id(self, res: Res) -> Option<DefId> {

View File

@ -15,7 +15,6 @@
use crate::MirPass;
use rustc_data_structures::graph::WithNumNodes;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
use rustc_index::vec::IndexVec;
use rustc_middle::hir;
@ -576,12 +575,6 @@ fn get_body_span<'tcx>(
fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, hir_body: &'tcx rustc_hir::Body<'tcx>) -> u64 {
// FIXME(cjgillot) Stop hashing HIR manually here.
let mut hcx = tcx.create_no_span_stable_hashing_context();
let mut stable_hasher = StableHasher::new();
let owner = hir_body.id().hir_id.owner;
let bodies = &tcx.hir_owner_nodes(owner).unwrap().bodies;
hcx.with_hir_bodies(false, owner, bodies, |hcx| {
hir_body.value.hash_stable(hcx, &mut stable_hasher)
});
stable_hasher.finish()
tcx.hir_owner_nodes(owner).unwrap().hash_including_bodies.to_smaller_hash()
}

View File

@ -588,7 +588,7 @@ fn dest_needs_borrow(place: Place<'_>) -> bool {
);
expn_data.def_site = callee_body.span;
let expn_data =
LocalExpnId::fresh(expn_data, self.tcx.create_stable_hashing_context());
self.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
let mut integrator = Integrator {
args: &args,
new_locals: Local::new(caller_body.local_decls.len())..,

View File

@ -39,6 +39,17 @@ fn default_span(&self, _: TyCtxt<'_>) -> Span {
}
}
impl Key for ty::RawLocalDefId {
#[inline(always)]
fn query_crate_is_local(&self) -> bool {
true
}
fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
impl<'tcx> Key for ty::InstanceDef<'tcx> {
#[inline(always)]
fn query_crate_is_local(&self) -> bool {

View File

@ -653,12 +653,11 @@ fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Self {
#[cfg(debug_assertions)]
{
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
let mut hcx = decoder.tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
hcx.while_hashing_spans(true, |hcx| {
expn_id.expn_data().hash_stable(hcx, &mut hasher)
let local_hash: u64 = decoder.tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
expn_id.expn_data().hash_stable(&mut hcx, &mut hasher);
hasher.finish()
});
let local_hash: u64 = hasher.finish();
debug_assert_eq!(hash.local_hash(), local_hash);
}

View File

@ -291,11 +291,12 @@ pub fn $name<$tcx>(tcx: QueryCtxt<$tcx>, key: query_keys::$name<$tcx>) -> QueryS
.and_then(|def_id| tcx.opt_def_kind(def_id))
};
let hash = || {
let mut hcx = tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
std::mem::discriminant(&kind).hash_stable(&mut hcx, &mut hasher);
key.hash_stable(&mut hcx, &mut hasher);
hasher.finish::<u64>()
tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
std::mem::discriminant(&kind).hash_stable(&mut hcx, &mut hasher);
key.hash_stable(&mut hcx, &mut hasher);
hasher.finish::<u64>()
})
};
QueryStackFrame::new(name, description, span, def_kind, hash)

View File

@ -131,12 +131,11 @@ impl<Ctxt: DepContext, T> DepNodeParams<Ctxt> for T
#[inline(always)]
default fn to_fingerprint(&self, tcx: Ctxt) -> Fingerprint {
let mut hcx = tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
self.hash_stable(&mut hcx, &mut hasher);
hasher.finish()
tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
self.hash_stable(&mut hcx, &mut hasher);
hasher.finish()
})
}
#[inline(always)]

View File

@ -328,10 +328,8 @@ fn with_task_impl<Ctxt: HasDepContext<DepKind = K>, A: Debug, R>(
let dcx = cx.dep_context();
let hashing_timer = dcx.profiler().incr_result_hashing();
let current_fingerprint = hash_result.map(|f| {
let mut hcx = dcx.create_stable_hashing_context();
f(&mut hcx, &result)
});
let current_fingerprint =
hash_result.map(|f| dcx.with_stable_hashing_context(|mut hcx| f(&mut hcx, &result)));
let print_status = cfg!(debug_assertions) && dcx.sess().opts.debugging_opts.dep_tasks;

View File

@ -23,7 +23,7 @@ pub trait DepContext: Copy {
type DepKind: self::DepKind;
/// Create a hashing context for hashing new results.
fn create_stable_hashing_context(&self) -> StableHashingContext<'_>;
fn with_stable_hashing_context<R>(&self, f: impl FnOnce(StableHashingContext<'_>) -> R) -> R;
/// Access the DepGraph.
fn dep_graph(&self) -> &DepGraph<Self::DepKind>;

View File

@ -1,4 +1,5 @@
use crate::ich;
use rustc_ast as ast;
use rustc_data_structures::sorted_map::SortedMap;
use rustc_data_structures::stable_hasher::{HashStable, HashingControls, StableHasher};

View File

@ -542,8 +542,7 @@ fn incremental_verify_ich<CTX, K, V: Debug>(
debug!("BEGIN verify_ich({:?})", dep_node);
let new_hash = query.hash_result.map_or(Fingerprint::ZERO, |f| {
let mut hcx = tcx.create_stable_hashing_context();
f(&mut hcx, result)
tcx.with_stable_hashing_context(|mut hcx| f(&mut hcx, result))
});
let old_hash = tcx.dep_graph().prev_fingerprint_of(dep_node);
debug!("END verify_ich({:?})", dep_node);

View File

@ -96,47 +96,48 @@ fn get_symbol_hash<'tcx>(
let substs = instance.substs;
debug!("get_symbol_hash(def_id={:?}, parameters={:?})", def_id, substs);
let mut hasher = StableHasher::new();
let mut hcx = tcx.create_stable_hashing_context();
tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
record_time(&tcx.sess.perf_stats.symbol_hash_time, || {
// the main symbol name is not necessarily unique; hash in the
// compiler's internal def-path, guaranteeing each symbol has a
// truly unique path
tcx.def_path_hash(def_id).hash_stable(&mut hcx, &mut hasher);
record_time(&tcx.sess.perf_stats.symbol_hash_time, || {
// the main symbol name is not necessarily unique; hash in the
// compiler's internal def-path, guaranteeing each symbol has a
// truly unique path
tcx.def_path_hash(def_id).hash_stable(&mut hcx, &mut hasher);
// Include the main item-type. Note that, in this case, the
// assertions about `needs_subst` may not hold, but this item-type
// ought to be the same for every reference anyway.
assert!(!item_type.has_erasable_regions());
hcx.while_hashing_spans(false, |hcx| {
item_type.hash_stable(hcx, &mut hasher);
// Include the main item-type. Note that, in this case, the
// assertions about `needs_subst` may not hold, but this item-type
// ought to be the same for every reference anyway.
assert!(!item_type.has_erasable_regions());
hcx.while_hashing_spans(false, |hcx| {
item_type.hash_stable(hcx, &mut hasher);
// If this is a function, we hash the signature as well.
// This is not *strictly* needed, but it may help in some
// situations, see the `run-make/a-b-a-linker-guard` test.
if let ty::FnDef(..) = item_type.kind() {
item_type.fn_sig(tcx).hash_stable(hcx, &mut hasher);
}
// If this is a function, we hash the signature as well.
// This is not *strictly* needed, but it may help in some
// situations, see the `run-make/a-b-a-linker-guard` test.
if let ty::FnDef(..) = item_type.kind() {
item_type.fn_sig(tcx).hash_stable(hcx, &mut hasher);
}
// also include any type parameters (for generic items)
substs.hash_stable(hcx, &mut hasher);
// also include any type parameters (for generic items)
substs.hash_stable(hcx, &mut hasher);
if let Some(instantiating_crate) = instantiating_crate {
tcx.def_path_hash(instantiating_crate.as_def_id())
.stable_crate_id()
.hash_stable(hcx, &mut hasher);
}
if let Some(instantiating_crate) = instantiating_crate {
tcx.def_path_hash(instantiating_crate.as_def_id())
.stable_crate_id()
.hash_stable(hcx, &mut hasher);
}
// We want to avoid accidental collision between different types of instances.
// Especially, `VtableShim`s and `ReifyShim`s may overlap with their original
// instances without this.
discriminant(&instance.def).hash_stable(hcx, &mut hasher);
// We want to avoid accidental collision between different types of instances.
// Especially, `VtableShim`s and `ReifyShim`s may overlap with their original
// instances without this.
discriminant(&instance.def).hash_stable(hcx, &mut hasher);
});
});
});
// 64 bits should be enough to avoid collisions.
hasher.finish::<u64>()
// 64 bits should be enough to avoid collisions.
hasher.finish::<u64>()
})
}
// Follow C++ namespace-mangling style, see

View File

@ -53,7 +53,8 @@ fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeB
None,
None,
);
let expn_id = LocalExpnId::fresh(expn_data, self.cx.tcx.create_stable_hashing_context());
let expn_id =
self.cx.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
let span = DUMMY_SP.fresh_expansion(expn_id);
let is_empty = rustc_driver::catch_fatal_errors(|| {