Auto merge of #42332 - michaelwoerister:no-more-retracing, r=nikomatsakis
incr.comp.: Use DefPathHash-based DepNodes in the serialized DepGraph and remove obsolete DefIdDirectory

With this PR we no longer store the dep-graph as a set of `DepNode<IndexIntoDefIdDirectory>` but as a set of `DepNode<DefPathHash>`. Since a `DefPathHash` is a global identifier that is valid across compilation sessions, we don't need the `DefIdDirectory` anymore. Since a `DepNode<DefPathHash>` is bigger than a `DepNode<IndexIntoDefIdDirectory>` and our on-disk encoding of the dep-graph is inefficient, this PR will probably increase the amount of space the dep-graph takes up on disk. I'm in the process of gathering some performance data.

The changes in here are a step towards implementing ICH-based `DepNodes` (#42294).

r? @nikomatsakis
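Illustrative sketch of the idea (not code from this commit; `DefPathHash`, `DefId`, and `DepNode` below are simplified stand-ins for the compiler's types): a dep-node keyed by a stable hash can be retraced in the next compilation session with a plain hash-map lookup, so no separate directory has to be serialized alongside the graph.

```rust
use std::collections::HashMap;

// Simplified stand-ins for the compiler's types.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct DefPathHash(u64);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct DefId(u32);

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum DepNode<D> {
    Hir(D),
    TypeckTables(D),
}

impl<D> DepNode<D> {
    // Mirror of the `map_def` pattern used throughout the PR: rebuild the
    // node with a different "def" payload, failing if the payload is gone.
    fn map_def<E>(&self, mut op: impl FnMut(&D) -> Option<E>) -> Option<DepNode<E>> {
        match self {
            DepNode::Hir(d) => op(d).map(DepNode::Hir),
            DepNode::TypeckTables(d) => op(d).map(DepNode::TypeckTables),
        }
    }
}

fn main() {
    // In the new scheme, the serialized graph stores DepNode<DefPathHash>.
    let saved: Vec<DepNode<DefPathHash>> =
        vec![DepNode::Hir(DefPathHash(0xabcd)), DepNode::TypeckTables(DefPathHash(0x1234))];

    // At the start of the next session, a DefPathHash -> DefId map is built
    // once (the PR does this while creating the TyCtxt) ...
    let def_path_hash_to_def_id: HashMap<DefPathHash, DefId> =
        [(DefPathHash(0xabcd), DefId(7))].into_iter().collect();

    // ... and "retracing" is just a map lookup; removed items simply fail.
    for node in &saved {
        let retraced: Option<DepNode<DefId>> =
            node.map_def(|hash| def_path_hash_to_def_id.get(hash).copied());
        println!("{:?} -> {:?}", node, retraced);
    }
}
```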
commit 2f2d7413ff
@@ -36,7 +36,7 @@
pub struct DefPathTable {
index_to_key: [Vec<DefKey>; 2],
key_to_index: FxHashMap<DefKey, DefIndex>,
def_path_hashes: [Vec<Fingerprint>; 2],
def_path_hashes: [Vec<DefPathHash>; 2],
}

// Unfortunately we have to provide a manual impl of Clone because of the
@@ -57,7 +57,7 @@ impl DefPathTable {

fn allocate(&mut self,
key: DefKey,
def_path_hash: Fingerprint,
def_path_hash: DefPathHash,
address_space: DefIndexAddressSpace)
-> DefIndex {
let index = {
@@ -81,7 +81,7 @@ pub fn def_key(&self, index: DefIndex) -> DefKey {
}

#[inline(always)]
pub fn def_path_hash(&self, index: DefIndex) -> Fingerprint {
pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
self.def_path_hashes[index.address_space().index()]
[index.as_array_index()]
}
@@ -126,6 +126,30 @@ pub fn retrace_path(&self,

Some(index)
}

pub fn add_def_path_hashes_to(&self,
cnum: CrateNum,
out: &mut FxHashMap<DefPathHash, DefId>) {
for address_space in &[DefIndexAddressSpace::Low, DefIndexAddressSpace::High] {
let start_index = address_space.start();
out.extend(
(&self.def_path_hashes[address_space.index()])
.iter()
.enumerate()
.map(|(index, &hash)| {
let def_id = DefId {
krate: cnum,
index: DefIndex::new(index + start_index),
};
(hash, def_id)
})
);
}
}

pub fn size(&self) -> usize {
self.key_to_index.len()
}
}

@@ -148,8 +172,8 @@ fn decode<D: Decoder>(d: &mut D) -> Result<DefPathTable, D::Error> {
let index_to_key_lo: Vec<DefKey> = Decodable::decode(d)?;
let index_to_key_hi: Vec<DefKey> = Decodable::decode(d)?;

let def_path_hashes_lo: Vec<Fingerprint> = Decodable::decode(d)?;
let def_path_hashes_hi: Vec<Fingerprint> = Decodable::decode(d)?;
let def_path_hashes_lo: Vec<DefPathHash> = Decodable::decode(d)?;
let def_path_hashes_hi: Vec<DefPathHash> = Decodable::decode(d)?;

let index_to_key = [index_to_key_lo, index_to_key_hi];
let def_path_hashes = [def_path_hashes_lo, def_path_hashes_hi];
@@ -216,7 +240,7 @@ pub struct DefKey {
}

impl DefKey {
fn compute_stable_hash(&self, parent_hash: Fingerprint) -> Fingerprint {
fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash {
let mut hasher = StableHasher::new();

// We hash a 0u8 here to disambiguate between regular DefPath hashes,
@@ -224,17 +248,17 @@ fn compute_stable_hash(&self, parent_hash: Fingerprint) -> Fingerprint {
0u8.hash(&mut hasher);
parent_hash.hash(&mut hasher);
self.disambiguated_data.hash(&mut hasher);
hasher.finish()
DefPathHash(hasher.finish())
}

fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> Fingerprint {
fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash {
let mut hasher = StableHasher::new();
// Disambiguate this from a regular DefPath hash,
// see compute_stable_hash() above.
1u8.hash(&mut hasher);
crate_name.hash(&mut hasher);
crate_disambiguator.hash(&mut hasher);
hasher.finish()
DefPathHash(hasher.finish())
}
}

@@ -296,7 +320,9 @@ pub fn to_string(&self, tcx: TyCtxt) -> String {

s.push_str(&tcx.original_crate_name(self.krate).as_str());
s.push_str("/");
s.push_str(&tcx.crate_disambiguator(self.krate).as_str());
// Don't print the whole crate disambiguator. That's just annoying in
// debug output.
s.push_str(&tcx.crate_disambiguator(self.krate).as_str()[..7]);

for component in &self.data {
write!(s,
@@ -372,6 +398,12 @@ pub enum DefPathData {
Typeof,
}

#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Debug,
RustcEncodable, RustcDecodable)]
pub struct DefPathHash(pub Fingerprint);

impl_stable_hash_for!(tuple_struct DefPathHash { fingerprint });

impl Definitions {
/// Create new empty definition map.
pub fn new() -> Definitions {
@@ -404,7 +436,7 @@ pub fn def_key(&self, index: DefIndex) -> DefKey {
}

#[inline(always)]
pub fn def_path_hash(&self, index: DefIndex) -> Fingerprint {
pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
self.table.def_path_hash(index)
}

@@ -13,7 +13,7 @@
use self::collector::NodeCollector;
pub use self::def_collector::{DefCollector, MacroInvocationData};
pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData,
DisambiguatedDefPathData};
DisambiguatedDefPathData, DefPathHash};

use dep_graph::{DepGraph, DepNode};

@@ -10,6 +10,7 @@

use hir;
use hir::def_id::DefId;
use hir::map::DefPathHash;
use ich::{self, CachingCodemapView};
use session::config::DebugInfoLevel::NoDebugInfo;
use ty;
@@ -115,7 +116,7 @@ pub fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx> {
}

#[inline]
pub fn def_path_hash(&mut self, def_id: DefId) -> ich::Fingerprint {
pub fn def_path_hash(&mut self, def_id: DefId) -> DefPathHash {
self.tcx.def_path_hash(def_id)
}

@@ -26,7 +26,8 @@
use dep_graph::DepNode;
use hir::def_id::{CrateNum, DefId, DefIndex};
use hir::map as hir_map;
use hir::map::definitions::{Definitions, DefKey, DisambiguatedDefPathData};
use hir::map::definitions::{Definitions, DefKey, DisambiguatedDefPathData,
DefPathTable};
use hir::svh::Svh;
use ich;
use middle::lang_items;
@@ -281,7 +282,8 @@ fn retrace_path(&self,
-> Option<DefId>;
fn def_key(&self, def: DefId) -> DefKey;
fn def_path(&self, def: DefId) -> hir_map::DefPath;
fn def_path_hash(&self, def: DefId) -> ich::Fingerprint;
fn def_path_hash(&self, def: DefId) -> hir_map::DefPathHash;
fn def_path_table(&self, cnum: CrateNum) -> Rc<DefPathTable>;
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>;
fn item_children(&self, did: DefId) -> Vec<def::Export>;
fn load_macro(&self, did: DefId, sess: &Session) -> LoadedMacro;
@@ -412,8 +414,11 @@ fn def_key(&self, def: DefId) -> DefKey { bug!("def_key") }
fn def_path(&self, def: DefId) -> hir_map::DefPath {
bug!("relative_def_path")
}
fn def_path_hash(&self, def: DefId) -> ich::Fingerprint {
bug!("wa")
fn def_path_hash(&self, def: DefId) -> hir_map::DefPathHash {
bug!("def_path_hash")
}
fn def_path_table(&self, cnum: CrateNum) -> Rc<DefPathTable> {
bug!("def_path_table")
}
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name> { bug!("struct_field_names") }
fn item_children(&self, did: DefId) -> Vec<def::Export> { bug!("item_children") }

@@ -18,7 +18,7 @@
use hir::def::{Def, ExportMap};
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use hir::map as hir_map;
use hir::map::DisambiguatedDefPathData;
use hir::map::{DisambiguatedDefPathData, DefPathHash};
use middle::free_region::FreeRegionMap;
use middle::lang_items;
use middle::resolve_lifetime;
@@ -461,6 +461,10 @@ pub struct GlobalCtxt<'tcx> {

pub hir: hir_map::Map<'tcx>,

/// A map from DefPathHash -> DefId. Includes DefIds from the local crate
/// as well as all upstream crates. Only populated in incremental mode.
pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,

pub maps: maps::Maps<'tcx>,

pub mir_passes: Rc<Passes>,
@@ -686,6 +690,40 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
let max_cnum = s.cstore.crates().iter().map(|c| c.as_usize()).max().unwrap_or(0);
let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
providers[LOCAL_CRATE] = local_providers;

let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
let upstream_def_path_tables: Vec<(CrateNum, Rc<_>)> = s
.cstore
.crates()
.iter()
.map(|&cnum| (cnum, s.cstore.def_path_table(cnum)))
.collect();

let def_path_tables = || {
upstream_def_path_tables
.iter()
.map(|&(cnum, ref rc)| (cnum, &**rc))
.chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
};

// Precompute the capacity of the hashmap so we don't have to
// re-allocate when populating it.
let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();

let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
capacity,
::std::default::Default::default()
);

for (cnum, def_path_table) in def_path_tables() {
def_path_table.add_def_path_hashes_to(cnum, &mut map);
}

Some(map)
} else {
None
};

tls::enter_global(GlobalCtxt {
sess: s,
trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()),
@@ -699,6 +737,7 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
export_map: resolutions.export_map,
fulfilled_predicates: RefCell::new(fulfilled_predicates),
hir: hir,
def_path_hash_to_def_id: def_path_hash_to_def_id,
maps: maps::Maps::new(providers),
mir_passes,
freevars: RefCell::new(resolutions.freevars),

@@ -19,7 +19,7 @@
use hir::{map as hir_map, FreevarMap, TraitMap};
use hir::def::{Def, CtorKind, ExportMap};
use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use ich::{self, StableHashingContext};
use ich::StableHashingContext;
use middle::const_val::ConstVal;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::privacy::AccessLevels;
@@ -2167,7 +2167,7 @@ pub fn def_path(self, id: DefId) -> hir_map::DefPath {
}

#[inline]
pub fn def_path_hash(self, def_id: DefId) -> ich::Fingerprint {
pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
if def_id.is_local() {
self.hir.definitions().def_path_hash(def_id.index)
} else {

@@ -11,6 +11,7 @@
//! This module contains TypeVariants and its major components

use hir::def_id::DefId;
use hir::map::DefPathHash;

use middle::region;
use ty::subst::Substs;
@@ -29,7 +30,6 @@
use serialize;

use hir;
use ich;

use self::InferTy::*;
use self::TypeVariants::*;
@@ -873,7 +873,7 @@ pub fn item_name(&self) -> Name {
self.item_name // safe to skip the binder to access a name
}

pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (ich::Fingerprint, InternedString) {
pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (DefPathHash, InternedString) {
// We want something here that is stable across crate boundaries.
// The DefId isn't but the `deterministic_hash` of the corresponding
// DefPath is.
@@ -908,7 +908,7 @@ pub fn item_name(&self) -> Name {
self.skip_binder().item_name()
}

pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (ich::Fingerprint, InternedString) {
pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (DefPathHash, InternedString) {
self.skip_binder().sort_key(tcx)
}

@@ -9,7 +9,7 @@
// except according to those terms.

use hir::def_id::DefId;
use ich::Fingerprint;
use hir::map::DefPathHash;
use traits::specialization_graph;
use ty::fast_reject;
use ty::fold::TypeFoldable;
@@ -33,7 +33,7 @@ pub struct TraitDef {

/// The ICH of this trait's DefPath, cached here so it doesn't have to be
/// recomputed all the time.
pub def_path_hash: Fingerprint,
pub def_path_hash: DefPathHash,
}

// We don't store the list of impls in a flat list because each cached list of
@@ -95,7 +95,7 @@ pub fn new(def_id: DefId,
unsafety: hir::Unsafety,
paren_sugar: bool,
has_default_impl: bool,
def_path_hash: Fingerprint)
def_path_hash: DefPathHash)
-> TraitDef {
TraitDef {
def_id,

@@ -32,6 +32,7 @@
use rustc::dep_graph::DepNode;
use rustc::hir;
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::hir::map::DefPathHash;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ich::{Fingerprint, StableHashingContext};
use rustc::ty::TyCtxt;
@@ -218,7 +219,7 @@ fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
{
let tcx = self.hcx.tcx();

let mut impls: Vec<(Fingerprint, Fingerprint)> = krate
let mut impls: Vec<(DefPathHash, Fingerprint)> = krate
.trait_impls
.iter()
.map(|(&trait_id, impls)| {

@@ -12,22 +12,30 @@

use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId};
use rustc::hir::def_id::DefIndex;
use rustc::hir::map::DefPathHash;
use rustc::ich::Fingerprint;
use rustc::middle::cstore::EncodedMetadataHash;
use std::sync::Arc;
use rustc_data_structures::fx::FxHashMap;

use super::directory::DefPathIndex;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};

/// Data for use when recompiling the **current crate**.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedDepGraph {
pub edges: Vec<SerializedEdgeSet>,
/// The set of all DepNodes in the graph
pub nodes: IndexVec<DepNodeIndex, DepNode<DefPathHash>>,
/// For each DepNode, stores the list of edges originating from that
/// DepNode. Encoded as a [start, end) pair indexing into edge_list_data,
/// which holds the actual DepNodeIndices of the target nodes.
pub edge_list_indices: Vec<(u32, u32)>,
/// A flattened list of all edge targets in the graph. Edge sources are
/// implicit in edge_list_indices.
pub edge_list_data: Vec<DepNodeIndex>,

/// These are output nodes that have no incoming edges. We track
/// these separately so that when we reload all edges, we don't
/// lose track of these nodes.
pub bootstrap_outputs: Vec<DepNode<DefPathIndex>>,
pub bootstrap_outputs: Vec<DepNode<DefPathHash>>,

/// These are hashes of two things:
/// - the HIR nodes in this crate
@@ -51,18 +59,36 @@ pub struct SerializedDepGraph {
pub hashes: Vec<SerializedHash>,
}

/// Represents a set of "reduced" dependency edge. We group the
/// outgoing edges from a single source together.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedEdgeSet {
pub source: DepNode<DefPathIndex>,
pub targets: Vec<DepNode<DefPathIndex>>
/// The index of a DepNode in the SerializedDepGraph::nodes array.
#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug,
RustcEncodable, RustcDecodable)]
pub struct DepNodeIndex(pub u32);

impl DepNodeIndex {
#[inline]
pub fn new(idx: usize) -> DepNodeIndex {
assert!(idx <= ::std::u32::MAX as usize);
DepNodeIndex(idx as u32)
}
}

impl Idx for DepNodeIndex {
#[inline]
fn new(idx: usize) -> Self {
assert!(idx <= ::std::u32::MAX as usize);
DepNodeIndex(idx as u32)
}

#[inline]
fn index(self) -> usize {
self.0 as usize
}
}

#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedHash {
/// def-id of thing being hashed
pub dep_node: DepNode<DefPathIndex>,
pub dep_node: DepNode<DefPathHash>,

/// the hash as of previous compilation, computed by code in
/// `hash` module
@@ -115,5 +141,5 @@ pub struct SerializedMetadataHashes {
/// is only populated if -Z query-dep-graph is specified. It will be
/// empty otherwise. Importing crates are perfectly happy with just having
/// the DefIndex.
pub index_map: FxHashMap<DefIndex, DefPathIndex>
pub index_map: FxHashMap<DefIndex, DefPathHash>
}
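The new `SerializedDepGraph` above replaces the old per-source `SerializedEdgeSet` list with a compact adjacency list: node `i`'s edge targets are `edge_list_data[start..end]` where `(start, end) = edge_list_indices[i]`. A small self-contained sketch of that encoding, using plain `String`/`u32` stand-ins rather than the compiler's `DepNode<DefPathHash>` and `DepNodeIndex` types:

```rust
// Minimal sketch of the [start, end) adjacency-list encoding used by the new
// SerializedDepGraph. Node payloads are Strings here; the indexing scheme is
// the same as in the serialized graph.
struct Graph {
    nodes: Vec<String>,
    edge_list_indices: Vec<(u32, u32)>, // one (start, end) pair per node
    edge_list_data: Vec<u32>,           // flattened target-node indices
}

impl Graph {
    fn from_edges(nodes: Vec<String>, edges: &[(usize, usize)]) -> Graph {
        let mut edge_list_indices = Vec::with_capacity(nodes.len());
        let mut edge_list_data = Vec::new();
        for source in 0..nodes.len() {
            let start = edge_list_data.len() as u32;
            // Collect the targets of all edges originating from `source`.
            for &(_, target) in edges.iter().filter(|&&(s, _)| s == source) {
                edge_list_data.push(target as u32);
            }
            let end = edge_list_data.len() as u32;
            edge_list_indices.push((start, end));
        }
        Graph { nodes, edge_list_indices, edge_list_data }
    }

    // Decode a node's targets by slicing the flat edge list.
    fn targets(&self, node: usize) -> impl Iterator<Item = &String> {
        let (start, end) = self.edge_list_indices[node];
        self.edge_list_data[start as usize..end as usize]
            .iter()
            .map(move |&t| &self.nodes[t as usize])
    }
}

fn main() {
    let g = Graph::from_edges(
        vec!["Hir(a)".into(), "TypeckTables(a)".into(), "MirShim(a)".into()],
        &[(1, 0), (2, 0), (2, 1)], // edges as (source index, target index)
    );
    for (i, name) in g.nodes.iter().enumerate() {
        let targets: Vec<_> = g.targets(i).collect();
        println!("{} -> {:?}", name, targets);
    }
}
```

One flat `Vec` of indices plus a pair of offsets per node avoids repeating full dep-node keys for every edge set, which keeps the on-disk encoding compact.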
@@ -1,204 +0,0 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Code to convert a DefId into a DefPath (when serializing) and then
//! back again (when deserializing). Note that the new DefId
//! necessarily will not be the same as the old (and of course the
//! item might even be removed in the meantime).

use rustc::dep_graph::DepNode;
use rustc::hir::map::DefPath;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::nodemap::DefIdMap;
use std::fmt::{self, Debug};
use std::iter::once;
use std::collections::HashMap;

/// Index into the DefIdDirectory
#[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq,
RustcEncodable, RustcDecodable)]
pub struct DefPathIndex {
index: u32
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct DefIdDirectory {
// N.B. don't use Removable here because these def-ids are loaded
// directly without remapping, so loading them should not fail.
paths: Vec<DefPath>,

// For each crate, saves the crate-name/disambiguator so that
// later we can match crate-numbers up again.
krates: Vec<CrateInfo>,
}

#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct CrateInfo {
krate: CrateNum,
name: String,
disambiguator: String,
}

impl DefIdDirectory {
pub fn new(krates: Vec<CrateInfo>) -> DefIdDirectory {
DefIdDirectory { paths: vec![], krates: krates }
}

fn max_current_crate(&self, tcx: TyCtxt) -> CrateNum {
tcx.sess.cstore.crates()
.into_iter()
.max()
.unwrap_or(LOCAL_CRATE)
}

/// Returns a string form for `index`; useful for debugging
pub fn def_path_string(&self, tcx: TyCtxt, index: DefPathIndex) -> String {
let path = &self.paths[index.index as usize];
if self.krate_still_valid(tcx, self.max_current_crate(tcx), path.krate) {
path.to_string(tcx)
} else {
format!("<crate {} changed>", path.krate)
}
}

pub fn krate_still_valid(&self,
tcx: TyCtxt,
max_current_crate: CrateNum,
krate: CrateNum) -> bool {
// Check that the crate-number still matches. For now, if it
// doesn't, just return None. We could do better, such as
// finding the new number.

if krate > max_current_crate {
false
} else {
let old_info = &self.krates[krate.as_usize()];
assert_eq!(old_info.krate, krate);
let old_name: &str = &old_info.name;
let old_disambiguator: &str = &old_info.disambiguator;
let new_name: &str = &tcx.crate_name(krate).as_str();
let new_disambiguator: &str = &tcx.crate_disambiguator(krate).as_str();
old_name == new_name && old_disambiguator == new_disambiguator
}
}

pub fn retrace(&self, tcx: TyCtxt) -> RetracedDefIdDirectory {

fn make_key(name: &str, disambiguator: &str) -> String {
format!("{}/{}", name, disambiguator)
}

let new_krates: HashMap<_, _> =
once(LOCAL_CRATE)
.chain(tcx.sess.cstore.crates())
.map(|krate| (make_key(&tcx.crate_name(krate).as_str(),
&tcx.crate_disambiguator(krate).as_str()), krate))
.collect();

let ids = self.paths.iter()
.map(|path| {
let old_krate_id = path.krate.as_usize();
assert!(old_krate_id < self.krates.len());
let old_crate_info = &self.krates[old_krate_id];
let old_crate_key = make_key(&old_crate_info.name,
&old_crate_info.disambiguator);
if let Some(&new_crate_key) = new_krates.get(&old_crate_key) {
tcx.retrace_path(new_crate_key, &path.data)
} else {
debug!("crate {:?} no longer exists", old_crate_key);
None
}
})
.collect();
RetracedDefIdDirectory { ids: ids }
}
}

#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct RetracedDefIdDirectory {
ids: Vec<Option<DefId>>
}

impl RetracedDefIdDirectory {
pub fn def_id(&self, index: DefPathIndex) -> Option<DefId> {
self.ids[index.index as usize]
}

pub fn map(&self, node: &DepNode<DefPathIndex>) -> Option<DepNode<DefId>> {
node.map_def(|&index| self.def_id(index))
}
}

pub struct DefIdDirectoryBuilder<'a,'tcx:'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
hash: DefIdMap<DefPathIndex>,
directory: DefIdDirectory,
}

impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> {
let mut krates: Vec<_> =
once(LOCAL_CRATE)
.chain(tcx.sess.cstore.crates())
.map(|krate| {
CrateInfo {
krate: krate,
name: tcx.crate_name(krate).to_string(),
disambiguator: tcx.crate_disambiguator(krate).to_string()
}
})
.collect();

// the result of crates() is not in order, so sort list of
// crates so that we can just index it later
krates.sort_by_key(|k| k.krate);

DefIdDirectoryBuilder {
tcx: tcx,
hash: DefIdMap(),
directory: DefIdDirectory::new(krates),
}
}

pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
self.tcx
}

pub fn add(&mut self, def_id: DefId) -> DefPathIndex {
debug!("DefIdDirectoryBuilder: def_id={:?}", def_id);
let tcx = self.tcx;
let paths = &mut self.directory.paths;
self.hash.entry(def_id)
.or_insert_with(|| {
let def_path = tcx.def_path(def_id);
let index = paths.len() as u32;
paths.push(def_path);
DefPathIndex { index: index }
})
.clone()
}

pub fn map(&mut self, node: &DepNode<DefId>) -> DepNode<DefPathIndex> {
node.map_def(|&def_id| Some(self.add(def_id))).unwrap()
}

pub fn directory(&self) -> &DefIdDirectory {
&self.directory
}
}

impl Debug for DefIdDirectory {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt.debug_list()
.entries(self.paths.iter().enumerate())
.finish()
}
}
@@ -40,7 +40,6 @@
//! previous revision to compare things to.
//!

use super::directory::RetracedDefIdDirectory;
use super::load::DirtyNodes;
use rustc::dep_graph::{DepGraphQuery, DepNode};
use rustc::hir;
@@ -58,18 +57,23 @@
const CFG: &'static str = "cfg";

pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
dirty_inputs: &DirtyNodes,
retraced: &RetracedDefIdDirectory) {
dirty_inputs: &DirtyNodes) {
// can't add `#[rustc_dirty]` etc without opting in to this feature
if !tcx.sess.features.borrow().rustc_attrs {
return;
}

let _ignore = tcx.dep_graph.in_ignore();
let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();
let dirty_inputs: FxHashSet<DepNode<DefId>> =
dirty_inputs.keys()
.filter_map(|d| retraced.map(d))
.filter_map(|dep_node| {
dep_node.map_def(|def_path_hash| {
def_path_hash_to_def_id.get(def_path_hash).cloned()
})
})
.collect();

let query = tcx.dep_graph.query();
debug!("query-nodes: {:?}", query.nodes());
let krate = tcx.hir.krate();

@@ -12,6 +12,7 @@

use rustc::dep_graph::{DepNode, WorkProductId};
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathHash;
use rustc::hir::svh::Svh;
use rustc::ich::Fingerprint;
use rustc::session::Session;
@@ -19,12 +20,12 @@
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder;
use std::default::Default;
use std::path::{Path};
use std::sync::Arc;

use IncrementalHashesMap;
use super::data::*;
use super::directory::*;
use super::dirty_clean;
use super::hash::*;
use super::fs::*;
@@ -33,7 +34,7 @@

// The key is a dirty node. The value is **some** base-input that we
// can blame it on.
pub type DirtyNodes = FxHashMap<DepNode<DefPathIndex>, DepNode<DefPathIndex>>;
pub type DirtyNodes = FxHashMap<DepNode<DefPathHash>, DepNode<DefPathHash>>;

/// If we are in incremental mode, and a previous dep-graph exists,
/// then load up those nodes/edges that are still valid into the
@@ -118,6 +119,16 @@ fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
None
}

/// Try to convert a DepNode from the old dep-graph into a DepNode in the
/// current graph by mapping the DefPathHash to a valid DefId. This will fail
/// if the DefPathHash refers to something that has been removed (because
/// there is no DefId for that thing anymore).
fn retrace(tcx: TyCtxt, dep_node: &DepNode<DefPathHash>) -> Option<DepNode<DefId>> {
dep_node.map_def(|def_path_hash| {
tcx.def_path_hash_to_def_id.as_ref().unwrap().get(def_path_hash).cloned()
})
}

/// Decode the dep graph and load the edges/nodes that are still clean
/// into `tcx.dep_graph`.
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
@@ -149,16 +160,25 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
return Ok(());
}

let directory = DefIdDirectory::decode(&mut dep_graph_decoder)?;
let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;

let edge_map: FxHashMap<_, _> = serialized_dep_graph.edges
.into_iter()
.map(|s| (s.source, s.targets))
.collect();
let edge_map: FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>> = {
let capacity = serialized_dep_graph.edge_list_data.len();
let mut edge_map = FxHashMap::with_capacity_and_hasher(capacity, Default::default());

// Retrace the paths in the directory to find their current location (if any).
let retraced = directory.retrace(tcx);
for (node_index, source) in serialized_dep_graph.nodes.iter().enumerate() {
let (start, end) = serialized_dep_graph.edge_list_indices[node_index];
let targets =
(&serialized_dep_graph.edge_list_data[start as usize .. end as usize])
.into_iter()
.map(|&node_index| serialized_dep_graph.nodes[node_index].clone())
.collect();

edge_map.insert(source.clone(), targets);
}

edge_map
};

// Compute the set of nodes from the old graph where some input
// has changed or been removed. These are "raw" source nodes,
@@ -169,8 +189,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// the current compilation).
let dirty_raw_nodes = initial_dirty_nodes(tcx,
incremental_hashes_map,
&serialized_dep_graph.hashes,
&retraced);
&serialized_dep_graph.hashes);
let dirty_raw_nodes = transitive_dirty_nodes(&edge_map, dirty_raw_nodes);

// Recreate the edges in the graph that are still clean.
@@ -179,7 +198,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let mut extra_edges = vec![];
for (source, targets) in &edge_map {
for target in targets {
process_edges(tcx, source, target, &edge_map, &directory, &retraced, &dirty_raw_nodes,
process_edges(tcx, source, target, &edge_map, &dirty_raw_nodes,
&mut clean_work_products, &mut dirty_work_products, &mut extra_edges);
}
}
@@ -187,7 +206,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Recreate bootstrap outputs, which are outputs that have no incoming edges (and hence cannot
// be dirty).
for bootstrap_output in &serialized_dep_graph.bootstrap_outputs {
if let Some(n) = retraced.map(bootstrap_output) {
if let Some(n) = retrace(tcx, bootstrap_output) {
if let DepNode::WorkProduct(ref wp) = n {
clean_work_products.insert(wp.clone());
}
@@ -214,7 +233,7 @@ fn create_node((): (), (): ()) {
// the edge from `Hir(X)` to `Bar` (or, if `Bar` itself cannot be
// recreated, to the targets of `Bar`).
while let Some((source, target)) = extra_edges.pop() {
process_edges(tcx, source, target, &edge_map, &directory, &retraced, &dirty_raw_nodes,
process_edges(tcx, source, target, &edge_map, &dirty_raw_nodes,
&mut clean_work_products, &mut dirty_work_products, &mut extra_edges);
}

@@ -222,10 +241,9 @@ fn create_node((): (), (): ()) {
// dirty.
reconcile_work_products(tcx, work_products, &clean_work_products);

dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_nodes, &retraced);
dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_nodes);

load_prev_metadata_hashes(tcx,
&retraced,
&mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
Ok(())
}
@@ -234,8 +252,7 @@ fn create_node((): (), (): ()) {
/// a bit vector where the index is the DefPathIndex.
fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap,
serialized_hashes: &[SerializedHash],
retraced: &RetracedDefIdDirectory)
serialized_hashes: &[SerializedHash])
-> DirtyNodes {
let mut hcx = HashContext::new(tcx, incremental_hashes_map);
let mut dirty_nodes = FxHashMap();
@@ -249,7 +266,7 @@ fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
};

for hash in serialized_hashes {
if let Some(dep_node) = retraced.map(&hash.dep_node) {
if let Some(dep_node) = retrace(tcx, &hash.dep_node) {
if let Some(current_hash) = hcx.hash(&dep_node) {
if current_hash == hash.hash {
debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
@@ -282,11 +299,11 @@ fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
dirty_nodes
}

fn transitive_dirty_nodes(edge_map: &FxHashMap<DepNode<DefPathIndex>, Vec<DepNode<DefPathIndex>>>,
fn transitive_dirty_nodes(edge_map: &FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>>,
mut dirty_nodes: DirtyNodes)
-> DirtyNodes
{
let mut stack: Vec<(DepNode<DefPathIndex>, DepNode<DefPathIndex>)> = vec![];
let mut stack: Vec<(DepNode<DefPathHash>, DepNode<DefPathHash>)> = vec![];
stack.extend(dirty_nodes.iter().map(|(s, b)| (s.clone(), b.clone())));
while let Some((source, blame)) = stack.pop() {
// we know the source is dirty (because of the node `blame`)...
@@ -348,7 +365,6 @@ fn delete_dirty_work_product(tcx: TyCtxt,
}

fn load_prev_metadata_hashes(tcx: TyCtxt,
retraced: &RetracedDefIdDirectory,
output: &mut FxHashMap<DefId, Fingerprint>) {
if !tcx.sess.opts.debugging_opts.query_dep_graph {
return
@@ -388,9 +404,11 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
debug!("load_prev_metadata_hashes() - Mapping DefIds");

assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.entry_hashes.len());
let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();

for serialized_hash in serialized_hashes.entry_hashes {
let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
if let Some(def_id) = retraced.def_id(def_path_index) {
let def_path_hash = serialized_hashes.index_map[&serialized_hash.def_index];
if let Some(&def_id) = def_path_hash_to_def_id.get(&def_path_hash) {
let old = output.insert(def_id, serialized_hash.hash);
assert!(old.is_none(), "already have hash for {:?}", def_id);
}
@@ -402,15 +420,13 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,

fn process_edges<'a, 'tcx, 'edges>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
source: &'edges DepNode<DefPathIndex>,
target: &'edges DepNode<DefPathIndex>,
edges: &'edges FxHashMap<DepNode<DefPathIndex>, Vec<DepNode<DefPathIndex>>>,
directory: &DefIdDirectory,
retraced: &RetracedDefIdDirectory,
source: &'edges DepNode<DefPathHash>,
target: &'edges DepNode<DefPathHash>,
edges: &'edges FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>>,
dirty_raw_nodes: &DirtyNodes,
clean_work_products: &mut FxHashSet<Arc<WorkProductId>>,
dirty_work_products: &mut FxHashSet<Arc<WorkProductId>>,
extra_edges: &mut Vec<(&'edges DepNode<DefPathIndex>, &'edges DepNode<DefPathIndex>)>)
extra_edges: &mut Vec<(&'edges DepNode<DefPathHash>, &'edges DepNode<DefPathHash>)>)
{
// If the target is dirty, skip the edge. If this is an edge
// that targets a work-product, we can print the blame
@@ -419,14 +435,21 @@ fn process_edges<'a, 'tcx, 'edges>(
if let DepNode::WorkProduct(ref wp) = *target {
if tcx.sess.opts.debugging_opts.incremental_info {
if dirty_work_products.insert(wp.clone()) {
// It'd be nice to pretty-print these paths better than just
// using the `Debug` impls, but wev.
// Try to reconstruct the human-readable version of the
// DepNode. This cannot be done for things that where
// removed.
let readable_blame = if let Some(dep_node) = retrace(tcx, blame) {
dep_node.map_def(|&def_id| Some(tcx.def_path(def_id).to_string(tcx)))
.unwrap()
} else {
blame.map_def(|def_path_hash| Some(format!("{:?}", def_path_hash)))
.unwrap()
};

println!("incremental: module {:?} is dirty because {:?} \
changed or was removed",
wp,
blame.map_def(|&index| {
Some(directory.def_path_string(tcx, index))
}).unwrap());
readable_blame);
}
}
}
@@ -439,8 +462,8 @@ fn process_edges<'a, 'tcx, 'edges>(
// Retrace the source -> target edges to def-ids and then create
// an edge in the graph. Retracing may yield none if some of the
// data happens to have been removed.
if let Some(source_node) = retraced.map(source) {
if let Some(target_node) = retraced.map(target) {
if let Some(source_node) = retrace(tcx, source) {
if let Some(target_node) = retrace(tcx, target) {
let _task = tcx.dep_graph.in_task(target_node);
tcx.dep_graph.read(source_node);
if let DepNode::WorkProduct(ref wp) = *target {
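The hunks above show only the beginning of `transitive_dirty_nodes`; the underlying idea is a worklist walk over the edge map that marks everything reachable from an initially dirty node as dirty, recording some base input to blame. A simplified sketch of that propagation, with string node names instead of `DepNode<DefPathHash>`:

```rust
use std::collections::HashMap;

// Maps a dirty node to *some* base input we can blame it on,
// mirroring the `DirtyNodes` alias in the loading code.
type DirtyNodes = HashMap<String, String>;

// Walk the edge map (source -> targets) and propagate dirtiness forward:
// any target of a dirty source becomes dirty, blamed on the same base input.
fn transitive_dirty_nodes(
    edge_map: &HashMap<String, Vec<String>>,
    mut dirty_nodes: DirtyNodes,
) -> DirtyNodes {
    let mut stack: Vec<(String, String)> = dirty_nodes
        .iter()
        .map(|(s, b)| (s.clone(), b.clone()))
        .collect();
    while let Some((source, blame)) = stack.pop() {
        for target in edge_map.get(&source).into_iter().flatten() {
            if !dirty_nodes.contains_key(target) {
                dirty_nodes.insert(target.clone(), blame.clone());
                stack.push((target.clone(), blame.clone()));
            }
        }
    }
    dirty_nodes
}

fn main() {
    let mut edge_map = HashMap::new();
    edge_map.insert("Hir(a)".to_string(), vec!["TypeckTables(a)".to_string()]);
    edge_map.insert("TypeckTables(a)".to_string(), vec!["MirShim(a)".to_string()]);

    // Hir(a) changed since the previous session, so it starts out dirty.
    let mut initial = DirtyNodes::new();
    initial.insert("Hir(a)".to_string(), "Hir(a)".to_string());

    // Everything downstream of Hir(a) ends up dirty, blamed on Hir(a).
    let dirty = transitive_dirty_nodes(&edge_map, initial);
    println!("{:?}", dirty);
}
```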
@@ -13,7 +13,6 @@
//! various HIR nodes.

mod data;
mod directory;
mod dirty_clean;
mod fs;
mod hash;

@@ -11,11 +11,14 @@
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh;
use rustc::hir::map::DefPathHash;
use rustc::ich::Fingerprint;
use rustc::middle::cstore::EncodedMetadataHashes;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_serialize::Encodable as RustcEncodable;
use rustc_serialize::opaque::Encoder;
use std::io::{self, Cursor, Write};
@@ -24,7 +27,6 @@

use IncrementalHashesMap;
use super::data::*;
use super::directory::*;
use super::hash::*;
use super::preds::*;
use super::fs::*;
@@ -43,7 +45,6 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
return;
}

let mut builder = DefIdDirectoryBuilder::new(tcx);
let query = tcx.dep_graph.query();

if tcx.sess.opts.debugging_opts.incremental_info {
@@ -65,14 +66,13 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|e| encode_metadata_hashes(tcx,
svh,
metadata_hashes,
&mut builder,
&mut current_metadata_hashes,
e));
}

save_in(sess,
dep_graph_path(sess),
|e| encode_dep_graph(&preds, &mut builder, e));
|e| encode_dep_graph(tcx, &preds, e));

let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();
dirty_clean::check_dirty_clean_metadata(tcx,
@@ -167,81 +167,91 @@ fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
}
}

pub fn encode_dep_graph(preds: &Predecessors,
builder: &mut DefIdDirectoryBuilder,
pub fn encode_dep_graph(tcx: TyCtxt,
preds: &Predecessors,
encoder: &mut Encoder)
-> io::Result<()> {
// First encode the commandline arguments hash
let tcx = builder.tcx();
tcx.sess.opts.dep_tracking_hash().encode(encoder)?;

// Create a flat list of (Input, WorkProduct) edges for
// serialization.
let mut edges = FxHashMap();
for edge in preds.reduced_graph.all_edges() {
let source = *preds.reduced_graph.node_data(edge.source());
let target = *preds.reduced_graph.node_data(edge.target());
match *target {
DepNode::MetaData(ref def_id) => {
// Metadata *targets* are always local metadata nodes. We have
// already handled those in `encode_metadata_hashes`.
assert!(def_id.is_local());
continue;
}
_ => (),
let to_hash_based_node = |dep_node: &DepNode<DefId>| {
dep_node.map_def(|&def_id| Some(tcx.def_path_hash(def_id))).unwrap()
};

// NB: We rely on this Vec being indexable by reduced_graph's NodeIndex.
let nodes: IndexVec<DepNodeIndex, DepNode<DefPathHash>> = preds
.reduced_graph
.all_nodes()
.iter()
.map(|node| to_hash_based_node(node.data))
.collect();

let mut edge_list_indices = Vec::with_capacity(nodes.len());
let mut edge_list_data = Vec::with_capacity(preds.reduced_graph.len_edges());

for node_index in 0 .. nodes.len() {
let start = edge_list_data.len() as u32;

for target in preds.reduced_graph.successor_nodes(graph::NodeIndex(node_index)) {
edge_list_data.push(DepNodeIndex::new(target.node_id()));
}
debug!("serialize edge: {:?} -> {:?}", source, target);
let source = builder.map(source);
let target = builder.map(target);
edges.entry(source).or_insert(vec![]).push(target);

let end = edge_list_data.len() as u32;
debug_assert_eq!(node_index, edge_list_indices.len());
edge_list_indices.push((start, end));
}

// Let's make we had no overflow there.
assert!(edge_list_data.len() <= ::std::u32::MAX as usize);
// Check that we have a consistent number of edges.
assert_eq!(edge_list_data.len(), preds.reduced_graph.len_edges());

let bootstrap_outputs = preds
.bootstrap_outputs
.iter()
.map(|n| to_hash_based_node(n))
.collect();

let hashes = preds
.hashes
.iter()
.map(|(&dep_node, &hash)| {
SerializedHash {
dep_node: to_hash_based_node(dep_node),
hash: hash,
}
})
.collect();

let graph = SerializedDepGraph {
nodes,
edge_list_indices,
edge_list_data,
bootstrap_outputs,
hashes,
};

// Encode the graph data.
graph.encode(encoder)?;

if tcx.sess.opts.debugging_opts.incremental_info {
println!("incremental: {} nodes in reduced dep-graph", graph.nodes.len());
println!("incremental: {} edges in serialized dep-graph", graph.edge_list_data.len());
println!("incremental: {} hashes in serialized dep-graph", graph.hashes.len());
}

if tcx.sess.opts.debugging_opts.incremental_dump_hash {
for (dep_node, hash) in &preds.hashes {
println!("HIR hash for {:?} is {}", dep_node, hash);
println!("ICH for {:?} is {}", dep_node, hash);
}
}

// Create the serialized dep-graph.
let bootstrap_outputs = preds.bootstrap_outputs.iter()
.map(|n| builder.map(n))
.collect();
let edges = edges.into_iter()
.map(|(k, v)| SerializedEdgeSet { source: k, targets: v })
.collect();
let graph = SerializedDepGraph {
bootstrap_outputs,
edges,
hashes: preds.hashes
.iter()
.map(|(&dep_node, &hash)| {
SerializedHash {
dep_node: builder.map(dep_node),
hash: hash,
}
})
.collect(),
};

if tcx.sess.opts.debugging_opts.incremental_info {
println!("incremental: {} nodes in reduced dep-graph", preds.reduced_graph.len_nodes());
println!("incremental: {} edges in serialized dep-graph", graph.edges.len());
println!("incremental: {} hashes in serialized dep-graph", graph.hashes.len());
}

debug!("graph = {:#?}", graph);

// Encode the directory and then the graph data.
builder.directory().encode(encoder)?;
graph.encode(encoder)?;

Ok(())
}

pub fn encode_metadata_hashes(tcx: TyCtxt,
svh: Svh,
metadata_hashes: &EncodedMetadataHashes,
builder: &mut DefIdDirectoryBuilder,
current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,
encoder: &mut Encoder)
-> io::Result<()> {
@@ -256,8 +266,8 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
let def_id = DefId::local(serialized_hash.def_index);

// Store entry in the index_map
let def_path_index = builder.add(def_id);
serialized_hashes.index_map.insert(def_id.index, def_path_index);
let def_path_hash = tcx.def_path_hash(def_id);
serialized_hashes.index_map.insert(def_id.index, def_path_hash);

// Record hash in current_metadata_hashes
current_metadata_hashes.insert(def_id, serialized_hash.hash);

@@ -326,7 +326,7 @@ fn register_crate(&mut self,
let mut cmeta = cstore::CrateMetadata {
name: name,
extern_crate: Cell::new(None),
def_path_table: def_path_table,
def_path_table: Rc::new(def_path_table),
exported_symbols: exported_symbols,
trait_impls: trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {

@@ -76,7 +76,7 @@ pub struct CrateMetadata {
/// hashmap, which gives the reverse mapping. This allows us to
/// quickly retrace a `DefPath`, which is needed for incremental
/// compilation support.
pub def_path_table: DefPathTable,
pub def_path_table: Rc<DefPathTable>,

pub exported_symbols: Tracked<FxHashSet<DefIndex>>,

@@ -17,7 +17,6 @@
ExternCrate, NativeLibrary, MetadataLoader, LinkMeta,
LinkagePreference, LoadedMacro, EncodedMetadata};
use rustc::hir::def;
use rustc::ich;
use rustc::middle::lang_items;
use rustc::session::Session;
use rustc::ty::{self, TyCtxt};
@@ -25,7 +24,8 @@
use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};

use rustc::dep_graph::{DepNode, GlobalMetaDataKind};
use rustc::hir::map::{DefKey, DefPath, DisambiguatedDefPathData};
use rustc::hir::map::{DefKey, DefPath, DisambiguatedDefPathData, DefPathHash};
use rustc::hir::map::definitions::DefPathTable;
use rustc::util::nodemap::{NodeSet, DefIdMap};
use rustc_back::PanicStrategy;

@@ -334,10 +334,14 @@ fn def_path(&self, def: DefId) -> DefPath {
self.get_crate_data(def.krate).def_path(def.index)
}

fn def_path_hash(&self, def: DefId) -> ich::Fingerprint {
fn def_path_hash(&self, def: DefId) -> DefPathHash {
self.get_crate_data(def.krate).def_path_hash(def.index)
}

fn def_path_table(&self, cnum: CrateNum) -> Rc<DefPathTable> {
self.get_crate_data(cnum).def_path_table.clone()
}

fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>
{
self.dep_graph.read(DepNode::MetaData(def));

@@ -14,9 +14,8 @@
use schema::*;

use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind};
use rustc::hir::map::{DefKey, DefPath, DefPathData};
use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash};
use rustc::hir;
use rustc::ich;

use rustc::middle::cstore::LinkagePreference;
use rustc::hir::def::{self, Def, CtorKind};
@@ -1109,7 +1108,7 @@ pub fn def_path(&self, id: DefIndex) -> DefPath {
}

#[inline]
pub fn def_path_hash(&self, index: DefIndex) -> ich::Fingerprint {
pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
self.def_path_table.def_path_hash(index)
}