2016-03-28 17:42:39 -04:00
|
|
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
|
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
|
|
|
|
2016-04-05 18:37:51 -04:00
|
|
|
use rbml::opaque::Encoder;
|
2016-03-28 17:42:39 -04:00
|
|
|
use rustc::dep_graph::DepNode;
|
2016-04-06 17:54:40 -04:00
|
|
|
use rustc::middle::cstore::LOCAL_CRATE;
|
2016-05-03 04:56:42 +03:00
|
|
|
use rustc::ty::TyCtxt;
|
2016-03-28 17:42:39 -04:00
|
|
|
use rustc_serialize::{Encodable as RustcEncodable};
|
2016-04-06 17:28:59 -04:00
|
|
|
use std::hash::{Hasher, SipHasher};
|
2016-04-05 18:37:51 -04:00
|
|
|
use std::io::{self, Cursor, Write};
|
2016-03-28 17:42:39 -04:00
|
|
|
use std::fs::{self, File};
|
2016-04-06 17:28:59 -04:00
|
|
|
use std::path::PathBuf;
|
2016-03-28 17:42:39 -04:00
|
|
|
|
|
|
|
use super::data::*;
|
|
|
|
use super::directory::*;
|
2016-05-06 15:09:31 -04:00
|
|
|
use super::hash::*;
|
2016-03-28 17:42:39 -04:00
|
|
|
use super::util::*;
|
|
|
|
|
2016-05-03 05:23:22 +03:00
|
|
|
/// Entry point for persisting incremental-compilation state at the end of
/// compilation: saves the dep-graph and the metadata hashes for the local
/// crate to their respective on-disk files.
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
    // Reads performed while saving must not themselves be recorded as
    // dependencies in the very graph we are persisting.
    let _ignore = tcx.dep_graph.in_ignore();
    let mut hcx = HashContext::new(tcx);
    // Each `save_in` call is a no-op if the corresponding path is `None`.
    save_in(&mut hcx, dep_graph_path(tcx), encode_dep_graph);
    save_in(&mut hcx, metadata_hash_path(tcx, LOCAL_CRATE), encode_metadata_hashes);
}
|
2016-03-28 17:42:39 -04:00
|
|
|
|
2016-05-06 15:09:31 -04:00
|
|
|
fn save_in<'a, 'tcx, F>(hcx: &mut HashContext<'a, 'tcx>,
|
|
|
|
opt_path_buf: Option<PathBuf>,
|
|
|
|
encode: F)
|
|
|
|
where F: FnOnce(&mut HashContext<'a, 'tcx>, &mut Encoder) -> io::Result<()>
|
2016-04-06 17:28:59 -04:00
|
|
|
{
|
2016-05-06 15:09:31 -04:00
|
|
|
let tcx = hcx.tcx;
|
|
|
|
|
2016-04-06 17:28:59 -04:00
|
|
|
let path_buf = match opt_path_buf {
|
|
|
|
Some(p) => p,
|
|
|
|
None => return
|
|
|
|
};
|
|
|
|
|
|
|
|
// FIXME(#32754) lock file?
|
|
|
|
|
|
|
|
// delete the old dep-graph, if any
|
|
|
|
if path_buf.exists() {
|
|
|
|
match fs::remove_file(&path_buf) {
|
2016-03-28 17:42:39 -04:00
|
|
|
Ok(()) => { }
|
|
|
|
Err(err) => {
|
|
|
|
tcx.sess.err(
|
2016-04-06 17:28:59 -04:00
|
|
|
&format!("unable to delete old dep-graph at `{}`: {}",
|
|
|
|
path_buf.display(), err));
|
2016-03-28 17:42:39 -04:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
2016-04-06 17:28:59 -04:00
|
|
|
}
|
2016-03-28 17:42:39 -04:00
|
|
|
|
2016-04-06 17:28:59 -04:00
|
|
|
// generate the data in a memory buffer
|
|
|
|
let mut wr = Cursor::new(Vec::new());
|
2016-05-06 15:09:31 -04:00
|
|
|
match encode(hcx, &mut Encoder::new(&mut wr)) {
|
2016-04-06 17:28:59 -04:00
|
|
|
Ok(()) => { }
|
|
|
|
Err(err) => {
|
|
|
|
tcx.sess.err(
|
|
|
|
&format!("could not encode dep-graph to `{}`: {}",
|
|
|
|
path_buf.display(), err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// write the data out
|
|
|
|
let data = wr.into_inner();
|
|
|
|
match
|
|
|
|
File::create(&path_buf)
|
|
|
|
.and_then(|mut file| file.write_all(&data))
|
|
|
|
{
|
|
|
|
Ok(_) => { }
|
|
|
|
Err(err) => {
|
|
|
|
tcx.sess.err(
|
|
|
|
&format!("failed to write dep-graph to `{}`: {}",
|
|
|
|
path_buf.display(), err));
|
|
|
|
return;
|
2016-03-28 17:42:39 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-05-06 15:09:31 -04:00
|
|
|
pub fn encode_dep_graph<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
|
2016-05-03 05:23:22 +03:00
|
|
|
encoder: &mut Encoder)
|
2016-05-06 15:09:31 -04:00
|
|
|
-> io::Result<()>
|
|
|
|
{
|
|
|
|
let tcx = hcx.tcx;
|
2016-03-28 17:42:39 -04:00
|
|
|
let query = tcx.dep_graph.query();
|
|
|
|
|
|
|
|
let mut builder = DefIdDirectoryBuilder::new(tcx);
|
|
|
|
|
2016-04-06 17:28:59 -04:00
|
|
|
// Create hashes for inputs.
|
2016-03-28 17:42:39 -04:00
|
|
|
let hashes =
|
|
|
|
query.nodes()
|
|
|
|
.into_iter()
|
2016-04-06 17:28:59 -04:00
|
|
|
.filter_map(|dep_node| {
|
2016-05-26 06:11:16 -04:00
|
|
|
hcx.hash(&dep_node)
|
2016-05-06 15:09:31 -04:00
|
|
|
.map(|hash| {
|
|
|
|
let node = builder.map(dep_node);
|
|
|
|
SerializedHash { node: node, hash: hash }
|
|
|
|
})
|
2016-03-28 17:42:39 -04:00
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
|
2016-05-06 15:09:31 -04:00
|
|
|
// Create the serialized dep-graph.
|
2016-03-28 17:42:39 -04:00
|
|
|
let graph = SerializedDepGraph {
|
|
|
|
nodes: query.nodes().into_iter()
|
2016-05-06 15:09:31 -04:00
|
|
|
.map(|node| builder.map(node))
|
2016-03-28 17:42:39 -04:00
|
|
|
.collect(),
|
|
|
|
edges: query.edges().into_iter()
|
2016-05-06 15:09:31 -04:00
|
|
|
.map(|(source_node, target_node)| {
|
|
|
|
let source = builder.map(source_node);
|
|
|
|
let target = builder.map(target_node);
|
|
|
|
(source, target)
|
2016-03-28 17:42:39 -04:00
|
|
|
})
|
|
|
|
.collect(),
|
|
|
|
hashes: hashes,
|
|
|
|
};
|
|
|
|
|
|
|
|
debug!("graph = {:#?}", graph);
|
|
|
|
|
2016-04-05 18:37:51 -04:00
|
|
|
// Encode the directory and then the graph data.
|
2016-03-28 17:42:39 -04:00
|
|
|
let directory = builder.into_directory();
|
|
|
|
try!(directory.encode(encoder));
|
2016-04-05 18:37:51 -04:00
|
|
|
try!(graph.encode(encoder));
|
2016-03-28 17:42:39 -04:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2016-05-06 15:09:31 -04:00
|
|
|
/// Encodes, into `encoder`, one hash per locally-defined metadata item
/// (`MetaData(X)` node with local `X`). Downstream crates compare these
/// hashes across compilations to decide whether our metadata for a given
/// item may have changed.
pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
                                        encoder: &mut Encoder)
                                        -> io::Result<()>
{
    let tcx = hcx.tcx;
    let query = tcx.dep_graph.query();

    let serialized_hashes = {
        // Identify the `MetaData(X)` nodes where `X` is local. These are
        // the metadata items we export. Downstream crates will want to
        // see a hash that tells them whether we might have changed the
        // metadata for a given item since they last compiled.
        //
        // NOTE: this is a lazy iterator; the work happens in the
        // `.map(...)` below and the final `collect()`.
        let meta_data_def_ids =
            query.nodes()
                 .into_iter()
                 .filter_map(|dep_node| match *dep_node {
                     DepNode::MetaData(def_id) if def_id.is_local() => Some(def_id),
                     _ => None,
                 });

        // To create the hash for each item `X`, we don't hash the raw
        // bytes of the metadata (though in principle we
        // could). Instead, we walk the predecessors of `MetaData(X)`
        // from the dep-graph. This corresponds to all the inputs that
        // were read to construct the metadata. To create the hash for
        // the metadata, we hash (the hash of) all of those inputs.
        let hashes =
            meta_data_def_ids
            .map(|def_id| {
                // Only local items were admitted by the filter above.
                assert!(def_id.is_local());
                let dep_node = DepNode::MetaData(def_id);
                let mut state = SipHasher::new();
                debug!("save: computing metadata hash for {:?}", dep_node);
                for node in query.transitive_predecessors(&dep_node) {
                    if let Some(hash) = hcx.hash(&node) {
                        debug!("save: predecessor {:?} has hash {}", node, hash);
                        // `to_le()` fixes the byte order before hashing —
                        // presumably so the result is stable across host
                        // endianness; TODO(review) confirm.
                        state.write_u64(hash.to_le());
                    } else {
                        // Un-hashable predecessors (non-inputs) are skipped.
                        debug!("save: predecessor {:?} cannot be hashed", node);
                    }
                }
                let hash = state.finish();
                debug!("save: metadata hash for {:?} is {}", dep_node, hash);
                SerializedMetadataHash {
                    def_index: def_id.index,
                    hash: hash,
                }
            });

        // Collect these up into a vector.
        SerializedMetadataHashes {
            hashes: hashes.collect()
        }
    };

    // Encode everything.
    try!(serialized_hashes.encode(encoder));

    Ok(())
}
|