//! Fully type-check project and print various stats, like the number of type
//! errors.

use std::{
    path::PathBuf,
    time::{SystemTime, UNIX_EPOCH},
};

use hir::{
    db::{AstDatabase, DefDatabase, HirDatabase},
    AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
};
use hir_def::FunctionId;
use hir_ty::{Ty, TypeWalk};
use ide_db::base_db::{
    salsa::{self, ParallelDatabase},
    SourceDatabaseExt,
};
use itertools::Itertools;
use oorandom::Rand32;
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use stdx::format_to;
use syntax::AstNode;

use crate::cli::{
    load_cargo::load_cargo, print_memory_usage, progress_report::ProgressReport, report_metric,
    Result, Verbosity,
};
use profile::StopWatch;

/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap<DB>(DB);
impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
    fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
        Snap(self.0.snapshot())
    }
}
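
/// Options for a full analysis run over a project; typically filled in by the
/// CLI layer in `crate::cli` and consumed by [`AnalysisStatsCmd::run`].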
pub struct AnalysisStatsCmd {
    pub randomize: bool,
    pub parallel: bool,
    pub memory_usage: bool,
    pub only: Option<String>,
    pub with_deps: bool,
    pub path: PathBuf,
    pub load_output_dirs: bool,
    pub with_proc_macro: bool,
}

impl AnalysisStatsCmd {
    pub fn run(self, verbosity: Verbosity) -> Result<()> {
        let mut rng = {
            let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
            Rand32::new(seed)
        };
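
        // Load the project into an analysis database and time it; `load_output_dirs`
        // and `with_proc_macro` control how much of the build environment is loaded.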
        let mut db_load_sw = self.stop_watch();
        let (host, vfs) = load_cargo(&self.path, self.load_output_dirs, self.with_proc_macro)?;
        let db = host.raw_database();
        eprintln!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());

        let mut analysis_sw = self.stop_watch();
        let mut num_crates = 0;
        let mut visited_modules = FxHashSet::default();
        let mut visit_queue = Vec::new();
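
        // Seed the traversal with the root module of every crate, skipping library
        // (non-workspace) source roots unless `with_deps` is set.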
        let mut krates = Crate::all(db);
        if self.randomize {
            shuffle(&mut rng, &mut krates);
        }
        for krate in krates {
            let module = krate.root_module(db);
            let file_id = module.definition_source(db).file_id;
            let file_id = file_id.original_file(db);
            let source_root = db.file_source_root(file_id);
            let source_root = db.source_root(source_root);
            if !source_root.is_library || self.with_deps {
                num_crates += 1;
                visit_queue.push(module);
            }
        }

        if self.randomize {
            shuffle(&mut rng, &mut visit_queue);
        }

        eprint!(" crates: {}", num_crates);
        let mut num_decls = 0;
        let mut funcs = Vec::new();
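        // Walk the module tree via an explicit work list, counting declarations and
        // collecting every free function and impl-block method for type checking.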
        while let Some(module) = visit_queue.pop() {
            if visited_modules.insert(module) {
                visit_queue.extend(module.children(db));

                for decl in module.declarations(db) {
                    num_decls += 1;
                    if let ModuleDef::Function(f) = decl {
                        funcs.push(f);
                    }
                }

                for impl_def in module.impl_defs(db) {
                    for item in impl_def.items(db) {
                        num_decls += 1;
                        if let AssocItem::Function(f) = item {
                            funcs.push(f);
                        }
                    }
                }
            }
        }
        eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len());
        eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());

        if self.randomize {
            shuffle(&mut rng, &mut funcs);
        }
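
        // Hide the progress bar for quiet/spammy output and in parallel mode;
        // otherwise size it by the number of functions to process.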
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel => ProgressReport::hidden(),
            _ => ProgressReport::new(funcs.len() as u64),
        };
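
        // Optional parallel pass: every rayon worker gets its own salsa snapshot
        // (wrapped in `Snap` to get `Clone`) and runs body lowering plus type
        // inference for its share of the functions.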
        if self.parallel {
            let mut inference_sw = self.stop_watch();
            let snap = Snap(db.snapshot());
            funcs
                .par_iter()
                .map_with(snap, |snap, &f| {
                    let f_id = FunctionId::from(f);
                    snap.0.body(f_id.into());
                    snap.0.infer(f_id.into());
                })
                .count();
            eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
        }
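
        // Sequential pass: run inference for each collected function and classify
        // every expression as fully known, completely unknown, or partially unknown,
        // while also counting type mismatches.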
        let mut inference_sw = self.stop_watch();
        bar.tick();
        let mut num_exprs = 0;
        let mut num_exprs_unknown = 0;
        let mut num_exprs_partially_unknown = 0;
        let mut num_type_mismatches = 0;
        for f in funcs {
            let name = f.name(db);
            let full_name = f
                .module(db)
                .path_to_root(db)
                .into_iter()
                .rev()
                .filter_map(|it| it.name(db))
                .chain(Some(f.name(db)))
                .join("::");
            if let Some(only_name) = self.only.as_deref() {
                if name.to_string() != only_name && full_name != only_name {
                    continue;
                }
            }
            let mut msg = format!("processing: {}", full_name);
            if verbosity.is_verbose() {
                if let Some(src) = f.source(db) {
                    let original_file = src.file_id.original_file(db);
                    let path = vfs.file_path(original_file);
                    let syntax_range = src.value.syntax().text_range();
                    format_to!(msg, " ({} {:?})", path, syntax_range);
                }
            }
            if verbosity.is_spammy() {
                bar.println(msg.to_string());
            }
            bar.set_message(&msg);
            let f_id = FunctionId::from(f);
            let body = db.body(f_id.into());
            let inference_result = db.infer(f_id.into());
            let (previous_exprs, previous_unknown, previous_partially_unknown) =
                (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
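            // Classify the inferred type of each expression in this body.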
            for (expr_id, _) in body.exprs.iter() {
                let ty = &inference_result[expr_id];
                num_exprs += 1;
                if let Ty::Unknown = ty {
                    num_exprs_unknown += 1;
                } else {
                    let mut is_partially_unknown = false;
                    ty.walk(&mut |ty| {
                        if let Ty::Unknown = ty {
                            is_partially_unknown = true;
                        }
                    });
                    if is_partially_unknown {
                        num_exprs_partially_unknown += 1;
                    }
                }
                if self.only.is_some() && verbosity.is_spammy() {
                    // in super-verbose mode for just one function, we print every single expression
                    let (_, sm) = db.body_with_source_map(f_id.into());
                    let src = sm.expr_syntax(expr_id);
                    if let Ok(src) = src {
                        let node = {
                            let root = db.parse_or_expand(src.file_id).unwrap();
                            src.value.to_node(&root)
                        };
                        let original_file = src.file_id.original_file(db);
                        let line_index = host.analysis().file_line_index(original_file).unwrap();
                        let text_range = node.syntax().text_range();
                        let (start, end) = (
                            line_index.line_col(text_range.start()),
                            line_index.line_col(text_range.end()),
                        );
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col_utf16,
                            end.line + 1,
                            end.col_utf16,
                            ty.display(db)
                        ));
                    } else {
                        bar.println(format!("unknown location: {}", ty.display(db)));
                    }
                }
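                // Record expressions whose inferred type disagrees with the expected
                // type, mapping them back to file/line/column when possible.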
                if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
                    num_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        let (_, sm) = db.body_with_source_map(f_id.into());
                        let src = sm.expr_syntax(expr_id);
                        if let Ok(src) = src {
                            // FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly
                            // But also, we should just turn the type mismatches into diagnostics and provide these
                            let root = db.parse_or_expand(src.file_id).unwrap();
                            let node = src.map(|e| e.to_node(&root).syntax().clone());
                            let original_range = node.as_ref().original_file_range(db);
                            let path = vfs.file_path(original_range.file_id);
                            let line_index =
                                host.analysis().file_line_index(original_range.file_id).unwrap();
                            let text_range = original_range.range;
                            let (start, end) = (
                                line_index.line_col(text_range.start()),
                                line_index.line_col(text_range.end()),
                            );
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col_utf16,
                                end.line + 1,
                                end.col_utf16,
                                mismatch.expected.display(db),
                                mismatch.actual.display(db)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name,
                                mismatch.expected.display(db),
                                mismatch.actual.display(db)
                            ));
                        }
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} exprs, {} unknown, {} partial",
                    full_name,
                    num_exprs - previous_exprs,
                    num_exprs_unknown - previous_unknown,
                    num_exprs_partially_unknown - previous_partially_unknown
                ));
            }
            bar.inc(1);
        }
        bar.finish_and_clear();
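        // Summary legend: `??ty` = completely unknown types, `?ty` = partially
        // unknown types, `!ty` = type mismatches.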
        eprintln!(
            " exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_exprs,
            num_exprs_unknown,
            percentage(num_exprs_unknown, num_exprs),
            num_exprs_partially_unknown,
            percentage(num_exprs_partially_unknown, num_exprs),
            num_type_mismatches
        );
        report_metric("unknown type", num_exprs_unknown, "#");
        report_metric("type mismatches", num_type_mismatches, "#");

        eprintln!("{:<20} {}", "Inference:", inference_sw.elapsed());

        let total_span = analysis_sw.elapsed();
        eprintln!("{:<20} {}", "Total:", total_span);
        report_metric("total time", total_span.time.as_millis() as u64, "ms");
        if let Some(instructions) = total_span.instructions {
            report_metric("total instructions", instructions, "#instr");
        }
        if let Some(memory) = total_span.memory {
            report_metric("total memory", memory.allocated.megabytes() as u64, "MB");
        }

        if self.memory_usage && verbosity.is_verbose() {
            print_memory_usage(host, vfs);
        }

        Ok(())
    }
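
    /// Start a stopwatch, additionally tracking memory when `memory_usage` is set.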
    fn stop_watch(&self) -> StopWatch {
        StopWatch::start().memory(self.memory_usage)
    }
}
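
/// Shuffle `slice` in place, Fisher-Yates style: each position is swapped with an
/// element chosen uniformly from the remaining suffix.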
fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
    for i in 0..slice.len() {
        randomize_first(rng, &mut slice[i..]);
    }

    fn randomize_first<T>(rng: &mut Rand32, slice: &mut [T]) {
        assert!(!slice.is_empty());
        let idx = rng.rand_range(0..slice.len() as u32) as usize;
        slice.swap(0, idx);
    }
}
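
/// Integer percentage `n * 100 / total`, defined as 100 when `total` is zero
/// (e.g. `percentage(1, 4) == 25`).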
fn percentage(n: u64, total: u64) -> u64 {
    (n * 100).checked_div(total).unwrap_or(100)
}