rust/crates/rust-analyzer/src/cli/analysis_stats.rs

//! Fully type-check project and print various stats, like the number of type
//! errors.
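//!
//! As a rough sketch of how this command is driven programmatically (the field
//! values below are illustrative placeholders, and `Verbosity::Normal` is
//! assumed to be the default verbosity variant exported by `crate::cli`):
//!
//! ```ignore
//! let cmd = AnalysisStatsCmd {
//!     randomize: false,
//!     parallel: true,
//!     memory_usage: false,
//!     only: None,
//!     with_deps: false,
//!     path: "path/to/project".into(),
//!     load_output_dirs: true,
//!     with_proc_macro: true,
//! };
//! cmd.run(Verbosity::Normal)?;
//! ```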

use std::{
    path::PathBuf,
    time::{SystemTime, UNIX_EPOCH},
};

use hir::{
    db::{AstDatabase, DefDatabase, HirDatabase},
    AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
};
use hir_def::FunctionId;
use hir_ty::{Ty, TypeWalk};
use ide_db::base_db::{
    salsa::{self, ParallelDatabase},
    SourceDatabaseExt,
};
use itertools::Itertools;
use oorandom::Rand32;
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use stdx::format_to;
use syntax::AstNode;

use crate::cli::{
    load_cargo::load_cargo, print_memory_usage, progress_report::ProgressReport, report_metric,
    Result, Verbosity,
};
use profile::StopWatch;

/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap<DB>(DB);
impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
    fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
        Snap(self.0.snapshot())
    }
}
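
/// Options for the `analysis-stats` run; each field mirrors one command-line
/// knob (randomized visit order, parallel inference, whether dependencies and
/// proc macros are included, and so on).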
pub struct AnalysisStatsCmd {
    pub randomize: bool,
    pub parallel: bool,
    pub memory_usage: bool,
    pub only: Option<String>,
    pub with_deps: bool,
    pub path: PathBuf,
    pub load_output_dirs: bool,
    pub with_proc_macro: bool,
}

impl AnalysisStatsCmd {
    pub fn run(self, verbosity: Verbosity) -> Result<()> {
        let mut rng = {
            let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
            Rand32::new(seed)
        };

        let mut db_load_sw = self.stop_watch();
        let (host, vfs) = load_cargo(&self.path, self.load_output_dirs, self.with_proc_macro)?;
        let db = host.raw_database();
eprintln!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
let mut analysis_sw = self.stop_watch();
let mut num_crates = 0;
let mut visited_modules = FxHashSet::default();
let mut visit_queue = Vec::new();
let mut krates = Crate::all(db);
if self.randomize {
shuffle(&mut rng, &mut krates);
}
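
        // Queue each crate's root module for the walk below; library (non-local)
        // source roots are skipped unless `with_deps` is set.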
        for krate in krates {
            let module = krate.root_module(db);
            let file_id = module.definition_source(db).file_id;
            let file_id = file_id.original_file(db);
            let source_root = db.file_source_root(file_id);
            let source_root = db.source_root(source_root);
            if !source_root.is_library || self.with_deps {
                num_crates += 1;
                visit_queue.push(module);
            }
        }

        if self.randomize {
            shuffle(&mut rng, &mut visit_queue);
        }
eprint!(" crates: {}", num_crates);
let mut num_decls = 0;
let mut funcs = Vec::new();
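        // Walk the module tree, counting declarations and collecting every free
        // function and impl-block method for the inference passes below.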
        while let Some(module) = visit_queue.pop() {
            if visited_modules.insert(module) {
                visit_queue.extend(module.children(db));
                for decl in module.declarations(db) {
                    num_decls += 1;
                    if let ModuleDef::Function(f) = decl {
                        funcs.push(f);
                    }
                }
                for impl_def in module.impl_defs(db) {
                    for item in impl_def.items(db) {
                        num_decls += 1;
                        if let AssocItem::Function(f) = item {
                            funcs.push(f);
                        }
                    }
                }
            }
        }
eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len());
eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
if self.randomize {
shuffle(&mut rng, &mut funcs);
}

        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel => ProgressReport::hidden(),
            _ => ProgressReport::new(funcs.len() as u64),
        };
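
        // With `parallel`, pre-compute bodies and inference results on rayon
        // worker threads first. Each worker clones `Snap`, i.e. takes its own
        // salsa snapshot of the database; the memoized results are shared, so
        // the sequential pass below should largely hit warm caches.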
        if self.parallel {
            let mut inference_sw = self.stop_watch();
            let snap = Snap(db.snapshot());
            funcs
                .par_iter()
                .map_with(snap, |snap, &f| {
                    let f_id = FunctionId::from(f);
                    snap.0.body(f_id.into());
                    snap.0.infer(f_id.into());
                })
                .count();
eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
}

        let mut inference_sw = self.stop_watch();
        bar.tick();
        let mut num_exprs = 0;
        let mut num_exprs_unknown = 0;
        let mut num_exprs_partially_unknown = 0;
        let mut num_type_mismatches = 0;
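
        // Sequential pass: infer each function and tally expressions whose type
        // is fully unknown (`??ty` in the summary), only partially known (`?ty`),
        // or mismatched against expectations (`!ty`), printing per-expression
        // detail at higher verbosity levels.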
        for f in funcs {
            let name = f.name(db);
            let full_name = f
                .module(db)
                .path_to_root(db)
                .into_iter()
                .rev()
                .filter_map(|it| it.name(db))
                .chain(Some(f.name(db)))
                .join("::");
            if let Some(only_name) = self.only.as_deref() {
                if name.to_string() != only_name && full_name != only_name {
                    continue;
                }
            }
            let mut msg = format!("processing: {}", full_name);
            if verbosity.is_verbose() {
                if let Some(src) = f.source(db) {
                    let original_file = src.file_id.original_file(db);
                    let path = vfs.file_path(original_file);
                    let syntax_range = src.value.syntax().text_range();
                    format_to!(msg, " ({} {:?})", path, syntax_range);
                }
            }
            if verbosity.is_spammy() {
                bar.println(msg.to_string());
            }
            bar.set_message(&msg);
            let f_id = FunctionId::from(f);
            let body = db.body(f_id.into());
            let inference_result = db.infer(f_id.into());
            let (previous_exprs, previous_unknown, previous_partially_unknown) =
                (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
            for (expr_id, _) in body.exprs.iter() {
                let ty = &inference_result[expr_id];
                num_exprs += 1;
                if let Ty::Unknown = ty {
                    num_exprs_unknown += 1;
                } else {
                    let mut is_partially_unknown = false;
                    ty.walk(&mut |ty| {
                        if let Ty::Unknown = ty {
                            is_partially_unknown = true;
                        }
                    });
                    if is_partially_unknown {
                        num_exprs_partially_unknown += 1;
                    }
                }
                if self.only.is_some() && verbosity.is_spammy() {
                    // in super-verbose mode for just one function, we print every single expression
                    let (_, sm) = db.body_with_source_map(f_id.into());
                    let src = sm.expr_syntax(expr_id);
                    if let Ok(src) = src {
                        let node = {
                            let root = db.parse_or_expand(src.file_id).unwrap();
                            src.value.to_node(&root)
                        };
                        let original_file = src.file_id.original_file(db);
                        let line_index = host.analysis().file_line_index(original_file).unwrap();
                        let text_range = node.syntax().text_range();
                        let (start, end) = (
                            line_index.line_col(text_range.start()),
                            line_index.line_col(text_range.end()),
                        );
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col_utf16,
                            end.line + 1,
                            end.col_utf16,
                            ty.display(db)
                        ));
                    } else {
                        bar.println(format!("unknown location: {}", ty.display(db)));
                    }
                }
                if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
                    num_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        let (_, sm) = db.body_with_source_map(f_id.into());
                        let src = sm.expr_syntax(expr_id);
                        if let Ok(src) = src {
                            // FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly
                            // But also, we should just turn the type mismatches into diagnostics and provide these
                            let root = db.parse_or_expand(src.file_id).unwrap();
                            let node = src.map(|e| e.to_node(&root).syntax().clone());
                            let original_range = node.as_ref().original_file_range(db);
                            let path = vfs.file_path(original_range.file_id);
                            let line_index =
                                host.analysis().file_line_index(original_range.file_id).unwrap();
                            let text_range = original_range.range;
                            let (start, end) = (
                                line_index.line_col(text_range.start()),
                                line_index.line_col(text_range.end()),
                            );
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col_utf16,
                                end.line + 1,
                                end.col_utf16,
                                mismatch.expected.display(db),
                                mismatch.actual.display(db)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name,
                                mismatch.expected.display(db),
                                mismatch.actual.display(db)
                            ));
                        }
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} exprs, {} unknown, {} partial",
                    full_name,
                    num_exprs - previous_exprs,
                    num_exprs_unknown - previous_unknown,
                    num_exprs_partially_unknown - previous_partially_unknown
                ));
            }
            bar.inc(1);
        }
        bar.finish_and_clear();
        eprintln!(
            " exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_exprs,
            num_exprs_unknown,
            percentage(num_exprs_unknown, num_exprs),
            num_exprs_partially_unknown,
            percentage(num_exprs_partially_unknown, num_exprs),
            num_type_mismatches
        );
report_metric("unknown type", num_exprs_unknown, "#");
report_metric("type mismatches", num_type_mismatches, "#");
eprintln!("{:<20} {}", "Inference:", inference_sw.elapsed());
2020-07-25 10:35:45 +02:00
let total_span = analysis_sw.elapsed();
2021-01-11 22:17:30 +03:00
eprintln!("{:<20} {}", "Total:", total_span);
report_metric("total time", total_span.time.as_millis() as u64, "ms");
if let Some(instructions) = total_span.instructions {
report_metric("total instructions", instructions, "#instr");
}
if let Some(memory) = total_span.memory {
report_metric("total memory", memory.allocated.megabytes() as u64, "MB");
}

        if self.memory_usage && verbosity.is_verbose() {
            print_memory_usage(host, vfs);
        }

        Ok(())
    }

    fn stop_watch(&self) -> StopWatch {
        StopWatch::start().memory(self.memory_usage)
    }
}
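
/// Shuffles the slice in place by repeatedly swapping a randomly chosen
/// remaining element to the front (a Fisher-Yates style shuffle).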
fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
    for i in 0..slice.len() {
        randomize_first(rng, &mut slice[i..]);
    }

    fn randomize_first<T>(rng: &mut Rand32, slice: &mut [T]) {
        assert!(!slice.is_empty());
        let idx = rng.rand_range(0..slice.len() as u32) as usize;
        slice.swap(0, idx);
    }
}
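
/// Integer percentage of `n` out of `total`, falling back to 100 when `total`
/// is zero to avoid a division by zero.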
fn percentage(n: u64, total: u64) -> u64 {
    (n * 100).checked_div(total).unwrap_or(100)
}