Rollup merge of #111743 - nnethercote:improve-cgu-merging-debug-output, r=lqd

Improve cgu merging debug output

r? @lqd
This commit is contained in:
Dylan DPC 2023-05-20 12:21:02 +05:30 committed by GitHub
commit 9074769dc5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -250,13 +250,13 @@ pub fn partition<'tcx, I>(
cgu.create_size_estimate(tcx); cgu.create_size_estimate(tcx);
} }
debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter()); debug_dump(tcx, "INITIAL PARTITIONING", &initial_partitioning.codegen_units);
// Merge until we have at most `max_cgu_count` codegen units. // Merge until we have at most `max_cgu_count` codegen units.
{ {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus"); let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus");
partitioner.merge_codegen_units(cx, &mut initial_partitioning); partitioner.merge_codegen_units(cx, &mut initial_partitioning);
debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter()); debug_dump(tcx, "POST MERGING", &initial_partitioning.codegen_units);
} }
// In the next step, we use the inlining map to determine which additional // In the next step, we use the inlining map to determine which additional
@ -272,7 +272,7 @@ pub fn partition<'tcx, I>(
cgu.create_size_estimate(tcx); cgu.create_size_estimate(tcx);
} }
debug_dump(tcx, "POST INLINING:", post_inlining.codegen_units.iter()); debug_dump(tcx, "POST INLINING", &post_inlining.codegen_units);
// Next we try to make as many symbols "internal" as possible, so LLVM has // Next we try to make as many symbols "internal" as possible, so LLVM has
// more freedom to optimize. // more freedom to optimize.
@ -322,6 +322,8 @@ pub fn partition<'tcx, I>(
result.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str())); result.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));
debug_dump(tcx, "FINAL", &result);
result result
} }
@ -346,16 +348,20 @@ struct PostInliningPartitioning<'tcx> {
internalization_candidates: FxHashSet<MonoItem<'tcx>>, internalization_candidates: FxHashSet<MonoItem<'tcx>>,
} }
fn debug_dump<'a, 'tcx, I>(tcx: TyCtxt<'tcx>, label: &str, cgus: I) fn debug_dump<'a, 'tcx: 'a>(tcx: TyCtxt<'tcx>, label: &str, cgus: &[CodegenUnit<'tcx>]) {
where
I: Iterator<Item = &'a CodegenUnit<'tcx>>,
'tcx: 'a,
{
let dump = move || { let dump = move || {
use std::fmt::Write; use std::fmt::Write;
let num_cgus = cgus.len();
let max = cgus.iter().map(|cgu| cgu.size_estimate()).max().unwrap();
let min = cgus.iter().map(|cgu| cgu.size_estimate()).min().unwrap();
let ratio = max as f64 / min as f64;
let s = &mut String::new(); let s = &mut String::new();
let _ = writeln!(s, "{label}"); let _ = writeln!(
s,
"{label} ({num_cgus} CodegenUnits, max={max}, min={min}, max/min={ratio:.1}):"
);
for cgu in cgus { for cgu in cgus {
let _ = let _ =
writeln!(s, "CodegenUnit {} estimated size {}:", cgu.name(), cgu.size_estimate()); writeln!(s, "CodegenUnit {} estimated size {}:", cgu.name(), cgu.size_estimate());
@ -365,14 +371,14 @@ fn debug_dump<'a, 'tcx, I>(tcx: TyCtxt<'tcx>, label: &str, cgus: I)
let symbol_hash_start = symbol_name.rfind('h'); let symbol_hash_start = symbol_name.rfind('h');
let symbol_hash = symbol_hash_start.map_or("<no hash>", |i| &symbol_name[i..]); let symbol_hash = symbol_hash_start.map_or("<no hash>", |i| &symbol_name[i..]);
let _ = writeln!( let _ = with_no_trimmed_paths!(writeln!(
s, s,
" - {} [{:?}] [{}] estimated size {}", " - {} [{:?}] [{}] estimated size {}",
mono_item, mono_item,
linkage, linkage,
symbol_hash, symbol_hash,
mono_item.size_estimate(tcx) mono_item.size_estimate(tcx)
); ));
} }
let _ = writeln!(s); let _ = writeln!(s);