use std::collections::{BTreeMap, HashMap};
use std::fs;
use std::io::{self, Write};
use std::thread::ThreadId;
use std::time::Instant;

use crate::session::config::{Options, OptLevel};
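
/// Broad compiler phases used to bucket profiling events for reporting.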
#[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)]
pub enum ProfileCategory {
    Parsing,
    Expansion,
    TypeChecking,
    BorrowChecking,
    Codegen,
    Linking,
    Other,
}
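
/// A single profiling event recorded on one thread; the start/end variants
/// carry the `Instant` at which they were recorded.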
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ProfilerEvent {
    QueryStart { query_name: &'static str, category: ProfileCategory, time: Instant },
    QueryEnd { query_name: &'static str, category: ProfileCategory, time: Instant },
    GenericActivityStart { category: ProfileCategory, time: Instant },
    GenericActivityEnd { category: ProfileCategory, time: Instant },
    QueryCacheHit { query_name: &'static str, category: ProfileCategory },
    QueryCount { query_name: &'static str, category: ProfileCategory, count: usize },
    IncrementalLoadResultStart { query_name: &'static str, time: Instant },
    IncrementalLoadResultEnd { query_name: &'static str, time: Instant },
}

impl ProfilerEvent {
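    /// `true` for events that open a timing span and must be matched by a
    /// corresponding end event before results are calculated.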
    fn is_start_event(&self) -> bool {
        use self::ProfilerEvent::*;

        match self {
            QueryStart { .. } |
            GenericActivityStart { .. } |
            IncrementalLoadResultStart { .. } => true,

            QueryEnd { .. } |
            GenericActivityEnd { .. } |
            QueryCacheHit { .. } |
            QueryCount { .. } |
            IncrementalLoadResultEnd { .. } => false,
        }
    }
}
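
/// Collects profiling events for every thread of the compiler and summarizes
/// them once compilation finishes.
///
/// A minimal usage sketch (the query name and the `opts` value here are
/// illustrative only):
///
/// ```ignore
/// let mut profiler = SelfProfiler::new();
///
/// profiler.start_query("type_check", ProfileCategory::TypeChecking);
/// // ... run the work being measured ...
/// profiler.end_query("type_check", ProfileCategory::TypeChecking);
///
/// profiler.print_results(&opts);
/// ```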
pub struct SelfProfiler {
    events: HashMap<ThreadId, Vec<ProfilerEvent>>,
}
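
/// Per-category totals: self-time per query (in nanoseconds, normalized to
/// milliseconds before reporting) and cache statistics as `(hits, total)`.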
struct CategoryResultData {
    query_times: BTreeMap<&'static str, u64>,
    query_cache_stats: BTreeMap<&'static str, (u64, u64)>, // (hits, total)
}

impl CategoryResultData {
    fn new() -> CategoryResultData {
        CategoryResultData {
            query_times: BTreeMap::new(),
            query_cache_stats: BTreeMap::new(),
        }
    }

    fn total_time(&self) -> u64 {
        self.query_times.values().sum()
    }

    fn total_cache_data(&self) -> (u64, u64) {
        let (mut hits, mut total) = (0, 0);

        for (h, t) in self.query_cache_stats.values() {
            hits += h;
            total += t;
        }

        (hits, total)
    }
}

impl Default for CategoryResultData {
    fn default() -> CategoryResultData {
        CategoryResultData::new()
    }
}
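
/// Results merged across all threads, plus the session metadata needed to
/// label the report.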
struct CalculatedResults {
    categories: BTreeMap<ProfileCategory, CategoryResultData>,
    crate_name: Option<String>,
    optimization_level: OptLevel,
    incremental: bool,
    verbose: bool,
}

impl CalculatedResults {
    fn new() -> CalculatedResults {
        CalculatedResults {
            categories: BTreeMap::new(),
            crate_name: None,
            optimization_level: OptLevel::No,
            incremental: false,
            verbose: false,
        }
    }
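
    /// Merges `cr2` into `cr1`, summing per-query times and cache statistics
    /// within each category; used to fold per-thread results together.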
    fn consolidate(mut cr1: CalculatedResults, cr2: CalculatedResults) -> CalculatedResults {
        for (category, data) in cr2.categories {
            let cr1_data = cr1.categories.entry(category).or_default();

            for (query, time) in data.query_times {
                *cr1_data.query_times.entry(query).or_default() += time;
            }

            for (query, (hits, total)) in data.query_cache_stats {
                let (h, t) = cr1_data.query_cache_stats.entry(query).or_insert((0, 0));
                *h += hits;
                *t += total;
            }
        }

        cr1
    }

    fn total_time(&self) -> u64 {
        self.categories.values().map(|data| data.total_time()).sum()
    }

    fn with_options(mut self, opts: &Options) -> CalculatedResults {
        self.crate_name = opts.crate_name.clone();
        self.optimization_level = opts.optimize;
        self.incremental = opts.incremental.is_some();
        self.verbose = opts.debugging_opts.verbose;

        self
    }
}
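
/// Nanoseconds elapsed between `start` and `end`, or 0 (with a debug log)
/// when the instants arrive out of order.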
fn time_between_ns(start: Instant, end: Instant) -> u64 {
    if start < end {
        let time = end - start;
        (time.as_secs() * 1_000_000_000) + (time.subsec_nanos() as u64)
    } else {
        debug!("time_between_ns: ignoring instance of end < start");
        0
    }
}
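
/// `numerator` as a percentage of `denominator`, or 0.0 when the denominator
/// is zero.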
fn calculate_percent(numerator: u64, denominator: u64) -> f32 {
    if denominator > 0 {
        ((numerator as f32) / (denominator as f32)) * 100.0
    } else {
        0.0
    }
}

impl SelfProfiler {
    pub fn new() -> SelfProfiler {
        let mut profiler = SelfProfiler {
            events: HashMap::new(),
        };
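
        // Track everything that happens outside of queries under the `Other`
        // category; `print_results` ends this activity before summarizing.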
        profiler.start_activity(ProfileCategory::Other);

        profiler
    }

    #[inline]
    pub fn start_activity(&mut self, category: ProfileCategory) {
        self.record(ProfilerEvent::GenericActivityStart {
            category,
            time: Instant::now(),
        })
    }

    #[inline]
    pub fn end_activity(&mut self, category: ProfileCategory) {
        self.record(ProfilerEvent::GenericActivityEnd {
            category,
            time: Instant::now(),
        })
    }

    #[inline]
    pub fn record_computed_queries(
        &mut self,
        query_name: &'static str,
        category: ProfileCategory,
        count: usize)
    {
        self.record(ProfilerEvent::QueryCount {
            query_name,
            category,
            count,
        })
    }

    #[inline]
    pub fn record_query_hit(&mut self, query_name: &'static str, category: ProfileCategory) {
        self.record(ProfilerEvent::QueryCacheHit {
            query_name,
            category,
        })
    }

    #[inline]
    pub fn start_query(&mut self, query_name: &'static str, category: ProfileCategory) {
        self.record(ProfilerEvent::QueryStart {
            query_name,
            category,
            time: Instant::now(),
        });
    }

    #[inline]
    pub fn end_query(&mut self, query_name: &'static str, category: ProfileCategory) {
        self.record(ProfilerEvent::QueryEnd {
            query_name,
            category,
            time: Instant::now(),
        })
    }

    #[inline]
    pub fn incremental_load_result_start(&mut self, query_name: &'static str) {
        self.record(ProfilerEvent::IncrementalLoadResultStart {
            query_name,
            time: Instant::now(),
        })
    }

    #[inline]
    pub fn incremental_load_result_end(&mut self, query_name: &'static str) {
        self.record(ProfilerEvent::IncrementalLoadResultEnd {
            query_name,
            time: Instant::now(),
        })
    }

    #[inline]
    fn record(&mut self, event: ProfilerEvent) {
        let thread_id = std::thread::current().id();
        let events = self.events.entry(thread_id).or_default();

        events.push(event);
    }
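
    /// Turns one thread's raw event stream into per-category results.
    ///
    /// Start and end events are matched up with a stack; each stack entry
    /// tracks how much time its nested children consumed, so only a query's
    /// self-time is charged to it. Times are accumulated in nanoseconds and
    /// normalized to milliseconds at the end.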
    fn calculate_thread_results(events: &[ProfilerEvent]) -> CalculatedResults {
        use self::ProfilerEvent::*;

        assert!(
            events.last().map(|e| !e.is_start_event()).unwrap_or(true),
            "there was an event running when calculate_thread_results() was called"
        );

        let mut results = CalculatedResults::new();

        // (event, child time to subtract)
        let mut query_stack = Vec::new();

        for event in events {
            match event {
                QueryStart { .. } | GenericActivityStart { .. } => {
                    query_stack.push((event, 0));
                },
                QueryEnd { query_name, category, time: end_time } => {
                    let previous_query = query_stack.pop();
                    if let Some((QueryStart {
                                    query_name: p_query_name,
                                    time: start_time,
                                    category: _ }, child_time_to_subtract)) = previous_query {
                        assert_eq!(
                            p_query_name,
                            query_name,
                            "Saw a query end but the previous query wasn't the corresponding start"
                        );

                        let time_ns = time_between_ns(*start_time, *end_time);
                        let self_time_ns = time_ns - child_time_to_subtract;
                        let result_data = results.categories.entry(*category).or_default();

                        *result_data.query_times.entry(query_name).or_default() += self_time_ns;

                        if let Some((_, child_time_to_subtract)) = query_stack.last_mut() {
                            *child_time_to_subtract += time_ns;
                        }
                    } else {
                        bug!("Saw a query end but the previous event wasn't a query start");
                    }
                }
                GenericActivityEnd { category, time: end_time } => {
                    let previous_event = query_stack.pop();
                    if let Some((GenericActivityStart {
                                    category: previous_category,
                                    time: start_time }, child_time_to_subtract)) = previous_event {
                        assert_eq!(
                            previous_category,
                            category,
                            "Saw an end but the previous event wasn't the corresponding start"
                        );

                        let time_ns = time_between_ns(*start_time, *end_time);
                        let self_time_ns = time_ns - child_time_to_subtract;
                        let result_data = results.categories.entry(*category).or_default();

                        *result_data.query_times
                            .entry("{time spent not running queries}")
                            .or_default() += self_time_ns;

                        if let Some((_, child_time_to_subtract)) = query_stack.last_mut() {
                            *child_time_to_subtract += time_ns;
                        }
                    } else {
                        bug!("Saw an activity end but the previous event wasn't an activity start");
                    }
                },
                QueryCacheHit { category, query_name } => {
                    let result_data = results.categories.entry(*category).or_default();

                    let (hits, total) =
                        result_data.query_cache_stats.entry(query_name).or_insert((0, 0));
                    *hits += 1;
                    *total += 1;
                },
                QueryCount { category, query_name, count } => {
                    let result_data = results.categories.entry(*category).or_default();

                    let (_, totals) =
                        result_data.query_cache_stats.entry(query_name).or_insert((0, 0));
                    *totals += *count as u64;
                },
                // We don't summarize incremental load result events in the
                // simple output mode.
                IncrementalLoadResultStart { .. } | IncrementalLoadResultEnd { .. } => { },
            }
        }

        // Normalize the times to ms.
        for data in results.categories.values_mut() {
            for time in data.query_times.values_mut() {
                *time /= 1_000_000;
            }
        }

        results
    }
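
    /// Folds every thread's events into a single `CalculatedResults` and tags
    /// it with the relevant session options.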
    fn get_results(&self, opts: &Options) -> CalculatedResults {
        self.events
            .values()
            .map(|r| SelfProfiler::calculate_thread_results(r))
            .fold(CalculatedResults::new(), CalculatedResults::consolidate)
            .with_options(opts)
    }
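
    /// Prints a table of per-category timings and query-cache hit rates to
    /// stderr, ending the implicit `Other` activity first.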
    pub fn print_results(&mut self, opts: &Options) {
        self.end_activity(ProfileCategory::Other);

        let results = self.get_results(opts);

        let total_time = results.total_time() as f32;

        let out = io::stderr();
        let mut lock = out.lock();

        let crate_name = results.crate_name.map(|n| format!(" for {}", n)).unwrap_or_default();

        writeln!(lock, "Self profiling results{}:", crate_name).unwrap();
        writeln!(lock).unwrap();

        writeln!(lock, "| Phase                                     | Time (ms)      \
                        | Time (%) | Queries        | Hits (%)")
            .unwrap();
        writeln!(lock, "| ----------------------------------------- | -------------- \
                        | -------- | -------------- | --------")
            .unwrap();

        let mut categories: Vec<_> = results.categories.iter().collect();
        categories.sort_by(|(_, data1), (_, data2)| data2.total_time().cmp(&data1.total_time()));

        for (category, data) in categories {
            let (category_hits, category_total) = data.total_cache_data();
            let category_hit_percent = calculate_percent(category_hits, category_total);

            writeln!(
                lock,
                "| {0: <41} | {1: >14} | {2: >8.2} | {3: >14} | {4: >8}",
                format!("{:?}", category),
                data.total_time(),
                ((data.total_time() as f32) / total_time) * 100.0,
                category_total,
                format!("{:.2}", category_hit_percent),
            ).unwrap();

            // In verbose mode, show individual query data.
            if results.verbose {
                // Don't show queries that took less than 1ms.
                let mut times: Vec<_> = data.query_times.iter().filter(|(_, t)| **t > 0).collect();
                times.sort_by(|(_, time1), (_, time2)| time2.cmp(time1));

                for (query, time) in times {
                    let (hits, total) = data.query_cache_stats.get(query).unwrap_or(&(0, 0));
                    let hit_percent = calculate_percent(*hits, *total);

                    writeln!(
                        lock,
                        "| - {0: <39} | {1: >14} | {2: >8.2} | {3: >14} | {4: >8}",
                        query,
                        time,
                        ((*time as f32) / total_time) * 100.0,
                        total,
                        format!("{:.2}", hit_percent),
                    ).unwrap();
                }
            }
        }

        writeln!(lock).unwrap();
        writeln!(lock, "Optimization level: {:?}", opts.optimize).unwrap();
        writeln!(lock, "Incremental: {}", if results.incremental { "on" } else { "off" }).unwrap();
    }
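
    /// Writes the same summary as JSON to `self_profiler_results.json` in the
    /// current directory.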
    pub fn save_results(&self, opts: &Options) {
        let results = self.get_results(opts);

        let compilation_options =
            format!("{{ \"optimization_level\": \"{:?}\", \"incremental\": {} }}",
                    results.optimization_level,
                    if results.incremental { "true" } else { "false" });

        let mut category_data = Vec::new();

        for (category, data) in &results.categories {
            let (hits, total) = data.total_cache_data();
            let hit_percent = calculate_percent(hits, total);

            category_data.push(format!("{{ \"category\": \"{:?}\", \"time_ms\": {}, \
                                        \"query_count\": {}, \"query_hits\": {:.2} }}",
                                       category,
                                       data.total_time(),
                                       total,
                                       hit_percent));
        }

        // Join the per-category objects into a JSON array; concatenating them
        // into a bare string would produce invalid JSON whenever more than one
        // category has data.
        let json = format!("{{ \"category_data\": [{}], \"compilation_options\": {} }}",
                           category_data.join(", "),
                           compilation_options);

        fs::write("self_profiler_results.json", json).unwrap();
    }
}