Auto merge of #68001 - Marwes:lexical_region_resolve, r=nikomatsakis
perf: Only search potentially changed constraints in lexical_region_resolve

Gives a big performance increase to the unicode_normalization benchmark in my testing.
commit 779f85bac6
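
As an editorial illustration only (not code from this commit), the sketch below shows the worklist idea the new `expansion` loop is built around, using made-up stand-in types: `Var` is just an index and `Region` is a `u32` lattice joined with `max`, in place of rustc's region machinery. Each variable records which constraints read it, so after the initial pass only constraints whose input value actually grew are revisited, instead of re-scanning every constraint until a whole pass makes no change.

// Editorial sketch only: stand-in types, not rustc's region machinery.
type Var = usize;
type Region = u32; // joined with `max`; a bigger value means a bigger region

/// A single `a <= b` constraint between two variables.
struct Constraint {
    a: Var,
    b: Var,
}

/// Grow `values` until every constraint `a <= b` holds, revisiting a
/// constraint only when its input variable `a` has actually changed.
fn expansion(values: &mut [Region], constraints: &[Constraint]) {
    // For each variable, the indices of the constraints that read it.
    let mut dependents: Vec<Vec<usize>> = vec![Vec::new(); values.len()];
    // Worklist of variables whose value grew since they were last visited.
    let mut changes: Vec<Var> = Vec::new();

    // First pass: process every constraint once and record who depends on what.
    for (index, c) in constraints.iter().enumerate() {
        dependents[c.a].push(index);
        if values[c.b] < values[c.a] {
            values[c.b] = values[c.a];
            changes.push(c.b);
        }
    }

    // Afterwards, only constraints that read a changed variable are reprocessed.
    while let Some(vid) = changes.pop() {
        for &index in &dependents[vid] {
            let c = &constraints[index];
            if values[c.b] < values[c.a] {
                values[c.b] = values[c.a];
                changes.push(c.b);
            }
        }
    }
}

fn main() {
    // 0 <= 1 and 1 <= 2, with variable 0 pinned at 3.
    let constraints = [Constraint { a: 0, b: 1 }, Constraint { a: 1, b: 2 }];
    let mut values = vec![3, 0, 0];
    expansion(&mut values, &constraints);
    assert_eq!(values, vec![3, 3, 3]);
}

In the commit itself the per-variable lists live in an IndexVec keyed by region vid, and a VarSubVar constraint is only registered when its target is not already 'static or an error value, since expansion can never change such a target again.
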
@@ -19,7 +19,6 @@
     Direction, Graph, NodeIndex, INCOMING, OUTGOING,
 };
 use rustc_hir::def_id::DefId;
-use rustc_index::bit_set::BitSet;
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_span::Span;
 use std::fmt;
@@ -295,62 +294,59 @@ fn enforce_member_constraint(
     }

     fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
-        let mut process_constraint = |constraint: &Constraint<'tcx>| {
-            let (a_region, b_vid, b_data, retain) = match *constraint {
+        let mut constraints = IndexVec::from_elem_n(Vec::new(), var_values.values.len());
+        let mut changes = Vec::new();
+        for constraint in self.data.constraints.keys() {
+            let (a_vid, a_region, b_vid, b_data) = match *constraint {
                 Constraint::RegSubVar(a_region, b_vid) => {
                     let b_data = var_values.value_mut(b_vid);
-                    (a_region, b_vid, b_data, false)
+                    (None, a_region, b_vid, b_data)
                 }
                 Constraint::VarSubVar(a_vid, b_vid) => match *var_values.value(a_vid) {
-                    VarValue::ErrorValue => return (false, false),
+                    VarValue::ErrorValue => continue,
                     VarValue::Value(a_region) => {
                         let b_data = var_values.value_mut(b_vid);
-                        let retain = match *b_data {
-                            VarValue::Value(ReStatic) | VarValue::ErrorValue => false,
-                            _ => true,
-                        };
-                        (a_region, b_vid, b_data, retain)
+                        (Some(a_vid), a_region, b_vid, b_data)
                     }
                 },
                 Constraint::RegSubReg(..) | Constraint::VarSubReg(..) => {
                     // These constraints are checked after expansion
                     // is done, in `collect_errors`.
-                    return (false, false);
+                    continue;
                 }
             };
-
-            let changed = self.expand_node(a_region, b_vid, b_data);
-            (changed, retain)
-        };
-
-        // Using bitsets to track the remaining elements is faster than using a
-        // `Vec` by itself (which requires removing elements, which requires
-        // element shuffling, which is slow).
-        let constraints: Vec<_> = self.data.constraints.keys().collect();
-        let mut live_indices: BitSet<usize> = BitSet::new_filled(constraints.len());
-        let mut killed_indices: BitSet<usize> = BitSet::new_empty(constraints.len());
-        let mut changed = true;
-        while changed {
-            changed = false;
-            for index in live_indices.iter() {
-                let constraint = constraints[index];
-                let (edge_changed, retain) = process_constraint(constraint);
-                changed |= edge_changed;
-                if !retain {
-                    let changed = killed_indices.insert(index);
-                    debug_assert!(changed);
+            if self.expand_node(a_region, b_vid, b_data) {
+                changes.push(b_vid);
+            }
+            if let Some(a_vid) = a_vid {
+                match *b_data {
+                    VarValue::Value(ReStatic) | VarValue::ErrorValue => (),
+                    _ => {
+                        constraints[a_vid].push((a_vid, b_vid));
+                        constraints[b_vid].push((a_vid, b_vid));
+                    }
                 }
             }
-            live_indices.subtract(&killed_indices);
         }

-        // We could clear `killed_indices` here, but we don't need to and
-        // it's cheaper not to.
+        while let Some(vid) = changes.pop() {
+            constraints[vid].retain(|&(a_vid, b_vid)| {
+                let a_region = match *var_values.value(a_vid) {
+                    VarValue::ErrorValue => return false,
+                    VarValue::Value(a_region) => a_region,
+                };
+                let b_data = var_values.value_mut(b_vid);
+                if self.expand_node(a_region, b_vid, b_data) {
+                    changes.push(b_vid);
+                }
+                match *b_data {
+                    VarValue::Value(ReStatic) | VarValue::ErrorValue => false,
+                    _ => true,
+                }
+            });
+        }
     }

     // This function is very hot in some workloads. There's a single callsite
     // so always inlining is ok even though it's large.
     #[inline(always)]
     fn expand_node(
         &self,
         a_region: Region<'tcx>,
@@ -790,8 +786,8 @@ fn region_order_key(x: &RegionAndOrigin<'_>) -> u8 {
                 self.var_infos[node_idx].origin.span(),
                 &format!(
                     "collect_error_for_expanding_node() could not find \
-                     error for var {:?} in universe {:?}, lower_bounds={:#?}, \
-                     upper_bounds={:#?}",
+                    error for var {:?} in universe {:?}, lower_bounds={:#?}, \
+                    upper_bounds={:#?}",
                     node_idx, node_universe, lower_bounds, upper_bounds
                 ),
             );
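
The comment in the removed code above explains why the old fixed-point loop tracked the remaining constraints with two bit sets (`live_indices` and `killed_indices`) instead of deleting entries from a `Vec`, which would shift every later element on each removal. Purely as an illustration of that pattern (none of this is code from the commit), here is a self-contained sketch with plain `Vec<bool>` standing in for `rustc_index::bit_set::BitSet`:

/// Editorial sketch: repeatedly process the still-live items until a pass
/// changes nothing. `process(index)` returns `(changed, retain)`; items that
/// report `retain == false` are marked killed and dropped in bulk after the
/// pass, mirroring `live_indices.subtract(&killed_indices)` in the removed code.
fn fixed_point<F>(len: usize, mut process: F)
where
    F: FnMut(usize) -> (bool, bool),
{
    let mut live = vec![true; len]; // stand-in for BitSet::new_filled(len)
    let mut killed = vec![false; len]; // stand-in for BitSet::new_empty(len)
    let mut changed = true;

    while changed {
        changed = false;
        for index in 0..len {
            if !live[index] {
                continue;
            }
            let (edge_changed, retain) = process(index);
            changed |= edge_changed;
            if !retain {
                killed[index] = true;
            }
        }
        // Bulk subtraction instead of per-element removal from a `Vec`.
        for (l, &k) in live.iter_mut().zip(&killed) {
            *l &= !k;
        }
    }
}

fn main() {
    // Toy use: propagate `a <= b` constraints over u32 values joined by `max`.
    // A constraint whose target has reached the top of the lattice (u32::MAX,
    // standing in for 'static) can never fire again, so it is not retained.
    let constraints = [(0usize, 1usize), (1, 2)];
    let mut values = vec![u32::MAX, 0, 0];
    fixed_point(constraints.len(), |i| {
        let (a, b) = constraints[i];
        let changed = values[b] < values[a];
        if changed {
            values[b] = values[a];
        }
        (changed, values[b] != u32::MAX)
    });
    assert_eq!(values, vec![u32::MAX, u32::MAX, u32::MAX]);
}

With the per-variable dependency lists built in its first pass, the new code drops this pass-until-nothing-changes loop entirely and only revisits the constraints that read a variable whose value just grew.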