Auto merge of #96500 - SparrowLii:rpo, r=tmiasko
Reduce duplication of RPO calculation of mir

Computing the RPO of a MIR body is not cheap, yet it is duplicated in many places, in particular in the `iterate_to_fixpoint` method, which is called multiple times when computing dataflow. This PR reduces the number of times the RPO is recalculated as much as possible, which should save some compile time.
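The change boils down to one pattern: compute the postorder of a body once, memoize it, and derive the reverse postorder by walking the cached `Vec` backwards. Below is a minimal standalone sketch of that idea, using a toy `ToyBody` type and a hard-coded order rather than the rustc types:

```rust
use std::cell::{Cell, OnceCell};

// Toy stand-in for rustc's `Body`: the postorder is memoized so that every
// consumer (dataflow, codegen, promotion, ...) shares one traversal.
struct ToyBody {
    blocks: Vec<&'static str>,
    postorder_cache: OnceCell<Vec<usize>>,
    traversals: Cell<usize>, // counts how often the "expensive" traversal ran
}

impl ToyBody {
    fn postorder(&self) -> &[usize] {
        self.postorder_cache.get_or_init(|| {
            self.traversals.set(self.traversals.get() + 1);
            // Pretend this is the costly CFG walk; the order is hard-coded here.
            vec![2, 1, 0]
        })
    }

    // Reverse postorder is just the cached postorder walked from the back.
    fn reverse_postorder(&self) -> impl Iterator<Item = (usize, &'static str)> + '_ {
        self.postorder().iter().rev().map(move |&i| (i, self.blocks[i]))
    }
}

fn main() {
    let body = ToyBody {
        blocks: vec!["start", "loop", "exit"],
        postorder_cache: OnceCell::new(),
        traversals: Cell::new(0),
    };
    // Two independent passes over the body...
    let first: Vec<_> = body.reverse_postorder().collect();
    let second: Vec<_> = body.reverse_postorder().collect();
    assert_eq!(first, second);
    // ...but the traversal itself only ran once.
    assert_eq!(body.traversals.get(), 1);
}
```

The real change stores the cache on `Body` itself (the `PostorderCache` added below), so dataflow, codegen, and const promotion all share a single traversal.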
commit 9a98c63b30
@ -244,7 +244,6 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     fx.debug_introduce_locals(&mut bx);

     // Codegen the body of each block using reverse postorder
-    // FIXME(eddyb) reuse RPO iterator between `analysis` and this.
     for (bb, _) in traversal::reverse_postorder(&mir) {
         fx.codegen_block(bb);
     }
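For context (this is my gloss, not part of the diff): reverse postorder is the order both codegen and forward dataflow want, because every block is visited before its successors as long as no back edge is involved. A small self-contained check of that property on a toy CFG:

```rust
fn postorder(n: usize, succs: &[Vec<usize>], seen: &mut [bool], out: &mut Vec<usize>) {
    if seen[n] {
        return;
    }
    seen[n] = true;
    for &s in &succs[n] {
        postorder(s, succs, seen, out);
    }
    out.push(n); // a node is emitted only after all of its successors
}

fn main() {
    // Tiny acyclic CFG: 0 -> {1, 2}, 1 -> {3}, 2 -> {3}, 3 -> {}.
    let succs: Vec<Vec<usize>> = vec![vec![1, 2], vec![3], vec![3], vec![]];

    let mut seen = vec![false; succs.len()];
    let mut post = Vec::new();
    postorder(0, &succs, &mut seen, &mut post);
    let rpo: Vec<usize> = post.iter().rev().copied().collect();

    // Record where each node lands in the reverse postorder.
    let mut pos = vec![usize::MAX; succs.len()];
    for (i, &n) in rpo.iter().enumerate() {
        pos[n] = i;
    }

    // Every block precedes all of its successors, so codegen (and forward
    // dataflow) sees a block before anything it branches to.
    for n in 0..succs.len() {
        for &s in &succs[n] {
            assert!(pos[n] < pos[s]);
        }
    }
    println!("reverse postorder: {rpo:?}"); // prints [0, 2, 1, 3]
}
```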
@ -13,7 +13,7 @@
 //! move analysis runs after promotion on broken MIR.

 use rustc_hir as hir;
-use rustc_middle::mir::traversal::ReversePostorder;
+use rustc_middle::mir::traversal::ReversePostorderIter;
 use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::*;
 use rustc_middle::ty::cast::CastTy;
@ -170,7 +170,7 @@ impl<'tcx> Visitor<'tcx> for Collector<'_, 'tcx> {

 pub fn collect_temps_and_candidates<'tcx>(
     ccx: &ConstCx<'_, 'tcx>,
-    rpo: &mut ReversePostorder<'_, 'tcx>,
+    rpo: &mut ReversePostorderIter<'_, 'tcx>,
 ) -> (IndexVec<Local, TempState>, Vec<Candidate>) {
     let mut collector = Collector {
         temps: IndexVec::from_elem(TempState::Undefined, &ccx.body.local_decls),
@ -62,7 +62,9 @@ pub mod spanview;
 mod switch_sources;
 pub mod tcx;
 pub mod terminator;
+use crate::mir::traversal::PostorderCache;
 pub use terminator::*;
+
 pub mod traversal;
 mod type_foldable;
 pub mod visit;
@ -323,6 +325,7 @@ pub struct Body<'tcx> {
     predecessor_cache: PredecessorCache,
     switch_source_cache: SwitchSourceCache,
     is_cyclic: GraphIsCyclicCache,
+    postorder_cache: PostorderCache,

     pub tainted_by_errors: Option<ErrorGuaranteed>,
 }
@ -372,6 +375,7 @@ impl<'tcx> Body<'tcx> {
             predecessor_cache: PredecessorCache::new(),
             switch_source_cache: SwitchSourceCache::new(),
             is_cyclic: GraphIsCyclicCache::new(),
+            postorder_cache: PostorderCache::new(),
             tainted_by_errors,
         };
         body.is_polymorphic = body.has_param_types_or_consts();
@ -401,6 +405,7 @@ impl<'tcx> Body<'tcx> {
             predecessor_cache: PredecessorCache::new(),
             switch_source_cache: SwitchSourceCache::new(),
             is_cyclic: GraphIsCyclicCache::new(),
+            postorder_cache: PostorderCache::new(),
             tainted_by_errors: None,
         };
         body.is_polymorphic = body.has_param_types_or_consts();
@ -422,6 +427,7 @@ impl<'tcx> Body<'tcx> {
         self.predecessor_cache.invalidate();
         self.switch_source_cache.invalidate();
         self.is_cyclic.invalidate();
+        self.postorder_cache.invalidate();
         &mut self.basic_blocks
     }

@ -432,6 +438,7 @@ impl<'tcx> Body<'tcx> {
         self.predecessor_cache.invalidate();
         self.switch_source_cache.invalidate();
         self.is_cyclic.invalidate();
+        self.postorder_cache.invalidate();
         (&mut self.basic_blocks, &mut self.local_decls)
     }

@ -446,6 +453,7 @@ impl<'tcx> Body<'tcx> {
         self.predecessor_cache.invalidate();
         self.switch_source_cache.invalidate();
         self.is_cyclic.invalidate();
+        self.postorder_cache.invalidate();
         (&mut self.basic_blocks, &mut self.local_decls, &mut self.var_debug_info)
     }
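The flip side of the memoization shows up in these three accessors: any route to `&mut` basic blocks drops the cached postorder first, so a stale order can never be observed after the CFG changes. A toy illustration of the same invalidate-on-mutation pattern (hypothetical names, not the rustc API):

```rust
use std::cell::OnceCell;

struct ToyBody {
    blocks: Vec<u32>,
    postorder_cache: OnceCell<Vec<usize>>,
}

impl ToyBody {
    fn postorder(&self) -> &Vec<usize> {
        // Hard-coded order; think "expensive CFG walk" here.
        self.postorder_cache.get_or_init(|| (0..self.blocks.len()).rev().collect())
    }

    // Handing out `&mut` to the blocks may change the CFG, so the derived
    // cache is dropped first, mirroring `basic_blocks_mut` in the diff.
    fn blocks_mut(&mut self) -> &mut Vec<u32> {
        self.postorder_cache = OnceCell::new();
        &mut self.blocks
    }
}

fn main() {
    let mut body = ToyBody { blocks: vec![10, 20], postorder_cache: OnceCell::new() };
    assert_eq!(body.postorder(), &vec![1, 0]);

    body.blocks_mut().push(30); // mutation invalidates the cache...
    assert_eq!(body.postorder(), &vec![2, 1, 0]); // ...so it is lazily recomputed
}
```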
@ -1,4 +1,7 @@
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::sync::OnceCell;
 use rustc_index::bit_set::BitSet;
+use rustc_serialize as serialize;

 use super::*;

@ -268,10 +271,6 @@ impl<'a, 'tcx> ReversePostorder<'a, 'tcx> {
     }
 }

-pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorder<'a, 'tcx> {
-    ReversePostorder::new(body, START_BLOCK)
-}
-
 impl<'a, 'tcx> Iterator for ReversePostorder<'a, 'tcx> {
     type Item = (BasicBlock, &'a BasicBlockData<'tcx>);
@ -307,3 +306,86 @@ pub fn reachable_as_bitset<'tcx>(body: &Body<'tcx>) -> BitSet<BasicBlock> {
     (&mut iter).for_each(drop);
     iter.visited
 }
+
+#[derive(Clone)]
+pub struct ReversePostorderIter<'a, 'tcx> {
+    body: &'a Body<'tcx>,
+    blocks: &'a Vec<BasicBlock>,
+    idx: usize,
+}
+
+impl<'a, 'tcx> Iterator for ReversePostorderIter<'a, 'tcx> {
+    type Item = (BasicBlock, &'a BasicBlockData<'tcx>);
+
+    fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> {
+        if self.idx == 0 {
+            return None;
+        }
+        self.idx -= 1;
+
+        self.blocks.get(self.idx).map(|&bb| (bb, &self.body[bb]))
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.idx, Some(self.idx))
+    }
+}
+
+impl<'a, 'tcx> ExactSizeIterator for ReversePostorderIter<'a, 'tcx> {}
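A note on the iterator's shape (my own reading of the code above): `idx` counts the elements still to be yielded, so decrementing it from `len` walks the borrowed postorder back to front, and because that count is always exact, `size_hint` plus the empty `ExactSizeIterator` impl let consumers preallocate or ask for `len()`. A stripped-down standalone version of the same technique:

```rust
// Reverse iteration over a borrowed slice by counting an index down,
// the same shape as ReversePostorderIter in the diff.
struct RevIdx<'a, T> {
    items: &'a [T],
    idx: usize, // number of elements still to yield
}

impl<'a, T> Iterator for RevIdx<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        if self.idx == 0 {
            return None;
        }
        self.idx -= 1;
        self.items.get(self.idx)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.idx, Some(self.idx)) // exact, which enables ExactSizeIterator
    }
}

impl<'a, T> ExactSizeIterator for RevIdx<'a, T> {}

fn main() {
    let postorder = vec!['c', 'b', 'a'];
    let it = RevIdx { items: &postorder, idx: postorder.len() };
    assert_eq!(it.len(), 3); // len() comes from the exact size_hint
    assert_eq!(it.collect::<Vec<_>>(), vec![&'a', &'b', &'c']);
}
```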
+
+pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorderIter<'a, 'tcx> {
+    let blocks = body.postorder_cache.compute(body);
+
+    let len = blocks.len();
+
+    ReversePostorderIter { body, blocks, idx: len }
+}
+
+#[derive(Clone, Debug)]
+pub(super) struct PostorderCache {
+    cache: OnceCell<Vec<BasicBlock>>,
+}
+
+impl PostorderCache {
+    #[inline]
+    pub(super) fn new() -> Self {
+        PostorderCache { cache: OnceCell::new() }
+    }
+
+    /// Invalidates the postorder cache.
+    #[inline]
+    pub(super) fn invalidate(&mut self) {
+        self.cache = OnceCell::new();
+    }
+
+    /// Returns the `&Vec<BasicBlock>` representing the postorder graph for this MIR.
+    #[inline]
+    pub(super) fn compute(&self, body: &Body<'_>) -> &Vec<BasicBlock> {
+        self.cache.get_or_init(|| Postorder::new(body, START_BLOCK).map(|(bb, _)| bb).collect())
+    }
+}
+
+impl<S: serialize::Encoder> serialize::Encodable<S> for PostorderCache {
+    #[inline]
+    fn encode(&self, s: &mut S) -> Result<(), S::Error> {
+        s.emit_unit()
+    }
+}
+
+impl<D: serialize::Decoder> serialize::Decodable<D> for PostorderCache {
+    #[inline]
+    fn decode(_: &mut D) -> Self {
+        Self::new()
+    }
+}
+
+impl<CTX> HashStable<CTX> for PostorderCache {
+    #[inline]
+    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
+        // do nothing
+    }
+}
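Why the three no-op trait impls (my reading): the cache is pure derived data, so it must not leak into an encoded `Body` or into its stable hash; encoding writes nothing, decoding yields a fresh empty cache, and hashing ignores it, letting the postorder simply be recomputed on demand. The same principle in plain `std` terms, with a hash impl that skips a cache field:

```rust
use std::cell::OnceCell;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct Cached {
    data: Vec<u32>,
    // Derived from `data`; deliberately excluded from hashing/serialization.
    sum_cache: OnceCell<u32>,
}

impl Hash for Cached {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Only the real data contributes; whether the cache is filled is
        // irrelevant, just as PostorderCache::hash_stable does nothing.
        self.data.hash(state);
    }
}

impl Cached {
    fn sum(&self) -> u32 {
        *self.sum_cache.get_or_init(|| self.data.iter().sum())
    }
}

fn fingerprint(c: &Cached) -> u64 {
    let mut h = DefaultHasher::new();
    c.hash(&mut h);
    h.finish()
}

fn main() {
    let a = Cached { data: vec![1, 2, 3], sum_cache: OnceCell::new() };
    let b = Cached { data: vec![1, 2, 3], sum_cache: OnceCell::new() };
    let before = fingerprint(&a);
    a.sum(); // fill the cache on `a` only
    // Filling the cache changes neither a's fingerprint nor its equality with b's.
    assert_eq!(fingerprint(&a), before);
    assert_eq!(fingerprint(&a), fingerprint(&b));
}
```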
+
+TrivialTypeFoldableAndLiftImpls! {
+    PostorderCache,
+}