Replace Body::basic_blocks() with field access

Tomasz Miąsko 2022-07-05 00:00:00 +00:00
parent 983f4daddf
commit b48870b451
65 changed files with 131 additions and 140 deletions
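The change is mechanical across all 65 files: call sites that previously went through the `Body::basic_blocks()` accessor now read the `basic_blocks` field directly, and the accessor itself is deleted (see the `rustc_middle` hunk below). A minimal before/after sketch of the caller-side pattern; the helper function here is hypothetical and only illustrates the shape of the change, assuming `rustc_middle::mir::Body` is in scope:

    // Hypothetical helper, for illustration of the pattern only.
    fn count_statements(body: &Body<'_>) -> usize {
        // Before: body.basic_blocks().iter().map(|bb| bb.statements.len()).sum()
        // After: direct field access; the field derefs to the same IndexVec of blocks.
        body.basic_blocks.iter().map(|bb| bb.statements.len()).sum()
    }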

View File

@@ -31,7 +31,7 @@ pub(super) fn generate_constraints<'cx, 'tcx>(
 body,
 };
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 cg.visit_basic_block_data(bb, data);
 }
 }

View File

@@ -143,7 +143,7 @@ struct OutOfScopePrecomputer<'a, 'tcx> {
 impl<'a, 'tcx> OutOfScopePrecomputer<'a, 'tcx> {
 fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self {
 OutOfScopePrecomputer {
-visited: BitSet::new_empty(body.basic_blocks().len()),
+visited: BitSet::new_empty(body.basic_blocks.len()),
 visit_stack: vec![],
 body,
 regioncx,

View File

@@ -459,7 +459,7 @@ fn reach_through_backedge(&self, from: Location, to: Location) -> Option<Locatio
 return outmost_back_edge;
 }
-let block = &self.body.basic_blocks()[location.block];
+let block = &self.body.basic_blocks[location.block];
 if location.statement_index < block.statements.len() {
 let successor = location.successor_within_block();
@@ -518,7 +518,7 @@ fn find_loop_head_dfs(
 }
 if loop_head.dominates(from, &self.dominators) {
-let block = &self.body.basic_blocks()[from.block];
+let block = &self.body.basic_blocks[from.block];
 if from.statement_index < block.statements.len() {
 let successor = from.successor_within_block();
@@ -568,7 +568,7 @@ fn later_use_kind(
 UseSpans::PatUse(span)
 | UseSpans::OtherUse(span)
 | UseSpans::FnSelfUse { var_span: span, .. } => {
-let block = &self.body.basic_blocks()[location.block];
+let block = &self.body.basic_blocks[location.block];
 let kind = if let Some(&Statement {
 kind: StatementKind::FakeRead(box (FakeReadCause::ForLet(_), _)),

View File

@@ -88,7 +88,7 @@ fn append_to_grouped_errors(
 if let Some(StatementKind::Assign(box (
 place,
 Rvalue::Use(Operand::Move(move_from)),
-))) = self.body.basic_blocks()[location.block]
+))) = self.body.basic_blocks[location.block]
 .statements
 .get(location.statement_index)
 .map(|stmt| &stmt.kind)

View File

@@ -33,7 +33,7 @@ impl LocationTable {
 pub(crate) fn new(body: &Body<'_>) -> Self {
 let mut num_points = 0;
 let statements_before_block = body
-.basic_blocks()
+.basic_blocks
 .iter()
 .map(|block_data| {
 let v = num_points;

View File

@@ -25,7 +25,7 @@ impl RegionValueElements {
 pub(crate) fn new(body: &Body<'_>) -> Self {
 let mut num_points = 0;
 let statements_before_block: IndexVec<BasicBlock, usize> = body
-.basic_blocks()
+.basic_blocks
 .iter()
 .map(|block_data| {
 let v = num_points;
@@ -37,7 +37,7 @@ pub(crate) fn new(body: &Body<'_>) -> Self {
 debug!("RegionValueElements: num_points={:#?}", num_points);
 let mut basic_blocks = IndexVec::with_capacity(num_points);
-for (bb, bb_data) in body.basic_blocks().iter_enumerated() {
+for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
 basic_blocks.extend((0..=bb_data.statements.len()).map(|_| bb));
 }

View File

@@ -2633,7 +2633,7 @@ fn typeck_mir(&mut self, body: &Body<'tcx>) {
 self.check_local(&body, local, local_decl);
 }
-for (block, block_data) in body.basic_blocks().iter_enumerated() {
+for (block, block_data) in body.basic_blocks.iter_enumerated() {
 let mut location = Location { block, statement_index: 0 };
 for stmt in &block_data.statements {
 if !stmt.source_info.span.is_dummy() {

View File

@@ -26,7 +26,7 @@ pub(crate) fn analyze(fx: &FunctionCx<'_, '_, '_>) -> IndexVec<Local, SsaKind> {
 })
 .collect::<IndexVec<Local, SsaKind>>();
-for bb in fx.mir.basic_blocks().iter() {
+for bb in fx.mir.basic_blocks.iter() {
 for stmt in bb.statements.iter() {
 match &stmt.kind {
 Assign(place_and_rval) => match &place_and_rval.1 {

View File

@@ -73,7 +73,7 @@ pub(crate) fn codegen_fn<'tcx>(
 // Predefine blocks
 let start_block = bcx.create_block();
 let block_map: IndexVec<BasicBlock, Block> =
-(0..mir.basic_blocks().len()).map(|_| bcx.create_block()).collect();
+(0..mir.basic_blocks.len()).map(|_| bcx.create_block()).collect();
 // Make FunctionCx
 let target_config = module.target_config();
@@ -271,7 +271,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
 }
 fx.tcx.sess.time("codegen prelude", || crate::abi::codegen_fn_prelude(fx, start_block));
-for (bb, bb_data) in fx.mir.basic_blocks().iter_enumerated() {
+for (bb, bb_data) in fx.mir.basic_blocks.iter_enumerated() {
 let block = fx.get_block(bb);
 fx.bcx.switch_to_block(block);

View File

@@ -505,7 +505,7 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
 return None;
 }
 let mut computed_const_val = None;
-for bb_data in fx.mir.basic_blocks() {
+for bb_data in fx.mir.basic_blocks.iter() {
 for stmt in &bb_data.statements {
 match &stmt.kind {
 StatementKind::Assign(local_and_rvalue) if &local_and_rvalue.0 == place => {

View File

@@ -266,7 +266,7 @@ fn discover_masters<'tcx>(
 result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
 mir: &mir::Body<'tcx>,
 ) {
-for (bb, data) in mir.basic_blocks().iter_enumerated() {
+for (bb, data) in mir.basic_blocks.iter_enumerated() {
 match data.terminator().kind {
 TerminatorKind::Goto { .. }
 | TerminatorKind::Resume
@@ -296,7 +296,7 @@ fn discover_masters<'tcx>(
 }
 fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mir::Body<'tcx>) {
-let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());
+let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);
 let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
 ref mut s @ None => {
@@ -359,7 +359,7 @@ fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mi
 }
 }
-let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());
+let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);
 discover_masters(&mut result, mir);
 propagate(&mut result, mir);

View File

@@ -150,13 +150,13 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 let start_llbb = Bx::append_block(cx, llfn, "start");
 let mut bx = Bx::build(cx, start_llbb);
-if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
+if mir.basic_blocks.iter().any(|bb| bb.is_cleanup) {
 bx.set_personality_fn(cx.eh_personality());
 }
 let cleanup_kinds = analyze::cleanup_kinds(&mir);
 let cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>> = mir
-.basic_blocks()
+.basic_blocks
 .indices()
 .map(|bb| if bb == mir::START_BLOCK { Some(start_llbb) } else { None })
 .collect();
@@ -172,8 +172,8 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 unreachable_block: None,
 double_unwind_guard: None,
 cleanup_kinds,
-landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
-funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks().len()),
+landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
+funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
 locals: IndexVec::new(),
 debug_context,
 per_local_var_debug_info: None,

View File

@@ -782,7 +782,7 @@ pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx>
 assert_eq!(
 unwinding,
 match self.frame().loc {
-Ok(loc) => self.body().basic_blocks()[loc.block].is_cleanup,
+Ok(loc) => self.body().basic_blocks[loc.block].is_cleanup,
 Err(_) => true,
 }
 );

View File

@@ -28,7 +28,7 @@ pub(crate) fn find_closest_untracked_caller_location(&self) -> Span {
 let mut source_info = *frame.body.source_info(loc);
 // If this is a `Call` terminator, use the `fn_span` instead.
-let block = &frame.body.basic_blocks()[loc.block];
+let block = &frame.body.basic_blocks[loc.block];
 if loc.statement_index == block.statements.len() {
 debug!(
 "find_closest_untracked_caller_location: got terminator {:?} ({:?})",

View File

@@ -53,7 +53,7 @@ pub fn step(&mut self) -> InterpResult<'tcx, bool> {
 self.pop_stack_frame(/* unwinding */ true)?;
 return Ok(true);
 };
-let basic_block = &self.body().basic_blocks()[loc.block];
+let basic_block = &self.body().basic_blocks[loc.block];
 if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
 let old_frames = self.frame_idx();

View File

@@ -135,7 +135,7 @@ fn in_return_place(
 // qualifs for the return type.
 let return_block = ccx
 .body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
 .map(|(bb, _)| bb);

View File

@@ -710,7 +710,7 @@ fn new_block(&mut self) -> BasicBlock {
 }
 fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) {
-let last = self.promoted.basic_blocks().last().unwrap();
+let last = self.promoted.basic_blocks.last().unwrap();
 let data = &mut self.promoted[last];
 data.statements.push(Statement {
 source_info: SourceInfo::outermost(span),
@@ -803,7 +803,7 @@ fn promote_temp(&mut self, temp: Local) -> Local {
 self.visit_operand(arg, loc);
 }
-let last = self.promoted.basic_blocks().last().unwrap();
+let last = self.promoted.basic_blocks.last().unwrap();
 let new_target = self.new_block();
 *self.promoted[last].terminator_mut() = Terminator {
@@ -1041,7 +1041,7 @@ pub fn is_const_fn_in_array_repeat_expression<'tcx>(
 _ => {}
 }
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 if let Some(Terminator { kind: TerminatorKind::Call { func, destination, .. }, .. }) =
 &block.terminator
 {

View File

@@ -140,8 +140,8 @@ fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
 if bb == START_BLOCK {
 self.fail(location, "start block must not have predecessors")
 }
-if let Some(bb) = self.body.basic_blocks().get(bb) {
-let src = self.body.basic_blocks().get(location.block).unwrap();
+if let Some(bb) = self.body.basic_blocks.get(bb) {
+let src = self.body.basic_blocks.get(location.block).unwrap();
 match (src.is_cleanup, bb.is_cleanup, edge_kind) {
 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
 (false, false, EdgeKind::Normal)
@@ -881,13 +881,13 @@ fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location
 }
 TerminatorKind::Resume | TerminatorKind::Abort => {
 let bb = location.block;
-if !self.body.basic_blocks()[bb].is_cleanup {
+if !self.body.basic_blocks[bb].is_cleanup {
 self.fail(location, "Cannot `Resume` or `Abort` from non-cleanup basic block")
 }
 }
 TerminatorKind::Return => {
 let bb = location.block;
-if self.body.basic_blocks()[bb].is_cleanup {
+if self.body.basic_blocks[bb].is_cleanup {
 self.fail(location, "Cannot `Return` from cleanup basic block")
 }
 }

View File

@@ -12,14 +12,14 @@ pub fn mir_fn_to_generic_graph<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Grap
 // Nodes
 let nodes: Vec<Node> = body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .map(|(block, _)| bb_to_graph_node(block, body, dark_mode))
 .collect();
 // Edges
 let mut edges = Vec::new();
-for (source, _) in body.basic_blocks().iter_enumerated() {
+for (source, _) in body.basic_blocks.iter_enumerated() {
 let def_id = body.source.def_id();
 let terminator = body[source].terminator();
 let labels = terminator.kind.fmt_successor_labels();

View File

@@ -331,11 +331,6 @@ pub fn new_cfg_only(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>) ->
 body
 }
-#[inline]
-pub fn basic_blocks(&self) -> &IndexVec<BasicBlock, BasicBlockData<'tcx>> {
-&self.basic_blocks
-}
 #[inline]
 pub fn basic_blocks_mut(&mut self) -> &mut IndexVec<BasicBlock, BasicBlockData<'tcx>> {
 self.basic_blocks.as_mut()
@@ -490,7 +485,7 @@ impl<'tcx> Index<BasicBlock> for Body<'tcx> {
 #[inline]
 fn index(&self, index: BasicBlock) -> &BasicBlockData<'tcx> {
-&self.basic_blocks()[index]
+&self.basic_blocks[index]
 }
 }
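With the inherent accessor removed above, the field is the only read path. The hunks that follow show two recurring consequences: call sites that need a `&IndexVec<BasicBlock, BasicBlockData<'_>>` pass `&body.basic_blocks` and rely on deref coercion, and the const-prop call sites that need an owned `IndexVec` write `(*body.basic_blocks).clone()`. A rough sketch of that relationship, assuming the `basic_blocks` field is a wrapper type whose `Deref` target is the `IndexVec` of block data (the function itself is hypothetical, not from this commit):

    // Illustration only; assumes rustc_middle::mir::{Body, BasicBlock} and
    // rustc_index::vec::IndexVec, plus Deref on the basic_blocks wrapper.
    fn examples(body: &Body<'_>) {
        // Borrowed IndexVec via deref coercion, as in the IndexVec::from_elem calls below.
        let patch_map: IndexVec<BasicBlock, Option<usize>> =
            IndexVec::from_elem(None, &body.basic_blocks);
        // An owned IndexVec needs an explicit deref before cloning,
        // hence `(*body.basic_blocks).clone()` in the const-prop hunks.
        let owned = (*body.basic_blocks).clone();
        let _ = (patch_map, owned);
    }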

View File

@@ -19,7 +19,7 @@ pub struct MirPatch<'tcx> {
 impl<'tcx> MirPatch<'tcx> {
 pub fn new(body: &Body<'tcx>) -> Self {
 let mut result = MirPatch {
-patch_map: IndexVec::from_elem(None, body.basic_blocks()),
+patch_map: IndexVec::from_elem(None, &body.basic_blocks),
 new_blocks: vec![],
 new_statements: vec![],
 new_locals: vec![],
@@ -29,7 +29,7 @@ pub fn new(body: &Body<'tcx>) -> Self {
 };
 // Check if we already have a resume block
-for (bb, block) in body.basic_blocks().iter_enumerated() {
+for (bb, block) in body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::Resume = block.terminator().kind && block.statements.is_empty() {
 result.resume_block = Some(bb);
 break;
@@ -61,7 +61,7 @@ pub fn is_patched(&self, bb: BasicBlock) -> bool {
 }
 pub fn terminator_loc(&self, body: &Body<'tcx>, bb: BasicBlock) -> Location {
-let offset = match bb.index().checked_sub(body.basic_blocks().len()) {
+let offset = match bb.index().checked_sub(body.basic_blocks.len()) {
 Some(index) => self.new_blocks[index].statements.len(),
 None => body[bb].statements.len(),
 };
@@ -129,7 +129,7 @@ pub fn apply(self, body: &mut Body<'tcx>) {
 debug!(
 "MirPatch: {} new blocks, starting from index {}",
 self.new_blocks.len(),
-body.basic_blocks().len()
+body.basic_blocks.len()
 );
 let bbs = if self.patch_map.is_empty() && self.new_blocks.is_empty() {
 body.basic_blocks.as_mut_preserves_cfg()
@@ -173,7 +173,7 @@ pub fn source_info_for_index(data: &BasicBlockData<'_>, loc: Location) -> Source
 }
 pub fn source_info_for_location(&self, body: &Body<'tcx>, loc: Location) -> SourceInfo {
-let data = match loc.block.index().checked_sub(body.basic_blocks().len()) {
+let data = match loc.block.index().checked_sub(body.basic_blocks.len()) {
 Some(new) => &self.new_blocks[new],
 None => &body[loc.block],
 };

View File

@@ -318,10 +318,10 @@ pub fn write_mir_fn<'tcx, F>(
 F: FnMut(PassWhere, &mut dyn Write) -> io::Result<()>,
 {
 write_mir_intro(tcx, body, w)?;
-for block in body.basic_blocks().indices() {
+for block in body.basic_blocks.indices() {
 extra_data(PassWhere::BeforeBlock(block), w)?;
 write_basic_block(tcx, block, body, extra_data, w)?;
-if block.index() + 1 != body.basic_blocks().len() {
+if block.index() + 1 != body.basic_blocks.len() {
 writeln!(w)?;
 }
 }

View File

@@ -105,7 +105,7 @@ pub fn write_mir_fn_spanview<'tcx, W>(
 }
 let body_span = hir_body.unwrap().value.span;
 let mut span_viewables = Vec::new();
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 match spanview {
 MirSpanview::Statement => {
 for (i, statement) in data.statements.iter().enumerate() {

View File

@@ -37,7 +37,7 @@ pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> Preorder<'a, 'tcx> {
 Preorder {
 body,
-visited: BitSet::new_empty(body.basic_blocks().len()),
+visited: BitSet::new_empty(body.basic_blocks.len()),
 worklist,
 root_is_start_block: root == START_BLOCK,
 }
@@ -71,7 +71,7 @@ fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> {
 fn size_hint(&self) -> (usize, Option<usize>) {
 // All the blocks, minus the number of blocks we've visited.
-let upper = self.body.basic_blocks().len() - self.visited.count();
+let upper = self.body.basic_blocks.len() - self.visited.count();
 let lower = if self.root_is_start_block {
 // We will visit all remaining blocks exactly once.

View File

@@ -951,7 +951,7 @@ macro_rules! basic_blocks {
 $body.basic_blocks.as_mut_preserves_cfg()
 };
 ($body:ident,) => {
-$body.basic_blocks()
+$body.basic_blocks
 };
 }

View File

@@ -272,7 +272,7 @@ fn mir_build(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_
 // by borrow checking.
 debug_assert!(
 !(body.local_decls.has_free_regions()
-|| body.basic_blocks().has_free_regions()
+|| body.basic_blocks.has_free_regions()
 || body.var_debug_info.has_free_regions()
 || body.yield_ty().has_free_regions()),
 "Unexpected free regions in MIR: {:?}",

View File

@@ -108,9 +108,9 @@ pub fn new_gen_kill(tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, analysis: A) -
 // Otherwise, compute and store the cumulative transfer function for each block.
 let identity = GenKillSet::identity(analysis.bottom_value(body).domain_size());
-let mut trans_for_block = IndexVec::from_elem(identity, body.basic_blocks());
-for (block, block_data) in body.basic_blocks().iter_enumerated() {
+let mut trans_for_block = IndexVec::from_elem(identity, &body.basic_blocks);
+for (block, block_data) in body.basic_blocks.iter_enumerated() {
 let trans = &mut trans_for_block[block];
 A::Direction::gen_kill_effects_in_block(&analysis, trans, block, block_data);
 }
@@ -144,7 +144,7 @@ fn new(
 apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
 ) -> Self {
 let bottom_value = analysis.bottom_value(body);
-let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), body.basic_blocks());
+let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), &body.basic_blocks);
 analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
 if A::Direction::IS_BACKWARD && entry_sets[mir::START_BLOCK] != bottom_value {
@@ -197,8 +197,7 @@ pub fn iterate_to_fixpoint(self) -> Results<'tcx, A>
 ..
 } = self;
-let mut dirty_queue: WorkQueue<BasicBlock> =
-WorkQueue::with_none(body.basic_blocks().len());
+let mut dirty_queue: WorkQueue<BasicBlock> = WorkQueue::with_none(body.basic_blocks.len());
 if A::Direction::IS_FORWARD {
 for (bb, _) in traversal::reverse_postorder(body) {

View File

@@ -108,12 +108,12 @@ impl<'a, 'tcx, A> dot::GraphWalk<'a> for Formatter<'a, 'tcx, A>
 type Edge = CfgEdge;
 fn nodes(&self) -> dot::Nodes<'_, Self::Node> {
-self.body.basic_blocks().indices().collect::<Vec<_>>().into()
+self.body.basic_blocks.indices().collect::<Vec<_>>().into()
 }
 fn edges(&self) -> dot::Edges<'_, Self::Edge> {
 self.body
-.basic_blocks()
+.basic_blocks
 .indices()
 .flat_map(|bb| dataflow_successors(self.body, bb))
 .collect::<Vec<_>>()

View File

@@ -100,9 +100,9 @@ fn mock_entry_set(&self, bb: BasicBlock) -> BitSet<usize> {
 fn mock_entry_sets(&self) -> IndexVec<BasicBlock, BitSet<usize>> {
 let empty = self.bottom_value(self.body);
-let mut ret = IndexVec::from_elem(empty, &self.body.basic_blocks());
-for (bb, _) in self.body.basic_blocks().iter_enumerated() {
+let mut ret = IndexVec::from_elem(empty, &self.body.basic_blocks);
+for (bb, _) in self.body.basic_blocks.iter_enumerated() {
 ret[bb] = self.mock_entry_set(bb);
 }
@@ -169,7 +169,7 @@ impl<'tcx, D: Direction> AnalysisDomain<'tcx> for MockAnalysis<'tcx, D> {
 const NAME: &'static str = "mock";
 fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
-BitSet::new_empty(Self::BASIC_BLOCK_OFFSET + body.basic_blocks().len())
+BitSet::new_empty(Self::BASIC_BLOCK_OFFSET + body.basic_blocks.len())
 }
 fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
@@ -271,9 +271,7 @@ fn test_cursor<D: Direction>(analysis: MockAnalysis<'_, D>) {
 cursor.allow_unreachable();
 let every_target = || {
-body.basic_blocks()
-.iter_enumerated()
-.flat_map(|(bb, _)| SeekTarget::iter_in_block(body, bb))
+body.basic_blocks.iter_enumerated().flat_map(|(bb, _)| SeekTarget::iter_in_block(body, bb))
 };
 let mut seek_to_target = |targ| {

View File

@@ -243,7 +243,7 @@ pub(super) fn gather_moves<'tcx>(
 builder.gather_args();
-for (bb, block) in body.basic_blocks().iter_enumerated() {
+for (bb, block) in body.basic_blocks.iter_enumerated() {
 for (i, stmt) in block.statements.iter().enumerate() {
 let source = Location { block: bb, statement_index: i };
 builder.gather_statement(source, stmt);

View File

@@ -217,7 +217,7 @@ impl<T> LocationMap<T>
 fn new(body: &Body<'_>) -> Self {
 LocationMap {
 map: body
-.basic_blocks()
+.basic_blocks
 .iter()
 .map(|block| vec![T::default(); block.statements.len() + 1])
 .collect(),

View File

@@ -97,7 +97,7 @@ pub fn sanity_check_via_rustc_peek<'tcx, A>(
 let mut cursor = ResultsCursor::new(body, results);
-let peek_calls = body.basic_blocks().iter_enumerated().filter_map(|(bb, block_data)| {
+let peek_calls = body.basic_blocks.iter_enumerated().filter_map(|(bb, block_data)| {
 PeekCall::from_terminator(tcx, block_data.terminator()).map(|call| (bb, block_data, call))
 });

View File

@@ -7,7 +7,7 @@
 pub fn always_storage_live_locals(body: &mir::Body<'_>) -> BitSet<Local> {
 let mut always_live_locals = BitSet::new_filled(body.local_decls.len());
-for block in body.basic_blocks() {
+for block in &*body.basic_blocks {
 for statement in &block.statements {
 use mir::StatementKind::{StorageDead, StorageLive};
 if let StorageLive(l) | StorageDead(l) = statement.kind {

View File

@@ -56,7 +56,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 // example.
 let mut calls_to_terminate = Vec::new();
 let mut cleanups_to_remove = Vec::new();
-for (id, block) in body.basic_blocks().iter_enumerated() {
+for (id, block) in body.basic_blocks.iter_enumerated() {
 if block.is_cleanup {
 continue;
 }

View File

@@ -45,7 +45,7 @@ pub fn add_call_guards(&self, body: &mut Body<'_>) {
 // We need a place to store the new blocks generated
 let mut new_blocks = Vec::new();
-let cur_len = body.basic_blocks().len();
+let cur_len = body.basic_blocks.len();
 for block in body.basic_blocks_mut() {
 match block.terminator {

View File

@@ -55,7 +55,7 @@ fn add_moves_for_packed_drops_patch<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>)
 let mut patch = MirPatch::new(body);
 let param_env = tcx.param_env(def_id);
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 let loc = Location { block: bb, statement_index: data.statements.len() };
 let terminator = data.terminator();

View File

@@ -61,14 +61,14 @@ fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location
 let _: Option<_> = try {
 let target = terminator.kind.as_goto()?;
 // We only apply this optimization if the last statement is a const assignment
-let last_statement = self.body.basic_blocks()[location.block].statements.last()?;
+let last_statement = self.body.basic_blocks[location.block].statements.last()?;
 if let (place, Rvalue::Use(Operand::Constant(_const))) =
 last_statement.kind.as_assign()?
 {
 // We found a constant being assigned to `place`.
 // Now check that the target of this Goto switches on this place.
-let target_bb = &self.body.basic_blocks()[target];
+let target_bb = &self.body.basic_blocks[target];
 // The `StorageDead(..)` statement does not affect the functionality of mir.
 // We can move this part of the statement up to the predecessor.

View File

@@ -131,7 +131,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 let dummy_body = &Body::new(
 body.source,
-body.basic_blocks().clone(),
+(*body.basic_blocks).clone(),
 body.source_scopes.clone(),
 body.local_decls.clone(),
 Default::default(),

View File

@@ -106,7 +106,7 @@ fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
 let dummy_body = &Body::new(
 body.source,
-body.basic_blocks().clone(),
+(*body.basic_blocks).clone(),
 body.source_scopes.clone(),
 body.local_decls.clone(),
 Default::default(),
@@ -524,7 +524,7 @@ fn const_prop(
 impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
 fn visit_body(&mut self, body: &Body<'tcx>) {
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 self.visit_basic_block_data(bb, data);
 }
 }

View File

@@ -713,7 +713,7 @@ pub fn new(
 ShortCircuitPreorder {
 body,
-visited: BitSet::new_empty(body.basic_blocks().len()),
+visited: BitSet::new_empty(body.basic_blocks.len()),
 worklist,
 filtered_successors,
 }
@@ -747,7 +747,7 @@ fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> {
 }
 fn size_hint(&self) -> (usize, Option<usize>) {
-let size = self.body.basic_blocks().len() - self.visited.count();
+let size = self.body.basic_blocks.len() - self.visited.count();
 (size, Some(size))
 }
 }

View File

@@ -80,7 +80,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, mir_body: &mut mir::Body<'tcx>) {
 return;
 }
-match mir_body.basic_blocks()[mir::START_BLOCK].terminator().kind {
+match mir_body.basic_blocks[mir::START_BLOCK].terminator().kind {
 TerminatorKind::Unreachable => {
 trace!("InstrumentCoverage skipped for unreachable `START_BLOCK`");
 return;

View File

@@ -84,7 +84,7 @@ fn update_from_expression_operand(&mut self, operand_id: u32) {
 }
 fn visit_body(&mut self, body: &Body<'_>) {
-for bb_data in body.basic_blocks().iter() {
+for bb_data in body.basic_blocks.iter() {
 for statement in bb_data.statements.iter() {
 if let StatementKind::Coverage(box ref coverage) = statement.kind {
 if is_inlined(body, statement) {
@@ -138,7 +138,7 @@ fn coverageinfo<'tcx>(tcx: TyCtxt<'tcx>, instance_def: ty::InstanceDef<'tcx>) ->
 fn covered_code_regions<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Vec<&'tcx CodeRegion> {
 let body = mir_body(tcx, def_id);
-body.basic_blocks()
+body.basic_blocks
 .iter()
 .flat_map(|data| {
 data.statements.iter().filter_map(|statement| match statement.kind {

View File

@@ -176,7 +176,7 @@ fn debug_basic_blocks<'tcx>(mir_body: &Body<'tcx>) -> String {
 format!(
 "{:?}",
 mir_body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .map(|(bb, data)| {
 let term = &data.terminator();
@@ -213,7 +213,7 @@ fn print_mir_graphviz(name: &str, mir_body: &Body<'_>) {
 "digraph {} {{\n{}\n}}",
 name,
 mir_body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .map(|(bb, data)| {
 format!(
@@ -653,7 +653,7 @@ fn test_traverse_coverage_with_loops() {
 fn synthesize_body_span_from_terminators(mir_body: &Body<'_>) -> Span {
 let mut some_span: Option<Span> = None;
-for (_, data) in mir_body.basic_blocks().iter_enumerated() {
+for (_, data) in mir_body.basic_blocks.iter_enumerated() {
 let term_span = data.terminator().source_info.span;
 if let Some(span) = some_span.as_mut() {
 *span = span.to(term_span);

View File

@@ -58,7 +58,7 @@ fn find_duplicates(body: &Body<'_>) -> FxHashMap<BasicBlock, BasicBlock> {
 let mut duplicates = FxHashMap::default();
 let bbs_to_go_through =
-body.basic_blocks().iter_enumerated().filter(|(_, bbd)| !bbd.is_cleanup).count();
+body.basic_blocks.iter_enumerated().filter(|(_, bbd)| !bbd.is_cleanup).count();
 let mut same_hashes =
 FxHashMap::with_capacity_and_hasher(bbs_to_go_through, Default::default());
@@ -71,8 +71,7 @@ fn find_duplicates(body: &Body<'_>) -> FxHashMap<BasicBlock, BasicBlock> {
 // When we see bb1, we see that it is a duplicate of bb3, and therefore insert it in the duplicates list
 // with replacement bb3.
 // When the duplicates are removed, we will end up with only bb3.
-for (bb, bbd) in body.basic_blocks().iter_enumerated().rev().filter(|(_, bbd)| !bbd.is_cleanup)
-{
+for (bb, bbd) in body.basic_blocks.iter_enumerated().rev().filter(|(_, bbd)| !bbd.is_cleanup) {
 // Basic blocks can get really big, so to avoid checking for duplicates in basic blocks
 // that are unlikely to have duplicates, we stop early. The early bail number has been
 // found experimentally by eprintln while compiling the crates in the rustc-perf suite.

View File

@@ -150,7 +150,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 def_id,
 body.local_decls.len(),
 relevant,
-body.basic_blocks().len()
+body.basic_blocks.len()
 );
 if relevant > MAX_LOCALS {
 warn!(
@@ -159,11 +159,11 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 );
 return;
 }
-if body.basic_blocks().len() > MAX_BLOCKS {
+if body.basic_blocks.len() > MAX_BLOCKS {
 warn!(
 "too many blocks in {:?} ({}, max is {}), not optimizing",
 def_id,
-body.basic_blocks().len(),
+body.basic_blocks.len(),
 MAX_BLOCKS
 );
 return;

View File

@@ -104,8 +104,8 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 let mut should_cleanup = false;
 // Also consider newly generated bbs in the same pass
-for i in 0..body.basic_blocks().len() {
-let bbs = body.basic_blocks();
+for i in 0..body.basic_blocks.len() {
+let bbs = &*body.basic_blocks;
 let parent = BasicBlock::from_usize(i);
 let Some(opt_data) = evaluate_candidate(tcx, body, parent) else {
 continue
@@ -316,7 +316,7 @@ fn evaluate_candidate<'tcx>(
 body: &Body<'tcx>,
 parent: BasicBlock,
 ) -> Option<OptimizationData<'tcx>> {
-let bbs = body.basic_blocks();
+let bbs = &body.basic_blocks;
 let TerminatorKind::SwitchInt {
 targets,
 switch_ty: parent_ty,

View File

@@ -89,13 +89,13 @@ fn find_dead_unwinds<'tcx>(
 debug!("find_dead_unwinds({:?})", body.span);
 // We only need to do this pass once, because unwind edges can only
 // reach cleanup blocks, which can't have unwind edges themselves.
-let mut dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
+let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len());
 let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &env)
 .into_engine(tcx, body)
 .pass_name("find_dead_unwinds")
 .iterate_to_fixpoint()
 .into_results_cursor(body);
-for (bb, bb_data) in body.basic_blocks().iter_enumerated() {
+for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
 let place = match bb_data.terminator().kind {
 TerminatorKind::Drop { ref place, unwind: Some(_), .. }
 | TerminatorKind::DropAndReplace { ref place, unwind: Some(_), .. } => {
@@ -303,7 +303,7 @@ fn elaborate(mut self) -> MirPatch<'tcx> {
 }
 fn collect_drop_flags(&mut self) {
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 let terminator = data.terminator();
 let place = match terminator.kind {
 TerminatorKind::Drop { ref place, .. }
@@ -358,7 +358,7 @@ fn collect_drop_flags(&mut self) {
 }
 fn elaborate_drops(&mut self) {
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 let loc = Location { block: bb, statement_index: data.statements.len() };
 let terminator = data.terminator();
@@ -515,7 +515,7 @@ fn drop_flags_on_init(&mut self) {
 }
 fn drop_flags_for_fn_rets(&mut self) {
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::Call {
 destination, target: Some(tgt), cleanup: Some(_), ..
 } = data.terminator().kind
@@ -550,7 +550,7 @@ fn drop_flags_for_locs(&mut self) {
 // drop flags by themselves, to avoid the drop flags being
 // clobbered before they are read.
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 debug!("drop_flags_for_locs({:?})", data);
 for i in 0..(data.statements.len() + 1) {
 debug!("drop_flag_for_locs: stmt {}", i);

View File

@@ -65,7 +65,7 @@ fn has_ffi_unwind_calls(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> bool {
 let mut tainted = false;
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 if block.is_cleanup {
 continue;
 }

View File

@@ -490,12 +490,12 @@ fn locals_live_across_suspend_points<'tcx>(
 .iterate_to_fixpoint()
 .into_results_cursor(body_ref);
-let mut storage_liveness_map = IndexVec::from_elem(None, body.basic_blocks());
+let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
 let mut live_locals_at_suspension_points = Vec::new();
 let mut source_info_at_suspension_points = Vec::new();
 let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len());
-for (block, data) in body.basic_blocks().iter_enumerated() {
+for (block, data) in body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::Yield { .. } = data.terminator().kind {
 let loc = Location { block, statement_index: data.statements.len() };
@@ -704,7 +704,7 @@ fn visit_terminator_before_primary_effect(
 impl StorageConflictVisitor<'_, '_, '_> {
 fn apply_state(&mut self, flow_state: &BitSet<Local>, loc: Location) {
 // Ignore unreachable blocks.
-if self.body.basic_blocks()[loc.block].terminator().kind == TerminatorKind::Unreachable {
+if self.body.basic_blocks[loc.block].terminator().kind == TerminatorKind::Unreachable {
 return;
 }
@@ -886,7 +886,7 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, param_env };
-for (block, block_data) in body.basic_blocks().iter_enumerated() {
+for (block, block_data) in body.basic_blocks.iter_enumerated() {
 let (target, unwind, source_info) = match block_data.terminator() {
 Terminator { source_info, kind: TerminatorKind::Drop { place, target, unwind } } => {
 if let Some(local) = place.as_local() {
@@ -991,7 +991,7 @@ fn insert_panic_block<'tcx>(
 body: &mut Body<'tcx>,
 message: AssertMessage<'tcx>,
 ) -> BasicBlock {
-let assert_block = BasicBlock::new(body.basic_blocks().len());
+let assert_block = BasicBlock::new(body.basic_blocks.len());
 let term = TerminatorKind::Assert {
 cond: Operand::Constant(Box::new(Constant {
 span: body.span,
@@ -1021,7 +1021,7 @@ fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, param_env: ty::ParamEn
 }
 // If there's a return terminator the function may return.
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 if let TerminatorKind::Return = block.terminator().kind {
 return true;
 }
@@ -1038,7 +1038,7 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
 }
 // Unwinds can only start at certain terminators.
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 match block.terminator().kind {
 // These never unwind.
 TerminatorKind::Goto { .. }

View File

@@ -95,7 +95,7 @@ fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
 history: Vec::new(),
 changed: false,
 };
-let blocks = BasicBlock::new(0)..body.basic_blocks().next_index();
+let blocks = BasicBlock::new(0)..body.basic_blocks.next_index();
 this.process_blocks(body, blocks);
 this.changed
 }
@@ -217,9 +217,9 @@ fn try_inlining(
 }
 }
-let old_blocks = caller_body.basic_blocks().next_index();
+let old_blocks = caller_body.basic_blocks.next_index();
 self.inline_call(caller_body, &callsite, callee_body);
-let new_blocks = old_blocks..caller_body.basic_blocks().next_index();
+let new_blocks = old_blocks..caller_body.basic_blocks.next_index();
 Ok(new_blocks)
 }
@@ -409,14 +409,14 @@ fn check_mir_body(
 // Give a bonus functions with a small number of blocks,
 // We normally have two or three blocks for even
 // very small functions.
-if callee_body.basic_blocks().len() <= 3 {
+if callee_body.basic_blocks.len() <= 3 {
 threshold += threshold / 4;
 }
 debug!(" final inline threshold = {}", threshold);
 // FIXME: Give a bonus to functions with only a single caller
 let diverges = matches!(
-callee_body.basic_blocks()[START_BLOCK].terminator().kind,
+callee_body.basic_blocks[START_BLOCK].terminator().kind,
 TerminatorKind::Unreachable | TerminatorKind::Call { target: None, .. }
 );
 if diverges && !matches!(callee_attrs.inline, InlineAttr::Always) {
@@ -434,13 +434,13 @@ fn check_mir_body(
 // Traverse the MIR manually so we can account for the effects of inlining on the CFG.
 let mut work_list = vec![START_BLOCK];
-let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
+let mut visited = BitSet::new_empty(callee_body.basic_blocks.len());
 while let Some(bb) = work_list.pop() {
 if !visited.insert(bb.index()) {
 continue;
 }
-let blk = &callee_body.basic_blocks()[bb];
+let blk = &callee_body.basic_blocks[bb];
 checker.visit_basic_block_data(bb, blk);
 let term = blk.terminator();
@@ -541,7 +541,7 @@ fn dest_needs_borrow(place: Place<'_>) -> bool {
 args: &args,
 new_locals: Local::new(caller_body.local_decls.len())..,
 new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
-new_blocks: BasicBlock::new(caller_body.basic_blocks().len())..,
+new_blocks: BasicBlock::new(caller_body.basic_blocks.len())..,
 destination: dest,
 callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(),
 callsite,

View File

@@ -153,7 +153,7 @@ pub(crate) fn mir_inliner_callees<'tcx>(
 _ => tcx.instance_mir(instance),
 };
 let mut calls = FxIndexSet::default();
-for bb_data in body.basic_blocks() {
+for bb_data in body.basic_blocks.iter() {
 let terminator = bb_data.terminator();
 if let TerminatorKind::Call { func, .. } = &terminator.kind {
 let ty = func.ty(&body.local_decls, tcx);

View File

@@ -15,7 +15,7 @@ fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 // find basic blocks with no statement and a return terminator
-let mut bbs_simple_returns = BitSet::new_empty(body.basic_blocks().len());
+let mut bbs_simple_returns = BitSet::new_empty(body.basic_blocks.len());
 let def_id = body.source.def_id();
 let bbs = body.basic_blocks_mut();
 for idx in bbs.indices() {

View File

@@ -21,10 +21,10 @@ fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 // early returns for edge cases of highly unrolled functions
-if body.basic_blocks().len() > MAX_NUM_BLOCKS {
+if body.basic_blocks.len() > MAX_NUM_BLOCKS {
 return;
 }
-if body.local_decls().len() > MAX_NUM_LOCALS {
+if body.local_decls.len() > MAX_NUM_LOCALS {
 return;
 }
 normalize_array_len_calls(tcx, body)

View File

@@ -89,7 +89,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> {
 }
 let mut copied_to_return_place = None;
-for block in body.basic_blocks().indices() {
+for block in body.basic_blocks.indices() {
 // Look for blocks with a `Return` terminator.
 if !matches!(body[block].terminator().kind, mir::TerminatorKind::Return) {
 continue;
@@ -122,7 +122,7 @@ fn find_local_assigned_to_return_place(
 body: &mut mir::Body<'_>,
 ) -> Option<Local> {
 let mut block = start;
-let mut seen = HybridBitSet::new_empty(body.basic_blocks().len());
+let mut seen = HybridBitSet::new_empty(body.basic_blocks.len());
 // Iterate as long as `block` has exactly one predecessor that we have not yet visited.
 while seen.insert(block) {

View File

@@ -94,7 +94,7 @@ fn remove_nop_landing_pads(&self, body: &mut Body<'_>) {
 let mut jumps_folded = 0;
 let mut landing_pads_removed = 0;
-let mut nop_landing_pads = BitSet::new_empty(body.basic_blocks().len());
+let mut nop_landing_pads = BitSet::new_empty(body.basic_blocks.len());
 // This is a post-order traversal, so that if A post-dominates B
 // then A will be visited before B.

View File

@@ -35,7 +35,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 .into_results_cursor(body);
 let mut to_remove = vec![];
-for (bb, block) in body.basic_blocks().iter_enumerated() {
+for (bb, block) in body.basic_blocks.iter_enumerated() {
 let terminator = block.terminator();
 let (TerminatorKind::Drop { place, .. } | TerminatorKind::DropAndReplace { place, .. })
 = &terminator.kind

View File

@@ -62,7 +62,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 pub fn separate_const_switch(body: &mut Body<'_>) -> usize {
 let mut new_blocks: SmallVec<[(BasicBlock, BasicBlock); 6]> = SmallVec::new();
 let predecessors = body.basic_blocks.predecessors();
-'block_iter: for (block_id, block) in body.basic_blocks().iter_enumerated() {
+'block_iter: for (block_id, block) in body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::SwitchInt {
 discr: Operand::Copy(switch_place) | Operand::Move(switch_place),
 ..
@@ -90,7 +90,7 @@ pub fn separate_const_switch(body: &mut Body<'_>) -> usize {
 let mut predecessors_left = predecessors[block_id].len();
 'predec_iter: for predecessor_id in predecessors[block_id].iter().copied() {
-let predecessor = &body.basic_blocks()[predecessor_id];
+let predecessor = &body.basic_blocks[predecessor_id];
 // First we make sure the predecessor jumps
 // in a reasonable way

View File

@@ -74,7 +74,7 @@ pub struct CfgSimplifier<'a, 'tcx> {
impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
pub fn new(body: &'a mut Body<'tcx>) -> Self {
-let mut pred_count = IndexVec::from_elem(0u32, body.basic_blocks());
+let mut pred_count = IndexVec::from_elem(0u32, &body.basic_blocks);
// we can't use mir.predecessors() here because that counts
// dead blocks, which we don't want to.
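
The comment spells out the constraint: predecessor counts that include edges from unreachable ("dead") blocks are not what the simplifier wants. A rough sketch of one way to compute counts that ignore such edges, reusing the toy successor-list representation above; the function name and shapes here are illustrative only, not the real `CfgSimplifier` logic:

```rust
use std::collections::VecDeque;

// Count predecessors per block, but only along edges leaving reachable blocks,
// so unreachable blocks do not inflate the counts.
fn reachable_pred_counts(successors: &[Vec<usize>], start: usize) -> Vec<u32> {
    // BFS to mark everything reachable from the start block.
    let mut reachable = vec![false; successors.len()];
    let mut queue = VecDeque::new();
    reachable[start] = true;
    queue.push_back(start);
    while let Some(bb) = queue.pop_front() {
        for &succ in &successors[bb] {
            if !reachable[succ] {
                reachable[succ] = true;
                queue.push_back(succ);
            }
        }
    }

    // Only edges out of reachable blocks contribute to the counts.
    let mut pred_count = vec![0u32; successors.len()];
    for (bb, succs) in successors.iter().enumerate() {
        if !reachable[bb] {
            continue; // a dead block's edges must not count
        }
        for &succ in succs {
            pred_count[succ] += 1;
        }
    }
    pred_count
}

fn main() {
    // Block 3 is dead but still points at block 1; it must not be counted.
    let cfg = vec![vec![1], vec![2], vec![], vec![1]];
    assert_eq!(reachable_pred_counts(&cfg, 0), vec![0, 1, 1, 0]);
}
```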
@@ -263,7 +263,7 @@ fn strip_nops(&mut self) {
pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let reachable = traversal::reachable_as_bitset(body);
-let num_blocks = body.basic_blocks().len();
+let num_blocks = body.basic_blocks.len();
if num_blocks == reachable.count() {
return;
}

View File

@@ -151,7 +151,7 @@ struct OptimizationFinder<'a, 'tcx> {
impl<'tcx> OptimizationFinder<'_, 'tcx> {
fn find_optimizations(&self) -> Vec<OptimizationInfo<'tcx>> {
self.body
-.basic_blocks()
+.basic_blocks
.iter_enumerated()
.filter_map(|(bb_idx, bb)| {
// find switch

View File

@@ -596,7 +596,7 @@ struct SimplifyBranchSameOptimizationFinder<'a, 'tcx> {
impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
fn find(&self) -> Vec<SimplifyBranchSameOptimization> {
self.body
-.basic_blocks()
+.basic_blocks
.iter_enumerated()
.filter_map(|(bb_idx, bb)| {
let (discr_switched_on, targets_and_values) = match &bb.terminator().kind {
@@ -632,7 +632,7 @@ fn find(&self) -> Vec<SimplifyBranchSameOptimization> {
let mut iter_bbs_reachable = targets_and_values
.iter()
-.map(|target_and_value| (target_and_value, &self.body.basic_blocks()[target_and_value.target]))
+.map(|target_and_value| (target_and_value, &self.body.basic_blocks[target_and_value.target]))
.filter(|(_, bb)| {
// Reaching `unreachable` is UB so assume it doesn't happen.
bb.terminator().kind != TerminatorKind::Unreachable

View File

@@ -79,7 +79,7 @@ fn ensure_otherwise_unreachable<'tcx>(
targets: &SwitchTargets,
) -> Option<BasicBlockData<'tcx>> {
let otherwise = targets.otherwise();
-let bb = &body.basic_blocks()[otherwise];
+let bb = &body.basic_blocks[otherwise];
if bb.terminator().kind == TerminatorKind::Unreachable
&& bb.statements.iter().all(|s| matches!(&s.kind, StatementKind::StorageDead(_)))
{
@@ -102,10 +102,10 @@ fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
trace!("UninhabitedEnumBranching starting for {:?}", body.source);
-for bb in body.basic_blocks().indices() {
+for bb in body.basic_blocks.indices() {
trace!("processing block {:?}", bb);
-let Some(discriminant_ty) = get_switched_on_type(&body.basic_blocks()[bb], tcx, body) else {
+let Some(discriminant_ty) = get_switched_on_type(&body.basic_blocks[bb], tcx, body) else {
continue;
};

View File

@@ -481,7 +481,7 @@ fn codegened_and_inlined_items<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> &'tcx DefIdSe
continue;
}
let body = tcx.instance_mir(instance.def);
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
for statement in &block.statements {
let mir::StatementKind::Coverage(_) = statement.kind else { continue };
let scope = statement.source_info.scope;

View File

@@ -348,7 +348,7 @@ fn instance_def_size_estimate<'tcx>(
match instance_def {
InstanceDef::Item(..) | InstanceDef::DropGlue(..) => {
let mir = tcx.instance_mir(instance_def);
-mir.basic_blocks().iter().map(|bb| bb.statements.len() + 1).sum()
+mir.basic_blocks.iter().map(|bb| bb.statements.len() + 1).sum()
}
// Estimate the size of other compiler-generated shims to be 1.
_ => 1,
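
The `Item`/`DropGlue` arm above estimates size as statements plus one terminator per block, summed over the whole body. A tiny worked illustration with made-up statement counts, not the real `instance_def_size_estimate` query:

```rust
fn main() {
    // Two toy blocks with 3 and 5 statements respectively; the `+ 1` per block
    // accounts for the terminator, giving (3 + 1) + (5 + 1) = 10.
    let statement_counts_per_block = vec![3usize, 5];
    let estimate: usize = statement_counts_per_block.iter().map(|n| n + 1).sum();
    assert_eq!(estimate, 10);
}
```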

View File

@@ -105,7 +105,7 @@ fn check_fn(
vis.into_map(cx, maybe_storage_live_result)
};
-for (bb, bbdata) in mir.basic_blocks().iter_enumerated() {
+for (bb, bbdata) in mir.basic_blocks.iter_enumerated() {
let terminator = bbdata.terminator();
if terminator.source_info.span.from_expansion() {
@@ -186,7 +186,7 @@ fn check_fn(
unwrap_or_continue!(find_stmt_assigns_to(cx, mir, pred_arg, true, ps[0]));
let loc = mir::Location {
block: bb,
-statement_index: mir.basic_blocks()[bb].statements.len(),
+statement_index: mir.basic_blocks[bb].statements.len(),
};
// This can be turned into `res = move local` if `arg` and `cloned` are not borrowed
@@ -310,7 +310,7 @@ fn find_stmt_assigns_to<'tcx>(
by_ref: bool,
bb: mir::BasicBlock,
) -> Option<(mir::Local, CannotMoveOut)> {
-let rvalue = mir.basic_blocks()[bb].statements.iter().rev().find_map(|stmt| {
+let rvalue = mir.basic_blocks[bb].statements.iter().rev().find_map(|stmt| {
if let mir::StatementKind::Assign(box (mir::Place { local, .. }, v)) = &stmt.kind {
return if *local == to_local { Some(v) } else { None };
}

View File

@@ -55,7 +55,7 @@ pub fn is_min_const_fn<'a, 'tcx>(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, msrv:
body.local_decls.iter().next().unwrap().source_info.span,
)?;
-for bb in body.basic_blocks() {
+for bb in body.basic_blocks.iter() {
check_terminator(tcx, body, bb.terminator(), msrv)?;
for stmt in &bb.statements {
check_statement(tcx, body, def_id, stmt)?;
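
The loop above gives the validation its shape: for every block, check the terminator, then every statement, and let `?` propagate the first rejection. A condensed sketch of that shape with stand-in types; `Block`, `check_terminator`, and `check_statement` below are placeholders with invented signatures, not clippy's actual `qualify_min_const_fn` helpers:

```rust
// Stand-in block type and checks; the real code validates MIR terminators and
// statements against what a minimal `const fn` may contain.
struct Block {
    terminator: String,
    statements: Vec<String>,
}

fn check_terminator(t: &str) -> Result<(), String> {
    if t == "Call" { Err(format!("`{t}` is not allowed")) } else { Ok(()) }
}

fn check_statement(s: &str) -> Result<(), String> {
    if s.contains("raw pointer") { Err(format!("`{s}` is not allowed")) } else { Ok(()) }
}

fn is_min_const_fn(blocks: &[Block]) -> Result<(), String> {
    for bb in blocks {
        // Same shape as the loop above: terminator first, then every statement,
        // with `?` propagating the first failure.
        check_terminator(&bb.terminator)?;
        for stmt in &bb.statements {
            check_statement(stmt)?;
        }
    }
    Ok(())
}

fn main() {
    let ok = vec![Block { terminator: "Return".into(), statements: vec!["_0 = const 1".into()] }];
    assert!(is_min_const_fn(&ok).is_ok());

    let bad = vec![Block { terminator: "Call".into(), statements: vec![] }];
    assert!(is_min_const_fn(&bad).is_err());
}
```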