Auto merge of #43576 - arielb1:no-unneeded-unwind, r=eddyb
rustc_mir: don't build unused unwind cleanup blocks

When building a scope exit, don't build unwind cleanup blocks unless they will actually be used by the unwind path of a drop. The unused blocks are removed by SimplifyCfg, but they can cause a significant performance slowdown before they are removed. This fixes #43511.

Also includes a few other small MIR cleanups and optimizations.

r? @eddyb
commit 640cfc852a
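The gist of the change is easy to model in isolation: before touching the unwind path at a scope exit, check whether any pending drop can actually panic, and only then build (or reuse) a cleanup block. The following is a minimal standalone sketch of that gating logic, not rustc's real `Builder`; `CleanupBuilder` and its counter are invented for illustration.

```rust
// Toy model of "only build unwind cleanup blocks when a drop may panic".
// `CleanupBuilder` is an invented stand-in, not rustc's MIR builder.
enum DropKind {
    // Dropping a value may run user code, so it can unwind.
    Value,
    // Releasing storage never unwinds, so it needs no landing pad.
    Storage,
}

impl DropKind {
    fn may_panic(&self) -> bool {
        match *self {
            DropKind::Value => true,
            DropKind::Storage => false,
        }
    }
}

struct CleanupBuilder {
    cleanup_blocks_built: usize,
}

impl CleanupBuilder {
    // Stand-in for diverge_cleanup(): building the unwind path is the
    // expensive part that the change avoids when it is not needed.
    fn diverge_cleanup(&mut self) {
        self.cleanup_blocks_built += 1;
    }

    // Stand-in for exit_scope()/pop_scope(): only touch the cleanup path
    // if at least one of the pending drops can unwind.
    fn exit_scope(&mut self, drops: &[DropKind]) {
        let may_panic = drops.iter().any(|d| d.may_panic());
        if may_panic {
            self.diverge_cleanup();
        }
        // ... emit the actual Drop / StorageDead statements here ...
    }
}

fn main() {
    let mut b = CleanupBuilder { cleanup_blocks_built: 0 };
    // A scope containing only storage releases builds no cleanup block.
    b.exit_scope(&[DropKind::Storage, DropKind::Storage]);
    // A scope with a real value drop still gets its cleanup block.
    b.exit_scope(&[DropKind::Value, DropKind::Storage]);
    assert_eq!(b.cleanup_blocks_built, 1);
}
```

In the diff below, the same idea shows up as the new `DropKind::may_panic` helper and the `may_panic` checks added to `pop_scope` and `exit_scope` before calling `diverge_cleanup()`.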
@@ -14,7 +14,6 @@ use ty::subst::Substs;
use ty::{ClosureSubsts, Region, Ty};
use mir::*;
use rustc_const_math::ConstUsize;
use rustc_data_structures::indexed_vec::Idx;
use syntax_pos::Span;

// # The MIR Visitor
@@ -260,9 +259,15 @@ macro_rules! make_mir_visitor {

fn super_mir(&mut self,
mir: & $($mutability)* Mir<'tcx>) {
for index in 0..mir.basic_blocks().len() {
let block = BasicBlock::new(index);
self.visit_basic_block_data(block, &$($mutability)* mir[block]);
// for best performance, we want to use an iterator rather
// than a for-loop, to avoid calling Mir::invalidate for
// each basic block.
macro_rules! basic_blocks {
(mut) => (mir.basic_blocks_mut().iter_enumerated_mut());
() => (mir.basic_blocks().iter_enumerated());
};
for (bb, data) in basic_blocks!($($mutability)*) {
self.visit_basic_block_data(bb, data);
}

for scope in &$($mutability)* mir.visibility_scopes {
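The comment in the new `super_mir` explains the motivation: indexing the MIR mutably (`&mut mir[block]`) has to call `Mir::invalidate` for every basic block, while a single enumerated mutable iteration does not. The toy program below models that cost difference; `Blocks` and its counter are invented for illustration and are not rustc types.

```rust
// Toy model of the index-loop vs. iterator trade-off in super_mir:
// per-element `&mut` indexing must pessimistically invalidate caches
// (Mir::invalidate in rustc); a bulk mutable iteration does not.
struct Blocks {
    data: Vec<String>,
    cache_invalidations: usize,
}

impl Blocks {
    // Index-style mutable access: invalidate once per element touched.
    fn block_mut(&mut self, i: usize) -> &mut String {
        self.cache_invalidations += 1; // stand-in for Mir::invalidate
        &mut self.data[i]
    }
}

fn main() {
    let mut mir = Blocks {
        data: vec!["bb0".into(), "bb1".into(), "bb2".into()],
        cache_invalidations: 0,
    };

    // Old shape: one indexed lookup (and one invalidation) per basic block.
    for i in 0..mir.data.len() {
        mir.block_mut(i).push_str(" visited");
    }
    assert_eq!(mir.cache_invalidations, 3);

    // New shape: a single enumerated mutable walk over all blocks,
    // with no per-block invalidation.
    for (_bb, data) in mir.data.iter_mut().enumerate() {
        data.push_str(" visited again");
    }
    assert_eq!(mir.cache_invalidations, 3);
}
```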
@@ -86,7 +86,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let tcx = this.hir.tcx();

// Enter the remainder scope, i.e. the bindings' destruction scope.
this.push_scope(remainder_scope);
this.push_scope((remainder_scope, source_info));
let_extent_stack.push(remainder_scope);

// Declare the bindings, which may create a visibility scope.
@@ -237,7 +237,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
.collect();

let success = this.cfg.start_new_block();
let cleanup = this.diverge_cleanup(expr_span);
let cleanup = this.diverge_cleanup();
this.cfg.terminate(block, source_info, TerminatorKind::Call {
func: fun,
args: args,
@@ -306,7 +306,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let bool_ty = self.hir.bool_ty();
let eq_result = self.temp(bool_ty, test.span);
let eq_block = self.cfg.start_new_block();
let cleanup = self.diverge_cleanup(test.span);
let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, source_info, TerminatorKind::Call {
func: Operand::Constant(box Constant {
span: test.span,
@@ -107,6 +107,9 @@ pub struct Scope<'tcx> {
/// the extent of this scope within source code.
extent: CodeExtent,

/// the span of that extent
extent_span: Span,

/// Whether there's anything to do for the cleanup path, that is,
/// when unwinding through this scope. This includes destructors,
/// but not StorageDead statements, which don't get emitted at all
@@ -116,7 +119,7 @@ pub struct Scope<'tcx> {
/// * pollutting the cleanup MIR with StorageDead creates
/// landing pads even though there's no actual destructors
/// * freeing up stack space has no effect during unwinding
pub(super) needs_cleanup: bool,
needs_cleanup: bool,

/// set of lvalues to drop when exiting this scope. This starts
/// out empty but grows as variables are declared during the
@@ -197,6 +200,15 @@ pub struct BreakableScope<'tcx> {
pub break_destination: Lvalue<'tcx>,
}

impl DropKind {
fn may_panic(&self) -> bool {
match *self {
DropKind::Value { .. } => true,
DropKind::Storage => false
}
}
}

impl<'tcx> Scope<'tcx> {
/// Invalidate all the cached blocks in the scope.
///
@@ -282,7 +294,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
{
debug!("in_opt_scope(opt_extent={:?}, block={:?})", opt_extent, block);
if let Some(extent) = opt_extent { self.push_scope(extent.0); }
if let Some(extent) = opt_extent { self.push_scope(extent); }
let rv = unpack!(block = f(self));
if let Some(extent) = opt_extent {
unpack!(block = self.pop_scope(extent, block));
@@ -301,7 +313,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
{
debug!("in_scope(extent={:?}, block={:?})", extent, block);
self.push_scope(extent.0);
self.push_scope(extent);
let rv = unpack!(block = f(self));
unpack!(block = self.pop_scope(extent, block));
debug!("in_scope: exiting extent={:?} block={:?}", extent, block);
@@ -312,12 +324,13 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// scope and call `pop_scope` afterwards. Note that these two
/// calls must be paired; using `in_scope` as a convenience
/// wrapper maybe preferable.
pub fn push_scope(&mut self, extent: CodeExtent) {
pub fn push_scope(&mut self, extent: (CodeExtent, SourceInfo)) {
debug!("push_scope({:?})", extent);
let vis_scope = self.visibility_scope;
self.scopes.push(Scope {
visibility_scope: vis_scope,
extent: extent,
extent: extent.0,
extent_span: extent.1.span,
needs_cleanup: false,
drops: vec![],
free: None,
@@ -333,9 +346,13 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
mut block: BasicBlock)
-> BlockAnd<()> {
debug!("pop_scope({:?}, {:?})", extent, block);
// We need to have `cached_block`s available for all the drops, so we call diverge_cleanup
// to make sure all the `cached_block`s are filled in.
self.diverge_cleanup(extent.1.span);
// If we are emitting a `drop` statement, we need to have the cached
// diverge cleanup pads ready in case that drop panics.
let may_panic =
self.scopes.last().unwrap().drops.iter().any(|s| s.kind.may_panic());
if may_panic {
self.diverge_cleanup();
}
let scope = self.scopes.pop().unwrap();
assert_eq!(scope.extent, extent.0);
unpack!(block = build_scope_drops(&mut self.cfg,
@@ -366,6 +383,15 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let len = self.scopes.len();
assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes");
let tmp = self.get_unit_temp();

// If we are emitting a `drop` statement, we need to have the cached
// diverge cleanup pads ready in case that drop panics.
let may_panic = self.scopes[(len - scope_count)..].iter()
.any(|s| s.drops.iter().any(|s| s.kind.may_panic()));
if may_panic {
self.diverge_cleanup();
}

{
let mut rest = &mut self.scopes[(len - scope_count)..];
while let Some((scope, rest_)) = {rest}.split_last_mut() {
@@ -618,7 +644,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// This path terminates in Resume. Returns the start of the path.
/// See module comment for more details. None indicates there’s no
/// cleanup to do at this point.
pub fn diverge_cleanup(&mut self, span: Span) -> Option<BasicBlock> {
pub fn diverge_cleanup(&mut self) -> Option<BasicBlock> {
if !self.scopes.iter().any(|scope| scope.needs_cleanup) {
return None;
}
@@ -652,7 +678,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
};

for scope in scopes.iter_mut() {
target = build_diverge_scope(hir.tcx(), cfg, &unit_temp, span, scope, target);
target = build_diverge_scope(
hir.tcx(), cfg, &unit_temp, scope.extent_span, scope, target);
}
Some(target)
}
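Taken together, these scope.rs hunks are one refactor: the span is recorded when the scope is pushed (the new `extent_span` field), so `diverge_cleanup()` no longer needs a `span` argument and `build_diverge_scope` uses each scope's own span instead. Below is a minimal sketch of that record-at-push-time pattern; `ScopeStack` and `Span` are invented illustration types, not the rustc ones.

```rust
// Sketch of the refactor: record the span when a scope is pushed instead of
// threading a span through every later cleanup call. Invented types only.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);

struct Scope {
    extent_span: Span,
    needs_cleanup: bool,
}

struct ScopeStack {
    scopes: Vec<Scope>,
}

impl ScopeStack {
    // push_scope receives everything the unwind path will later need.
    fn push_scope(&mut self, extent_span: Span) {
        self.scopes.push(Scope { extent_span, needs_cleanup: false });
    }

    // diverge_cleanup() takes no span: each scope already carries its own,
    // and it bails out early when no scope has anything to clean up.
    fn diverge_cleanup(&self) -> Option<Span> {
        if !self.scopes.iter().any(|s| s.needs_cleanup) {
            return None; // nothing to unwind through: build no blocks at all
        }
        // Build the cleanup chain using the per-scope spans recorded earlier;
        // here we just return the innermost one.
        self.scopes.last().map(|s| s.extent_span)
    }
}

fn main() {
    let mut stack = ScopeStack { scopes: Vec::new() };
    stack.push_scope(Span(10));
    stack.push_scope(Span(20));
    assert_eq!(stack.diverge_cleanup(), None);
    stack.scopes[1].needs_cleanup = true;
    assert_eq!(stack.diverge_cleanup(), Some(Span(20)));
}
```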
@@ -668,7 +695,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
}
let source_info = self.source_info(span);
let next_target = self.cfg.start_new_block();
let diverge_target = self.diverge_cleanup(span);
let diverge_target = self.diverge_cleanup();
self.cfg.terminate(block, source_info,
TerminatorKind::Drop {
location: location,
@@ -686,7 +713,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
value: Operand<'tcx>) -> BlockAnd<()> {
let source_info = self.source_info(span);
let next_target = self.cfg.start_new_block();
let diverge_target = self.diverge_cleanup(span);
let diverge_target = self.diverge_cleanup();
self.cfg.terminate(block, source_info,
TerminatorKind::DropAndReplace {
location: location,
@@ -709,7 +736,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let source_info = self.source_info(span);

let success_block = self.cfg.start_new_block();
let cleanup = self.diverge_cleanup(span);
let cleanup = self.diverge_cleanup();

self.cfg.terminate(block, source_info,
TerminatorKind::Assert {
@@ -731,45 +758,48 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>,
mut block: BasicBlock,
arg_count: usize)
-> BlockAnd<()> {
debug!("build_scope_drops({:?} -> {:?})", block, scope);
let mut iter = scope.drops.iter().rev().peekable();
while let Some(drop_data) = iter.next() {
let source_info = scope.source_info(drop_data.span);
if let DropKind::Value { .. } = drop_data.kind {
// Try to find the next block with its cached block
// for us to diverge into in case the drop panics.
let on_diverge = iter.peek().iter().filter_map(|dd| {
match dd.kind {
DropKind::Value { cached_block } => cached_block,
DropKind::Storage => None
}
}).next();
// If there’s no `cached_block`s within current scope,
// we must look for one in the enclosing scope.
let on_diverge = on_diverge.or_else(||{
earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
});
let next = cfg.start_new_block();
cfg.terminate(block, source_info, TerminatorKind::Drop {
location: drop_data.location.clone(),
target: next,
unwind: on_diverge
});
block = next;
}
match drop_data.kind {
DropKind::Value { .. } |
DropKind::Storage => {
// Only temps and vars need their storage dead.
match drop_data.location {
Lvalue::Local(index) if index.index() > arg_count => {}
_ => continue
}
DropKind::Value { .. } => {
// Try to find the next block with its cached block
// for us to diverge into in case the drop panics.
let on_diverge = iter.peek().iter().filter_map(|dd| {
match dd.kind {
DropKind::Value { cached_block: None } =>
span_bug!(drop_data.span, "cached block not present?"),
DropKind::Value { cached_block } => cached_block,
DropKind::Storage => None
}
}).next();
// If there’s no `cached_block`s within current scope,
// we must look for one in the enclosing scope.
let on_diverge = on_diverge.or_else(|| {
earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
});
let next = cfg.start_new_block();
cfg.terminate(block, source_info, TerminatorKind::Drop {
location: drop_data.location.clone(),
target: next,
unwind: on_diverge
});
block = next;
}
DropKind::Storage => {}
}

// Drop the storage for both value and storage drops.
// Only temps and vars need their storage dead.
match drop_data.location {
Lvalue::Local(index) if index.index() > arg_count => {
cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageDead(drop_data.location.clone())
});
}
_ => continue
}
}
block.unit()
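The restructured `build_scope_drops` keeps the same unwind-target lookup for value drops: peek at the next pending drop's cached cleanup block, and if the current scope has none, fall back to the nearest cached block of an enclosing scope. Below is a standalone sketch of just that lookup, with invented `DropData`/`ScopeData` stand-ins for the real MIR types.

```rust
// Sketch of the unwind-target lookup used when emitting a value drop.
// `DropData` and `ScopeData` are invented stand-ins, not rustc types.
enum DropKind {
    Value { cached_block: Option<u32> },
    Storage,
}

struct DropData {
    kind: DropKind,
}

struct ScopeData {
    cached_block: Option<u32>,
}

fn unwind_target(
    remaining_drops: &[DropData], // drops still to emit in the current scope
    earlier_scopes: &[ScopeData], // enclosing scopes, outermost first
) -> Option<u32> {
    // First choice: the cached block of the next value drop in this scope.
    let local = remaining_drops
        .iter()
        .filter_map(|dd| match dd.kind {
            DropKind::Value { cached_block } => cached_block,
            DropKind::Storage => None,
        })
        .next();
    // Otherwise: the nearest enclosing scope that has a cached block.
    local.or_else(|| {
        earlier_scopes
            .iter()
            .rev()
            .filter_map(|s| s.cached_block)
            .next()
    })
}

fn main() {
    // The next drop in this scope is a storage release with no cached block,
    // so the unwind edge falls back to the enclosing scope's cached block.
    let drops = [DropData { kind: DropKind::Storage }];
    let scopes = [
        ScopeData { cached_block: None },
        ScopeData { cached_block: Some(7) },
    ];
    assert_eq!(unwind_target(&drops, &scopes), Some(7));
}
```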
@@ -105,6 +105,8 @@ impl<'a, 'tcx: 'a> CfgSimplifier<'a, 'tcx> {
}

pub fn simplify(mut self) {
self.strip_nops();

loop {
let mut changed = false;
@@ -141,8 +143,6 @@ impl<'a, 'tcx: 'a> CfgSimplifier<'a, 'tcx> {

if !changed { break }
}

self.strip_nops()
}

// Collapse a goto chain starting from `start`
@@ -47,42 +47,36 @@ fn main() {
// StorageDead(_3);
// StorageLive(_4);
// _4 = std::option::Option<std::boxed::Box<u32>>::None;
// StorageLive(_5);
// StorageLive(_6);
// StorageLive(_7);
// _7 = _4;
// replace(_6 <- _7) -> [return: bb6, unwind: bb7];
// _6 = _4;
// replace(_5 <- _6) -> [return: bb1, unwind: bb5];
// }
// bb1: {
// resume;
// drop(_6) -> [return: bb6, unwind: bb4];
// }
// bb2: {
// drop(_4) -> bb1;
// resume;
// }
// bb3: {
// goto -> bb2;
// drop(_4) -> bb2;
// }
// bb4: {
// drop(_6) -> bb3;
// drop(_5) -> bb3;
// }
// bb5: {
// goto -> bb4;
// drop(_6) -> bb4;
// }
// bb6: {
// drop(_7) -> [return: bb8, unwind: bb4];
// StorageDead(_6);
// _0 = ();
// drop(_5) -> [return: bb7, unwind: bb3];
// }
// bb7: {
// drop(_7) -> bb5;
// StorageDead(_5);
// drop(_4) -> bb8;
// }
// bb8: {
// StorageDead(_7);
// _0 = ();
// drop(_6) -> [return: bb9, unwind: bb2];
// }
// bb9: {
// StorageDead(_6);
// drop(_4) -> bb10;
// }
// bb10: {
// StorageDead(_4);
// StorageDead(_2);
// StorageDead(_1);
@@ -32,41 +32,41 @@ fn foo(i: i32) {
// START rustc.node4.SimplifyCfg-qualify-consts.after.mir
// let mut _0: ();
// let _1: D;
// let _3: i32;
// let _4: &'6_2rce i32;
// let _2: i32;
// let _3: &'6_2rce i32;
// let _7: &'6_4rce i32;
// let mut _5: ();
// let mut _6: i32;
//
// let mut _4: ();
// let mut _5: i32;
// let mut _6: ();
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
// StorageLive(_2);
// _2 = const 0i32;
// StorageLive(_3);
// _3 = const 0i32;
// StorageLive(_4);
// _4 = &'6_2rce _3;
// StorageLive(_6);
// _6 = (*_4);
// _5 = const foo(_6) -> [return: bb2, unwind: bb3];
// _3 = &'6_2rce _2;
// StorageLive(_5);
// _5 = (*_3);
// _4 = const foo(_5) -> [return: bb1, unwind: bb3];
// }
// bb1: {
// resume;
// }
// bb2: {
// StorageDead(_6);
// StorageDead(_5);
// StorageLive(_7);
// _7 = &'6_4rce _3;
// _7 = &'6_4rce _2;
// _0 = ();
// StorageDead(_7);
// EndRegion('6_4rce);
// StorageDead(_4);
// EndRegion('6_2rce);
// StorageDead(_3);
// EndRegion('6_2rce);
// StorageDead(_2);
// drop(_1) -> bb4;
// }
// bb2: {
// resume;
// }
// bb3: {
// EndRegion('6_2rce);
// drop(_1) -> bb1;
// drop(_1) -> bb2;
// }
// bb4: {
// StorageDead(_1);
@@ -31,32 +31,31 @@ fn foo<F>(f: F) where F: FnOnce() -> i32 {
// let mut _0: ();
// let _1: D;
// let mut _2: ();
// let mut _3: ();
// let mut _4: [closure@NodeId(18) d: &'19mce D];
// let mut _5: &'19mce D;
//
// let mut _3: [closure@NodeId(18) d:&'19mce D];
// let mut _4: &'19mce D;
// let mut _5: ();
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
// StorageLive(_3);
// StorageLive(_4);
// StorageLive(_5);
// _5 = &'19mce _1;
// _4 = [closure@NodeId(18)] { d: _5 };
// StorageDead(_5);
// _3 = const foo(_4) -> [return: bb2, unwind: bb3];
// _4 = &'19mce _1;
// _3 = [closure@NodeId(18)] { d: _4 };
// StorageDead(_4);
// _2 = const foo(_3) -> [return: bb1, unwind: bb3];
// }
// bb1: {
// resume;
// }
// bb2: {
// StorageDead(_4);
// StorageDead(_3);
// EndRegion('19mce);
// _0 = ();
// drop(_1) -> bb4;
// }
// bb2: {
// resume;
// }
// bb3: {
// EndRegion('19mce);
// drop(_1) -> bb1;
// drop(_1) -> bb2;
// }
// bb4: {
// StorageDead(_1);
@@ -27,35 +27,35 @@ fn foo<F>(f: F) where F: FnOnce() -> i32 {

// END RUST SOURCE
// START rustc.node4.SimplifyCfg-qualify-consts.after.mir
// fn main() -> () {
// let mut _0: ();
// let _1: D;
// let mut _2: ();
// let mut _3: ();
// let mut _4: [closure@NodeId(22) d:&'23mce D];
// let mut _5: &'23mce D;
//
// let mut _3: [closure@NodeId(22) d:&'23mce D];
// let mut _4: &'23mce D;
// let mut _5: ();
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
// StorageLive(_3);
// StorageLive(_4);
// StorageLive(_5);
// _5 = &'23mce _1;
// _4 = [closure@NodeId(22)] { d: _5 };
// StorageDead(_5);
// _3 = const foo(_4) -> [return: bb2, unwind: bb3];
// _4 = &'23mce _1;
// _3 = [closure@NodeId(22)] { d: _4 };
// StorageDead(_4);
// _2 = const foo(_3) -> [return: bb1, unwind: bb3];
// }
// bb1: {
// resume;
// }
// bb2: {
// StorageDead(_4);
// StorageDead(_3);
// EndRegion('23mce);
// _0 = ();
// drop(_1) -> bb4;
// }
// bb2: {
// resume;
// }
// bb3: {
// EndRegion('23mce);
// drop(_1) -> bb1;
// drop(_1) -> bb2;
// }
// bb4: {
// StorageDead(_1);
@@ -31,18 +31,18 @@ fn foo<F>(f: F) where F: FnOnce() -> i32 {
// let mut _0: ();
// let _1: D;
// let mut _2: ();
// let mut _3: ();
// let mut _4: [closure@NodeId(22) d:D];
// let mut _5: D;
// let mut _3: [closure@NodeId(22) d:D];
// let mut _4: D;
// let mut _5: ();
//
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
// StorageLive(_3);
// StorageLive(_4);
// StorageLive(_5);
// _5 = _1;
// _4 = [closure@NodeId(22)] { d: _5 };
// drop(_5) -> [return: bb4, unwind: bb3];
// _4 = _1;
// _3 = [closure@NodeId(22)] { d: _4 };
// drop(_4) -> [return: bb4, unwind: bb3];
// }
// bb1: {
// resume;
@@ -51,17 +51,17 @@ fn foo<F>(f: F) where F: FnOnce() -> i32 {
// drop(_1) -> bb1;
// }
// bb3: {
// drop(_4) -> bb2;
// drop(_3) -> bb2;
// }
// bb4: {
// StorageDead(_5);
// _3 = const foo(_4) -> [return: bb5, unwind: bb3];
// StorageDead(_4);
// _2 = const foo(_3) -> [return: bb5, unwind: bb3];
// }
// bb5: {
// drop(_4) -> [return: bb6, unwind: bb2];
// drop(_3) -> [return: bb6, unwind: bb2];
// }
// bb6: {
// StorageDead(_4);
// StorageDead(_3);
// _0 = ();
// drop(_1) -> bb7;
// }
@@ -76,16 +76,16 @@ fn foo<F>(f: F) where F: FnOnce() -> i32 {
// fn main::{{closure}}(_1: [closure@NodeId(22) d:D]) -> i32 {
// let mut _0: i32;
// let _2: &'14_0rce D;
// let mut _3: ();
// let mut _4: i32;
// let mut _3: i32;
// let mut _4: ();
//
// bb0: {
// StorageLive(_2);
// _2 = &'14_0rce (_1.0: D);
// StorageLive(_4);
// _4 = ((*_2).0: i32);
// _0 = _4;
// StorageDead(_4);
// StorageLive(_3);
// _3 = ((*_2).0: i32);
// _0 = _3;
// StorageDead(_3);
// StorageDead(_2);
// EndRegion('14_0rce);
// drop(_1) -> bb1;
@@ -29,44 +29,43 @@ fn foo<F>(f: F) where F: FnOnce() -> i32 {
// END RUST SOURCE
// START rustc.node4.SimplifyCfg-qualify-consts.after.mir
// fn main() -> () {
// let mut _0: ();
// let _1: D;
// let _3: &'6_1rce D;
// let mut _2: ();
// let mut _4: ();
// let mut _5: [closure@NodeId(22) r:&'6_1rce D];
// let mut _6: &'6_1rce D;
//
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
// StorageLive(_3);
// _3 = &'6_1rce _1;
// StorageLive(_5);
// StorageLive(_6);
// _6 = _3;
// _5 = [closure@NodeId(22)] { r: _6 };
// StorageDead(_6);
// _4 = const foo(_5) -> [return: bb2, unwind: bb3];
// }
// bb1: {
// resume;
// }
// bb2: {
// StorageDead(_5);
// _0 = ();
// StorageDead(_3);
// EndRegion('6_1rce);
// drop(_1) -> bb4;
// }
// bb3: {
// EndRegion('6_1rce);
// drop(_1) -> bb1;
// }
// bb4: {
// StorageDead(_1);
// return;
// }
// let mut _0: ();
// let _1: D;
// let _2: &'6_1rce D;
// let mut _3: ();
// let mut _4: [closure@NodeId(22) r:&'6_1rce D];
// let mut _5: &'6_1rce D;
// let mut _6: ();
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
// StorageLive(_2);
// _2 = &'6_1rce _1;
// StorageLive(_4);
// StorageLive(_5);
// _5 = _2;
// _4 = [closure@NodeId(22)] { r: _5 };
// StorageDead(_5);
// _3 = const foo(_4) -> [return: bb1, unwind: bb3];
// }
// bb1: {
// StorageDead(_4);
// _0 = ();
// StorageDead(_2);
// EndRegion('6_1rce);
// drop(_1) -> bb4;
// }
// bb2: {
// resume;
// }
// bb3: {
// EndRegion('6_1rce);
// drop(_1) -> bb2;
// }
// bb4: {
// StorageDead(_1);
// return;
// }
// }
// END rustc.node4.SimplifyCfg-qualify-consts.after.mir
@@ -34,18 +34,23 @@ impl S {

// END RUST SOURCE
// START rustc.node4.ElaborateDrops.after.mir
// let mut _0: ();
// let _1: ();
// let mut _2: S;
// let mut _3: ();
// let mut _3: S;
// let mut _4: S;
// let mut _5: S;
// let mut _5: ();
// let mut _6: bool;
//
// bb0: {
// END rustc.node4.ElaborateDrops.after.mir
// START rustc.node13.ElaborateDrops.after.mir
// let mut _2: ();
// let mut _4: ();
// let mut _5: S;
// let mut _0: ();
// let _1: S;
// let mut _2: S;
// let mut _3: ();
// let mut _4: S;
// let mut _5: ();
// let mut _6: S;
// let mut _7: bool;
//