explicit InlineFunctionInfo(CallGraph *cg = nullptr,
AssumptionCacheTracker *ACT = nullptr,
BlockCloningFunctor Ftor = nullptr)
- : CG(cg), ACT(ACT), Ftor(Ftor) {}
+ : CG(cg), ACT(ACT), Ftor(Ftor), CallSuccessorBlockDeleted(false) {}
/// CG - If non-null, InlineFunction will update the callgraph to reflect the
/// changes it makes.
// Functor that is invoked when a block is cloned into the new function.
BlockCloningFunctor Ftor;
+ /// CallSuccessorBlockDeleted - Whether the basic block immediately
+ /// following the call has been deleted during inlining.
+ bool CallSuccessorBlockDeleted;
+
/// StaticAllocas - InlineFunction fills this in with all static allocas that
/// get copied into the caller.
SmallVector<AllocaInst *, 4> StaticAllocas;
continue;
}
updateEntryCount(CallSiteBlock, Callee);
- // The instruction following the call is part of a new basic block
- // created during the inlining process. This does not have an entry in
- // the BFI. We create an entry by copying the frequency of the original
- // block containing the call.
- copyBlockFrequency(CallSiteBlock, CallSuccessor->getParent());
+ if (!InlineInfo.CallSuccessorBlockDeleted) {
+ // The instruction following the call is part of a new basic block
+ // created during the inlining process. This does not have an entry in
+ // the BFI. We create an entry by copying the frequency of the
+ // original block containing the call.
+ copyBlockFrequency(CallSiteBlock, CallSuccessor->getParent());
+ }
++NumInlined;
// If we inlined any musttail calls and the original return is now
// unreachable, delete it. It can only contain a bitcast and ret.
- if (InlinedMustTailCalls && pred_begin(AfterCallBB) == pred_end(AfterCallBB))
+ if (InlinedMustTailCalls &&
+ pred_begin(AfterCallBB) == pred_end(AfterCallBB)) {
+ IFI.CallSuccessorBlockDeleted = true;
AfterCallBB->eraseFromParent();
+ }
// We should always be able to fold the entry block of the function into the
// single predecessor of the block...