if (compIsForInlining())
{
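+ // Scan the inlinee's blocks to find out whether it returns at all and,
+ // if it does, whether it has more than one return block.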
+ bool hasReturnBlocks = false;
+ bool hasMoreThanOneReturnBlock = false;
+
+ for (BasicBlock* block = fgFirstBB; block != nullptr; block = block->bbNext)
+ {
+ if (block->bbJumpKind == BBJ_RETURN)
+ {
+ if (hasReturnBlocks)
+ {
+ hasMoreThanOneReturnBlock = true;
+ break;
+ }
+
+ hasReturnBlocks = true;
+ }
+ }
+
+ if (!hasReturnBlocks && !compInlineResult->UsesLegacyPolicy())
+ {
+ //
+ // Mark the call node as "no return". The inliner might ignore CALLEE_DOES_NOT_RETURN and
+ // fail the inline for a different reason. In that case we still want to make the "no return"
+ // information available to the caller as it can impact the caller's code quality.
+ //
+
+ impInlineInfo->iciCall->gtCallMoreFlags |= GTF_CALL_M_DOES_NOT_RETURN;
+ }
+
+ compInlineResult->NoteBool(InlineObservation::CALLEE_DOES_NOT_RETURN, !hasReturnBlocks);
+
+ if (compInlineResult->IsFailure())
+ {
+ return;
+ }
+
noway_assert(info.compXcptnsCount == 0);
compHndBBtab = impInlineInfo->InlinerCompiler->compHndBBtab;
compHndBBtabAllocCount = impInlineInfo->InlinerCompiler->compHndBBtabAllocCount; // we probably only use the table, not add to it.
compHndBBtabCount = impInlineInfo->InlinerCompiler->compHndBBtabCount;
info.compXcptnsCount = impInlineInfo->InlinerCompiler->info.compXcptnsCount;
- if (info.compRetNativeType != TYP_VOID &&
- fgMoreThanOneReturnBlock())
+ if (info.compRetNativeType != TYP_VOID && hasMoreThanOneReturnBlock)
{
// The lifetime of this var might expand multiple BBs. So it is a long lifetime compiler temp.
lvaInlineeReturnSpillTemp = lvaGrabTemp(false DEBUGARG("Inline candidate multiple BBJ_RETURN spill temp"));
// know when these flags are set.
#define GTF_CALL_M_R2R_REL_INDIRECT 0x2000 // GT_CALL -- ready to run call is indirected through a relative address
+#define GTF_CALL_M_DOES_NOT_RETURN 0x4000 // GT_CALL -- call does not return
bool IsUnmanaged() const { return (gtFlags & GTF_CALL_UNMANAGED) != 0; }
bool NeedsNullCheck() const { return (gtFlags & GTF_CALL_NULLCHECK) != 0; }
bool IsVarargs() const { return (gtCallMoreFlags & GTF_CALL_M_VARARGS) != 0; }
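+ // True if the call is known to never return normally (for example, it always throws).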
+ bool IsNoReturn() const { return (gtCallMoreFlags & GTF_CALL_M_DOES_NOT_RETURN) != 0; }
+
unsigned short gtCallMoreFlags; // in addition to gtFlags
unsigned char gtCallType :3; // value from the gtCallTypes enumeration
INLINE_OBSERVATION(BEGIN_OPCODE_SCAN, bool, "prepare to look at opcodes", INFORMATION, CALLEE)
INLINE_OBSERVATION(BELOW_ALWAYS_INLINE_SIZE, bool, "below ALWAYS_INLINE size", INFORMATION, CALLEE)
INLINE_OBSERVATION(CLASS_PROMOTABLE, bool, "promotable value class", INFORMATION, CALLEE)
+INLINE_OBSERVATION(DOES_NOT_RETURN, bool, "does not return", INFORMATION, CALLEE)
INLINE_OBSERVATION(END_OPCODE_SCAN, bool, "done looking at opcodes", INFORMATION, CALLEE)
INLINE_OBSERVATION(HAS_SIMD, bool, "has SIMD arg, local, or ret", INFORMATION, CALLEE)
INLINE_OBSERVATION(HAS_SWITCH, bool, "has switch", INFORMATION, CALLEE)
bool hasSIMDTypeArgLocalOrReturn;
#endif // FEATURE_SIMD
- GenTree * iciCall; // The GT_CALL node to be inlined.
+ GenTreeCall * iciCall; // The GT_CALL node to be inlined.
GenTree * iciStmt; // The statement iciCall is in.
BasicBlock * iciBlock; // The basic block iciStmt is in.
};
#endif // defined(DEBUG) || defined(INLINE_DATA)
- InlinePolicy* policy = nullptr;
+ // Optionally install the ModelPolicy.
bool useModelPolicy = JitConfig.JitInlinePolicyModel() != 0;
if (useModelPolicy)
{
- // Optionally install the ModelPolicy.
- policy = new (compiler, CMK_Inlining) ModelPolicy(compiler, isPrejitRoot);
+ return new (compiler, CMK_Inlining) ModelPolicy(compiler, isPrejitRoot);
}
- else
+
+ // Optionally fall back to the original legacy policy.
+ bool useLegacyPolicy = JitConfig.JitInlinePolicyLegacy() != 0;
+
+ if (useLegacyPolicy)
{
- // Use the legacy policy
- policy = new (compiler, CMK_Inlining) LegacyPolicy(compiler, isPrejitRoot);
+ return new (compiler, CMK_Inlining) LegacyPolicy(compiler, isPrejitRoot);
}
- return policy;
+ // Use the enhanced legacy policy by default
+ return new (compiler, CMK_Inlining) EnhancedLegacyPolicy(compiler, isPrejitRoot);
}
//------------------------------------------------------------------------
}
}
+//------------------------------------------------------------------------
+// NoteBool: handle a boolean observation with non-fatal impact
+//
+// Arguments:
+ // obs - the current observation
+// value - the value of the observation
+
+void EnhancedLegacyPolicy::NoteBool(InlineObservation obs, bool value)
+{
+ switch (obs)
+ {
+ case InlineObservation::CALLEE_DOES_NOT_RETURN:
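+ // Just record the observation here; the decision whether to reject the
+ // inline is deferred to NoteInt, once the basic block count is known.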
+ m_IsNoReturn = value;
+ m_IsNoReturnKnown = true;
+ break;
+
+ default:
+ // Pass all other information to the legacy policy
+ LegacyPolicy::NoteBool(obs, value);
+ break;
+ }
+}
+
+//------------------------------------------------------------------------
+// NoteInt: handle an observed integer value
+//
+// Arguments:
+ // obs - the current observation
+// value - the value being observed
+
+void EnhancedLegacyPolicy::NoteInt(InlineObservation obs, int value)
+{
+ switch (obs)
+ {
+ case InlineObservation::CALLEE_NUMBER_OF_BASIC_BLOCKS:
+ {
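+ // The basic block count is noted after CALLEE_DOES_NOT_RETURN has been
+ // observed (see fgFindBasicBlocks), so the "no return" state is known here.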
+ assert(value != 0);
+ assert(m_IsNoReturnKnown);
+
+ //
+ // Let's be conservative for now and reject inlining of "no return" methods only
+ // if the callee contains a single basic block. This covers most of the use cases
+ // (typical throw helpers simply do "throw new X();" and so they have a single block)
+ // without affecting more exotic cases (loops that do actual work for example) where
+ // failure to inline could negatively impact code quality.
+ //
+
+ unsigned basicBlockCount = static_cast<unsigned>(value);
+
+ if (m_IsNoReturn && (basicBlockCount == 1))
+ {
+ SetNever(InlineObservation::CALLEE_DOES_NOT_RETURN);
+ }
+ else
+ {
+ LegacyPolicy::NoteInt(obs, value);
+ }
+
+ break;
+ }
+
+ default:
+ // Pass all other information to the legacy policy
+ LegacyPolicy::NoteInt(obs, value);
+ break;
+ }
+}
+
+//------------------------------------------------------------------------
+// PropagateNeverToRuntime: determine if a never result should cause the
+// method to be marked as un-inlinable.
+
+bool EnhancedLegacyPolicy::PropagateNeverToRuntime() const
+{
+ //
+ // Do not propagate the "no return" observation. If we do this then future inlining
+ // attempts will fail immediately without marking the call node as "no return".
+ // This can have an adverse impact on the caller's code quality as it may have to preserve
+ // registers across the call.
+ // TODO-Throughput: We should persist the "no return" information in the runtime
+ // so we don't need to re-analyze the inlinee all the time.
+ //
+
+ bool propagate = (m_Observation != InlineObservation::CALLEE_DOES_NOT_RETURN);
+
+ propagate &= LegacyPolicy::PropagateNeverToRuntime();
+
+ return propagate;
+}
+
#ifdef DEBUG
//------------------------------------------------------------------------
bool m_MethodIsMostlyLoadStore :1;
};
+// EnhancedLegacyPolicy extends the legacy policy by rejecting the inlining
+// of single-block methods that never return (typically throw helpers).
+
+class EnhancedLegacyPolicy : public LegacyPolicy
+{
+public:
+ EnhancedLegacyPolicy(Compiler* compiler, bool isPrejitRoot)
+ : LegacyPolicy(compiler, isPrejitRoot)
+ , m_IsNoReturn(false)
+ , m_IsNoReturnKnown(false)
+ {
+ // empty
+ }
+
+ // Policy observations
+ void NoteBool(InlineObservation obs, bool value) override;
+ void NoteInt(InlineObservation obs, int value) override;
+
+ // Policy policies
+ bool PropagateNeverToRuntime() const override;
+ bool IsLegacyPolicy() const override { return false; }
+
+protected:
+
+ // Data members
+ bool m_IsNoReturn :1;      // true if the callee is known to never return normally
+ bool m_IsNoReturnKnown :1; // true once the "no return" observation has been noted
+};
+
#ifdef DEBUG
// RandomPolicy implements a policy that inlines at random.
CONFIG_STRING(JitInlineReplayFile, W("JitInlineReplayFile"))
#endif // defined(DEBUG) || defined(INLINE_DATA)
+CONFIG_INTEGER(JitInlinePolicyLegacy, W("JitInlinePolicyLegacy"), 0) // Use the original legacy inline policy instead of the enhanced one
CONFIG_INTEGER(JitInlinePolicyModel, W("JitInlinePolicyModel"), 0)
#undef CONFIG_INTEGER
return result;
}
+ if (call->IsNoReturn())
+ {
+ //
+ // If we know that the call does not return then we can set fgRemoveRestOfBlock
+ // to remove all subsequent statements and change the call's basic block to BBJ_THROW.
+ // As a result the compiler won't need to preserve live registers across the call.
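+ //
+ // For example (illustrative only), in a block containing
+ //     CALL ThrowHelper.ThrowSomething() // known to never return
+ //     x = 42;                           // unreachable once the call is marked
+ // the assignment can be dropped and the block converted to BBJ_THROW.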
+ //
+ // This isn't needed for tail calls as there shouldn't be any code after the call anyway.
+ // Besides, the tail call code is part of the epilog and converting the block to
+ // BBJ_THROW would result in the tail call being dropped as the epilog is generated
+ // only for BBJ_RETURN blocks.
+ //
+ // Currently this doesn't work for non-void callees. Some of the code that handles
+ // fgRemoveRestOfBlock expects the tree to have the GTF_EXCEPT flag set but call nodes
+ // do not have this flag by default. We could add the flag here but the proper solution
+ // would be to replace the return expression with a local var node during inlining
+ // so the rest of the call tree stays in a separate statement. That statement can then
+ // be removed by fgRemoveRestOfBlock without needing to add GTF_EXCEPT anywhere.
+ //
+
+ if (!call->IsTailCall() && call->TypeGet() == TYP_VOID)
+ {
+ fgRemoveRestOfBlock = true;
+ }
+ }
return call;
}