setUsesSIMDTypes(false);
#endif // FEATURE_SIMD
- if (compIsForInlining() || compIsForImportOnly())
+ if (compIsForImportOnly())
{
return;
}
+
+#if FEATURE_TAILCALL_OPT
+ // By default, opportunistic tail call optimization is enabled.
+ // Recognition is done in the importer, so this must be set for
+ // inlinees as well.
+ opts.compTailCallOpt = true;
+#endif // FEATURE_TAILCALL_OPT
+
+ if (compIsForInlining())
+ {
+ return;
+ }
+
// The rest of the opts fields that we initialize here
// should only be used when we generate code for the method
// They should not be used when importing or inlining
+ CLANG_FORMAT_COMMENT_ANCHOR;
+
+#if FEATURE_TAILCALL_OPT
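+ // Tail call loop optimization is also enabled by default; it is only consulted
+ // when generating code for the root method, never when importing or inlining.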
+ opts.compTailCallLoopOpt = true;
+#endif // FEATURE_TAILCALL_OPT
opts.genFPorder = true;
opts.genFPopt = true;
opts.instrCount = 0;
opts.lvRefCount = 0;
-#if FEATURE_TAILCALL_OPT
- // By default opportunistic tail call optimization is enabled
- opts.compTailCallOpt = true;
- opts.compTailCallLoopOpt = true;
-#endif
-
#ifdef PROFILING_SUPPORTED
opts.compJitELTHookEnabled = false;
#endif // PROFILING_SUPPORTED
prefixFlags |= PREFIX_TAILCALL_EXPLICIT;
}
}
+ }
+
+ // This is split up to avoid goto flow warnings.
+ bool isRecursive;
+ isRecursive = !compIsForInlining() && (callInfo.hMethod == info.compMethodHnd);
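+ // (For inline candidates isRecursive is always false; recursion is only
+ // recognized against the root method being compiled.)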
- // Note that when running under tail call stress, a call will be marked as explicit tail prefixed
- // hence will not be considered for implicit tail calling.
- bool isRecursive = (callInfo.hMethod == info.compMethodHnd);
- if (impIsImplicitTailCallCandidate(opcode, codeAddr + sz, codeEndp, prefixFlags, isRecursive))
+ // Note that when running under tail call stress, a call will be marked as explicit
+ // tail prefixed and hence will not be considered for implicit tail calling.
+ if (impIsImplicitTailCallCandidate(opcode, codeAddr + sz, codeEndp, prefixFlags, isRecursive))
+ {
+ if (compIsForInlining())
+ {
+#if FEATURE_TAILCALL_OPT_SHARED_RETURN
+ // Are we inlining at an implicit tail call site? If so, we can flag
+ // implicit tail call sites in the inline body. These call sites
+ // often end up in non-BBJ_RETURN blocks, so only flag them when
+ // we're able to handle shared returns.
+ if (impInlineInfo->iciCall->IsImplicitTailCall())
+ {
+ JITDUMP(" (Inline Implicit Tail call: prefixFlags |= PREFIX_TAILCALL_IMPLICIT)");
+ prefixFlags |= PREFIX_TAILCALL_IMPLICIT;
+ }
+#endif // FEATURE_TAILCALL_OPT_SHARED_RETURN
+ }
+ else
{
JITDUMP(" (Implicit Tail call: prefixFlags |= PREFIX_TAILCALL_IMPLICIT)");
prefixFlags |= PREFIX_TAILCALL_IMPLICIT;
printTreeID(fgMorphStmt);
printf(" in BB%02u:\n", compCurBB->bbNum);
gtDispTree(fgMorphStmt);
-
- // printf("startVars=%d.\n", startVars);
+ if (call->IsImplicitTailCall())
+ {
+ printf("Note: candidate is implicit tail call\n");
+ }
}
#endif
{
treeWithCall = stmtExpr->gtGetOp2();
}
- if (treeWithCall->gtOper == GT_CAST)
- {
- noway_assert(treeWithCall->gtGetOp1() == call && !treeWithCall->gtOverflow());
- }
- else
+
+ // Peel off casts
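+ // (the call's result may be wrapped in more than one non-overflow cast)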
+ while (treeWithCall->gtOper == GT_CAST)
{
- noway_assert(treeWithCall == call);
+ noway_assert(!treeWithCall->gtOverflow());
+ treeWithCall = treeWithCall->gtGetOp1();
}
+
+ noway_assert(treeWithCall == call);
}
#endif