#endif // DEBUG
}
-//*********************************************************************************************
-// #Phases
+//------------------------------------------------------------------------
+// compCompile: run phases needed for compilation
+//
+// Arguments:
+// methodCodePtr [OUT] - address of generated code
+// methodCodeSize [OUT] - size of the generated code (hot + cold sections)
+// compileFlags [IN] - flags controlling jit behavior
//
-// This is the most interesting 'toplevel' function in the JIT. It goes through the operations of
-// importing, morphing, optimizations and code generation. This is called from the EE through the
-// code:CILJit::compileMethod function.
+// Notes:
+// This is the most interesting 'toplevel' function in the JIT. It goes through the operations of
+// importing, morphing, optimizations and code generation. This is called from the EE through the
+// code:CILJit::compileMethod function.
//
-// For an overview of the structure of the JIT, see:
+// For an overview of the structure of the JIT, see:
// https://github.com/dotnet/runtime/blob/master/docs/design/coreclr/botr/ryujit-overview.md
//
+// Also called for inlinees, though they will only be run through the first few phases.
+//
void Compiler::compCompile(void** methodCodePtr, ULONG* methodCodeSize, JitFlags* compileFlags)
{
if (compIsForInlining())
VarSetOps::AssignAllowUninitRhs(this, compCurLife, VarSetOps::UninitVal());
- /* The temp holding the secret stub argument is used by fgImport() when importing the intrinsic. */
-
+ // The temp holding the secret stub argument is used by fgImport() when importing the intrinsic.
if (info.compPublishStubParam)
{
assert(lvaStubArgumentVar == BAD_VAR_NUM);
compFunctionTraceStart();
- /* Convert the instrs in each basic block to a tree based intermediate representation */
-
+ // Convert the instrs in each basic block to a tree based intermediate representation
fgImport();
assert(!fgComputePredsDone);
if (compIsForInlining())
{
- /* Quit inlining if fgImport() failed for any reason. */
-
+ // Abandon inlining if fgImport() failed for any reason
if (!compDonotInline())
{
- /* Filter out unimported BBs */
-
+ // Filter out unimported BBs
fgRemoveEmptyBlocks();
// Update type of return spill temp if we have gathered
return;
}
- assert(!compDonotInline());
+ // At this point in the phase list, all the inlinee phases have
+ // been run, and inlinee compiles have exited, so we should only
+ // get this far if we are jitting the root method.
+ noway_assert(!compIsForInlining());
// Maybe the caller was not interested in generating code
if (compIsForImportOnly())
EndPhase(PHASE_POST_IMPORT);
- /* Initialize the BlockSet epoch */
-
+ // Initialize the BlockSet epoch
NewBasicBlockEpoch();
- /* Massage the trees so that we can generate code out of them */
+ // Start phases that are broadly called morphing, and includes
+ // global morph, as well as other phases that massage the trees so
+ // that we can generate code out of them.
+ fgOutgoingArgTemps = nullptr;
+
+#ifdef DEBUG
+ if (verbose)
+ {
+ printf("*************** In fgMorph()\n");
+ }
+ if (verboseTrees)
+ {
+ fgDispBasicBlocks(true);
+ }
+#endif // DEBUG
+
+ // Insert call to class constructor as the first basic block if
+ // we were asked to do so.
+ if (info.compCompHnd->initClass(nullptr /* field */, info.compMethodHnd /* method */,
+ impTokenLookupContextHandle /* context */) &
+ CORINFO_INITCLASS_USE_HELPER)
+ {
+ fgEnsureFirstBBisScratch();
+ fgNewStmtAtBeg(fgFirstBB, fgInitThisClass());
+ }
+
+#ifdef DEBUG
+ if (opts.compGcChecks)
+ {
+ for (unsigned i = 0; i < info.compArgsCount; i++)
+ {
+ if (lvaTable[i].TypeGet() == TYP_REF)
+ {
+ // confirm that the argument is a GC pointer (for debugging (GC stress))
+ GenTree* op = gtNewLclvNode(i, TYP_REF);
+ GenTreeCall::Use* args = gtNewCallArgs(op);
+ op = gtNewHelperCallNode(CORINFO_HELP_CHECK_OBJ, TYP_VOID, args);
+
+ fgEnsureFirstBBisScratch();
+ fgNewStmtAtEnd(fgFirstBB, op);
+
+ if (verbose)
+ {
+ printf("\ncompGcChecks tree:\n");
+ gtDispTree(op);
+ }
+ }
+ }
+ }
+#endif // DEBUG
+
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+ if (opts.compStackCheckOnRet)
+ {
+ lvaReturnSpCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("ReturnSpCheck"));
+ lvaTable[lvaReturnSpCheck].lvType = TYP_I_IMPL;
+ }
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+
+#if defined(DEBUG) && defined(_TARGET_X86_)
+ if (opts.compStackCheckOnCall)
+ {
+ lvaCallSpCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("CallSpCheck"));
+ lvaTable[lvaCallSpCheck].lvType = TYP_I_IMPL;
+ }
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
+ // Filter out unimported BBs
+ fgRemoveEmptyBlocks();
+
+#ifdef DEBUG
+ // Inliner could add basic blocks. Check that the flowgraph data is up-to-date
+ fgDebugCheckBBlist(false, false);
+#endif // DEBUG
+
+ EndPhase(PHASE_MORPH_INIT);
+
+ // Inline callee methods into this root method
+ fgInline();
+
+ RecordStateAtEndOfInlining(); // Record "start" values for post-inlining cycles and elapsed time.
+
+ EndPhase(PHASE_MORPH_INLINE);
+
+ // Transform each GT_ALLOCOBJ node into either an allocation helper call or
+ // local variable allocation on the stack.
+ ObjectAllocator objectAllocator(this); // PHASE_ALLOCATE_OBJECTS
+
+ if (JitConfig.JitObjectStackAllocation() && opts.OptimizationEnabled())
+ {
+ objectAllocator.EnableObjectStackAllocation();
+ }
+
+ objectAllocator.Run();
- fgMorph();
+ // Add any internal blocks/trees we may need
+ fgAddInternal();
+
+#ifdef DEBUG
+ // Inliner could add basic blocks. Check that the flowgraph data is up-to-date
+ fgDebugCheckBBlist(false, false);
+ // Inliner could clone some trees.
+ fgDebugCheckNodesUniqueness();
+#endif // DEBUG
+
+ fgRemoveEmptyTry();
+
+ EndPhase(PHASE_EMPTY_TRY);
+
+ fgRemoveEmptyFinally();
+
+ EndPhase(PHASE_EMPTY_FINALLY);
+
+ fgMergeFinallyChains();
+
+ EndPhase(PHASE_MERGE_FINALLY_CHAINS);
+
+ fgCloneFinally();
+ fgUpdateFinallyTargetFlags();
+
+ EndPhase(PHASE_CLONE_FINALLY);
+
+ // Compute bbNum, bbRefs and bbPreds
+ //
+ JITDUMP("\nRenumbering the basic blocks for fgComputePreds\n");
+ fgRenumberBlocks();
+
+ // This is the first time full (not cheap) preds will be computed
+ //
+ noway_assert(!fgComputePredsDone);
+ fgComputePreds();
+
+ // Run an early flow graph simplification pass
+ if (opts.OptimizationEnabled())
+ {
+ fgUpdateFlowGraph();
+ }
+
+ EndPhase(PHASE_COMPUTE_PREDS);
+
+ // From this point on the flowgraph information such as bbNum,
+ // bbRefs or bbPreds has to be kept updated
+
+ // For x64 and ARM64 we need to mark irregular parameters
+ lvaRefCountState = RCS_EARLY;
+ fgResetImplicitByRefRefCount();
+
+ // Promote struct locals if necessary
+ fgPromoteStructs();
+
+ // Figure out what locals are address exposed
+ fgMarkAddressExposedLocals();
+
+ EndPhase(PHASE_STR_ADRLCL);
+
+ // Apply type updates to implicit byref parameters; also choose (based on address-exposed
+ // analysis) which implicit byref promotions to keep (requires copy to initialize) or discard.
+ fgRetypeImplicitByRefArgs();
+
+#ifdef DEBUG
+ // Now that locals have address-taken and implicit byref marked, we can safely apply stress.
+ lvaStressLclFld();
+ fgStress64RsltMul();
+#endif // DEBUG
+
+ EndPhase(PHASE_MORPH_IMPBYREF);
+
+ // Morph the trees in all the blocks of the method
+ fgMorphBlocks();
+
+ // Fix any LclVar annotations on discarded struct promotion temps for implicit by-ref args
+ fgMarkDemotedImplicitByRefArgs();
+ lvaRefCountState = RCS_INVALID;
+
+ EndPhase(PHASE_MORPH_GLOBAL);
+
+#if 0
+ JITDUMP("trees after fgMorphBlocks\n");
+ DBEXEC(VERBOSE, fgDispBasicBlocks(true));
+#endif
+
+#if defined(FEATURE_EH_FUNCLETS) && defined(_TARGET_ARM_)
+ if (fgNeedToAddFinallyTargetBits)
+ {
+ // We previously wiped out the BBF_FINALLY_TARGET bits due to some morphing; add them back.
+ fgAddFinallyTargetFlags();
+ fgNeedToAddFinallyTargetBits = false;
+ }
+#endif // defined(FEATURE_EH_FUNCLETS) && defined(_TARGET_ARM_)
+
+ // Decide the kind of code we want to generate
+ fgSetOptions();
+
+ fgExpandQmarkNodes();
+
+#ifdef DEBUG
+ compCurBB = nullptr;
+#endif // DEBUG
+
+ // End of the morphing phases
EndPhase(PHASE_MORPH_END);
- /* GS security checks for unsafe buffers */
+ // GS security checks for unsafe buffers
if (getNeedsGSSecurityCookie())
{
#ifdef DEBUG
// so make sure this is the case.
fgRenumberBlocks();
- /* If we need to emit GC Poll calls, mark the blocks that need them now. This is conservative and can
- * be optimized later. */
-
+ // If we need to emit GC Poll calls, mark the blocks that need them now.
+ // This is conservative and can be optimized later.
fgMarkGCPollBlocks();
EndPhase(PHASE_MARK_GC_POLL_BLOCKS);
#if defined(FEATURE_EH_FUNCLETS)
- /* Create funclets from the EH handlers. */
-
+ // Create funclets from the EH handlers.
fgCreateFunclets();
EndPhase(PHASE_CREATE_FUNCLETS);
if (opts.OptimizationEnabled())
{
- /* Perform loop inversion (i.e. transform "while" loops into
- "repeat" loops) and discover and classify natural loops
- (e.g. mark iterative loops as such). Also marks loop blocks
- and sets bbWeight to the loop nesting levels
- */
-
+ // Perform loop inversion (i.e. transform "while" loops into
+ // "repeat" loops) and discover and classify natural loops
+ // (e.g. mark iterative loops as such). Also marks loop blocks
+ // and sets bbWeight to the loop nesting levels
optOptimizeLoops();
EndPhase(PHASE_OPTIMIZE_LOOPS);
optCloneLoops();
EndPhase(PHASE_CLONE_LOOPS);
- /* Unroll loops */
+ // Unroll loops
optUnrollLoops();
EndPhase(PHASE_UNROLL_LOOPS);
}
fgDebugCheckLinks();
#endif
- /* Create the variable table (and compute variable ref counts) */
-
+ // Create the variable table (and compute variable ref counts)
lvaMarkLocalVars();
EndPhase(PHASE_MARK_LOCAL_VARS);
- // IMPORTANT, after this point, every place where trees are modified or cloned
- // the local variable reference counts must be updated
- // You can test the value of the following variable to see if
- // the local variable ref counts must be updated
- //
+ // IMPORTANT, after this point, locals are ref counted.
+ // However, ref counts are not kept incrementally up to date.
assert(lvaLocalVarRefCounted());
if (opts.OptimizationEnabled())
{
- /* Optimize boolean conditions */
-
+ // Optimize boolean conditions
optOptimizeBools();
EndPhase(PHASE_OPTIMIZE_BOOLS);
// optOptimizeBools() might have changed the number of blocks; the dominators/reachability might be bad.
}
- /* Figure out the order in which operators are to be evaluated */
+ // Figure out the order in which operators are to be evaluated
fgFindOperOrder();
EndPhase(PHASE_FIND_OPER_ORDER);
if (doEarlyProp)
{
- /* Propagate array length and rewrite getType() method call */
+ // Propagate array length and rewrite getType() method call
optEarlyProp();
EndPhase(PHASE_EARLY_PROP);
}
if (doLoopHoisting)
{
- /* Hoist invariant code out of loops */
+ // Hoist invariant code out of loops
optHoistLoopCode();
EndPhase(PHASE_HOIST_LOOP_CODE);
}
if (doCopyProp)
{
- /* Perform VN based copy propagation */
+ // Perform VN based copy propagation
optVnCopyProp();
EndPhase(PHASE_VN_COPY_PROP);
}
#if FEATURE_ANYCSE
- /* Remove common sub-expressions */
+ // Remove common sub-expressions
optOptimizeCSEs();
#endif // FEATURE_ANYCSE
#if ASSERTION_PROP
if (doAssertionProp)
{
- /* Assertion propagation */
+ // Assertion propagation
optAssertionPropMain();
EndPhase(PHASE_ASSERTION_PROP_MAIN);
}
if (doRangeAnalysis)
{
- /* Optimize array index range checks */
+ // Optimize array index range checks
RangeCheck rc(this);
rc.OptimizeRangeChecks();
EndPhase(PHASE_OPTIMIZE_INDEX_CHECKS);
}
#endif // ASSERTION_PROP
- /* update the flowgraph if we modified it during the optimization phase*/
+ // update the flowgraph if we modified it during the optimization phase
if (fgModified)
{
fgUpdateFlowGraph();
fgDebugCheckLinks();
#endif
- /* Enable this to gather statistical data such as
- * call and register argument info, flowgraph and loop info, etc. */
-
+ // Enable this to gather statistical data such as
+ // call and register argument info, flowgraph and loop info, etc.
compJitStats();
#ifdef _TARGET_ARM_
}
#endif // _TARGET_ARM_
- /* Assign registers to variables, etc. */
+ // Assign registers to variables, etc.
///////////////////////////////////////////////////////////////////////////////
// Dominator and reachability sets are no longer valid. They haven't been
///////////////////////////////////////////////////////////////////////////////
fgDomsComputed = false;
- /* Create LSRA before Lowering, this way Lowering can initialize the TreeNode Map */
+ // Create LinearScan before Lowering, so that Lowering can call LinearScan methods
+ // for determining whether locals are register candidates and (for xarch) whether
+ // a node is a containable memory op.
m_pLinearScan = getLinearScanAllocator(this);
- /* Lower */
+ // Lower
m_pLowering = new (this, CMK_LSRA) Lowering(this, m_pLinearScan); // PHASE_LOWERING
m_pLowering->Run();
StackLevelSetter stackLevelSetter(this); // PHASE_STACK_LEVEL_SETTER
stackLevelSetter.Run();
- lvaTrackedFixed = true; // We can not add any new tracked variables after this point.
+ // We can not add any new tracked variables after this point.
+ lvaTrackedFixed = true;
- /* Now that lowering is completed we can proceed to perform register allocation */
+ // Now that lowering is completed we can proceed to perform register allocation
m_pLinearScan->doLinearScan();
EndPhase(PHASE_LINEAR_SCAN);
fgDebugCheckLinks();
#endif
- /* Generate code */
-
+ // Generate code
codeGen->genGenerateCode(methodCodePtr, methodCodeSize);
#ifdef FEATURE_JIT_METHOD_PERF
/*****************************************************************************
*
- * Transform all basic blocks for codegen.
- */
-
-void Compiler::fgMorph()
-{
- noway_assert(!compIsForInlining()); // Inlinee's compiler should never reach here.
-
- fgOutgoingArgTemps = nullptr;
-
-#ifdef DEBUG
- if (verbose)
- {
- printf("*************** In fgMorph()\n");
- }
- if (verboseTrees)
- {
- fgDispBasicBlocks(true);
- }
-#endif // DEBUG
-
- // Insert call to class constructor as the first basic block if
- // we were asked to do so.
- if (info.compCompHnd->initClass(nullptr /* field */, info.compMethodHnd /* method */,
- impTokenLookupContextHandle /* context */) &
- CORINFO_INITCLASS_USE_HELPER)
- {
- fgEnsureFirstBBisScratch();
- fgNewStmtAtBeg(fgFirstBB, fgInitThisClass());
- }
-
-#ifdef DEBUG
- if (opts.compGcChecks)
- {
- for (unsigned i = 0; i < info.compArgsCount; i++)
- {
- if (lvaTable[i].TypeGet() == TYP_REF)
- {
- // confirm that the argument is a GC pointer (for debugging (GC stress))
- GenTree* op = gtNewLclvNode(i, TYP_REF);
- GenTreeCall::Use* args = gtNewCallArgs(op);
- op = gtNewHelperCallNode(CORINFO_HELP_CHECK_OBJ, TYP_VOID, args);
-
- fgEnsureFirstBBisScratch();
- fgNewStmtAtEnd(fgFirstBB, op);
-
- if (verbose)
- {
- printf("\ncompGcChecks tree:\n");
- gtDispTree(op);
- }
- }
- }
- }
-#endif // DEBUG
-
-#if defined(DEBUG) && defined(_TARGET_XARCH_)
- if (opts.compStackCheckOnRet)
- {
- lvaReturnSpCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("ReturnSpCheck"));
- lvaTable[lvaReturnSpCheck].lvType = TYP_I_IMPL;
- }
-#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
-
-#if defined(DEBUG) && defined(_TARGET_X86_)
- if (opts.compStackCheckOnCall)
- {
- lvaCallSpCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("CallSpCheck"));
- lvaTable[lvaCallSpCheck].lvType = TYP_I_IMPL;
- }
-#endif // defined(DEBUG) && defined(_TARGET_X86_)
-
- /* Filter out unimported BBs */
-
- fgRemoveEmptyBlocks();
-
-#ifdef DEBUG
- /* Inliner could add basic blocks. Check that the flowgraph data is up-to-date */
- fgDebugCheckBBlist(false, false);
-#endif // DEBUG
-
- EndPhase(PHASE_MORPH_INIT);
-
- /* Inline */
- fgInline();
-#if 0
- JITDUMP("trees after inlining\n");
- DBEXEC(VERBOSE, fgDispBasicBlocks(true));
-#endif
-
- RecordStateAtEndOfInlining(); // Record "start" values for post-inlining cycles and elapsed time.
-
- EndPhase(PHASE_MORPH_INLINE);
-
- // Transform each GT_ALLOCOBJ node into either an allocation helper call or
- // local variable allocation on the stack.
- ObjectAllocator objectAllocator(this); // PHASE_ALLOCATE_OBJECTS
-
- if (JitConfig.JitObjectStackAllocation() && opts.OptimizationEnabled())
- {
- objectAllocator.EnableObjectStackAllocation();
- }
-
- objectAllocator.Run();
-
- /* Add any internal blocks/trees we may need */
-
- fgAddInternal();
-
-#ifdef DEBUG
- /* Inliner could add basic blocks. Check that the flowgraph data is up-to-date */
- fgDebugCheckBBlist(false, false);
- /* Inliner could clone some trees. */
- fgDebugCheckNodesUniqueness();
-#endif // DEBUG
-
- fgRemoveEmptyTry();
-
- EndPhase(PHASE_EMPTY_TRY);
-
- fgRemoveEmptyFinally();
-
- EndPhase(PHASE_EMPTY_FINALLY);
-
- fgMergeFinallyChains();
-
- EndPhase(PHASE_MERGE_FINALLY_CHAINS);
-
- fgCloneFinally();
- fgUpdateFinallyTargetFlags();
-
- EndPhase(PHASE_CLONE_FINALLY);
-
- // Compute bbNum, bbRefs and bbPreds
- //
- JITDUMP("\nRenumbering the basic blocks for fgComputePreds\n");
- fgRenumberBlocks();
-
- // This is the first time full (not cheap) preds will be computed
- //
- noway_assert(!fgComputePredsDone);
- fgComputePreds();
-
- // Run an early flow graph simplification pass
- if (opts.OptimizationEnabled())
- {
- fgUpdateFlowGraph();
- }
-
- EndPhase(PHASE_COMPUTE_PREDS);
-
- // From this point on the flowgraph information such as bbNum,
- // bbRefs or bbPreds has to be kept updated
-
- /* For x64 and ARM64 we need to mark irregular parameters */
- lvaRefCountState = RCS_EARLY;
- fgResetImplicitByRefRefCount();
-
- /* Promote struct locals if necessary */
- fgPromoteStructs();
-
- /* Now it is the time to figure out what locals have address-taken. */
- fgMarkAddressExposedLocals();
-
- EndPhase(PHASE_STR_ADRLCL);
-
- /* Apply the type update to implicit byref parameters; also choose (based on address-exposed
- analysis) which implicit byref promotions to keep (requires copy to initialize) or discard. */
- fgRetypeImplicitByRefArgs();
-
-#ifdef DEBUG
- /* Now that locals have address-taken and implicit byref marked, we can safely apply stress. */
- lvaStressLclFld();
- fgStress64RsltMul();
-#endif // DEBUG
-
- EndPhase(PHASE_MORPH_IMPBYREF);
-
- /* Morph the trees in all the blocks of the method */
-
- fgMorphBlocks();
-
- /* Fix any LclVar annotations on discarded struct promotion temps for implicit by-ref args */
- fgMarkDemotedImplicitByRefArgs();
- lvaRefCountState = RCS_INVALID;
-
- EndPhase(PHASE_MORPH_GLOBAL);
-
-#if 0
- JITDUMP("trees after fgMorphBlocks\n");
- DBEXEC(VERBOSE, fgDispBasicBlocks(true));
-#endif
-
-#if defined(FEATURE_EH_FUNCLETS) && defined(_TARGET_ARM_)
- if (fgNeedToAddFinallyTargetBits)
- {
- // We previously wiped out the BBF_FINALLY_TARGET bits due to some morphing; add them back.
- fgAddFinallyTargetFlags();
- fgNeedToAddFinallyTargetBits = false;
- }
-#endif // defined(FEATURE_EH_FUNCLETS) && defined(_TARGET_ARM_)
-
- /* Decide the kind of code we want to generate */
-
- fgSetOptions();
-
- fgExpandQmarkNodes();
-
-#ifdef DEBUG
- compCurBB = nullptr;
-#endif // DEBUG
-}
-
-/*****************************************************************************
- *
* Promoting struct locals
*/
void Compiler::fgPromoteStructs()