getEmitter()->emitDisableGC();
break;
+ case GT_START_PREEMPTGC:
+            // Kill callee-saved GC registers, and create a label
+ // so that information gets propagated to the emitter.
+ gcInfo.gcMarkRegSetNpt(RBM_INT_CALLEE_SAVED);
+ genDefineTempLabel(genCreateTempLabel());
+ break;
+
case GT_PROF_HOOK:
// We should be seeing this only if profiler hook is needed
noway_assert(compiler->compIsProfilerHookNeeded());
break;
#endif // !defined(JIT32_GCENCODER)
+ case GT_START_PREEMPTGC:
+            // Kill callee-saved GC registers, and create a label
+ // so that information gets propagated to the emitter.
+ gcInfo.gcMarkRegSetNpt(RBM_INT_CALLEE_SAVED);
+ genDefineTempLabel(genCreateTempLabel());
+ break;
+
case GT_PROF_HOOK:
#ifdef PROFILING_SUPPORTED
// We should be seeing this only if profiler hook is needed
case GT_NO_OP:
case GT_START_NONGC:
+ case GT_START_PREEMPTGC:
case GT_PROF_HOOK:
case GT_CATCH_ARG:
case GT_MEMORYBARRIER:
return true;
}
}
+ else if (tree->OperIs(GT_START_PREEMPTGC))
+ {
+ return true;
+ }
+
return false;
}
case GT_SETCC:
case GT_NO_OP:
case GT_START_NONGC:
+ case GT_START_PREEMPTGC:
case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
case GT_END_LFIN:
case GT_SETCC:
case GT_NO_OP:
case GT_START_NONGC:
+ case GT_START_PREEMPTGC:
case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
case GT_END_LFIN:
case GT_SETCC:
case GT_NO_OP:
case GT_START_NONGC:
+ case GT_START_PREEMPTGC:
case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
case GT_END_LFIN:
case GT_SETCC:
case GT_NO_OP:
case GT_START_NONGC:
+ case GT_START_PREEMPTGC:
case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
case GT_END_LFIN:
case GT_NO_OP:
case GT_START_NONGC:
+ case GT_START_PREEMPTGC:
case GT_PROF_HOOK:
case GT_CATCH_ARG:
case GT_MEMORYBARRIER:
GTNODE(START_NONGC , GenTree ,0,GTK_LEAF|GTK_NOVALUE) // starts a new instruction group that will be non-gc interruptible
+GTNODE(START_PREEMPTGC , GenTree ,0,GTK_LEAF|GTK_NOVALUE) // starts a new instruction group where preemptive GC is enabled
+
GTNODE(PROF_HOOK , GenTree ,0,GTK_LEAF|GTK_NOVALUE) // profiler Enter/Leave/TailCall hook
GTNODE(RETFILT , GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE) // end filter with TYP_I_IMPL return value
case GT_SWITCH:
case GT_RETFILT:
case GT_START_NONGC:
+ case GT_START_PREEMPTGC:
case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
case GT_END_LFIN:
// InlinedCallFrame.callTarget = methodHandle // stored in m_Datum
// InlinedCallFrame.m_pCallSiteSP = SP // x86 only
// InlinedCallFrame.m_pCallerReturnAddress = return address
+    //      GT_START_PREEMPTGC
// Thread.gcState = 0
// (non-stub) - update top Frame on TCB // 64-bit targets only
}
#endif // _TARGET_64BIT_
- // IMPORTANT **** This instruction must come last!!! ****
+ // IMPORTANT **** This instruction must be the last real instruction ****
// It changes the thread's state to Preemptive mode
// ----------------------------------------------------------------------------------
// [tcb + offsetOfGcState] = 0
-
GenTree* storeGCState = SetGCState(0);
BlockRange().InsertBefore(insertBefore, LIR::SeqTree(comp, storeGCState));
ContainCheckStoreIndir(storeGCState->AsIndir());
+
+ // Indicate that codegen has switched this thread to preemptive GC.
+ // This tree node doesn't generate any code, but impacts LSRA and gc reporting.
+ // This tree node is simple so doesn't require sequencing.
+ GenTree* preemptiveGCNode = new (comp, GT_START_PREEMPTGC) GenTree(GT_START_PREEMPTGC, TYP_VOID);
+ BlockRange().InsertBefore(insertBefore, preemptiveGCNode);
}
//------------------------------------------------------------------------
assert(dstCount == 0);
break;
+ case GT_START_PREEMPTGC:
+ // This kills GC refs in callee save regs
+ srcCount = 0;
+ assert(dstCount == 0);
+ BuildDefsWithKills(tree, 0, RBM_NONE, RBM_NONE);
+ break;
+
case GT_LONG:
assert(tree->IsUnusedValue()); // Contained nodes are already processed, only unused GT_LONG can reach here.
// An unused GT_LONG doesn't produce any registers.
assert(dstCount == 0);
break;
+ case GT_START_PREEMPTGC:
+ // This kills GC refs in callee save regs
+ srcCount = 0;
+ assert(dstCount == 0);
+ BuildDefsWithKills(tree, 0, RBM_NONE, RBM_NONE);
+ break;
+
case GT_CNS_DBL:
{
GenTreeDblCon* dblConst = tree->AsDblCon();
// Arguments:
// tree - the tree for which kill positions should be generated
// currentLoc - the location at which the kills should be added
+// killMask - The mask of registers killed by this node
//
// Return Value:
// true - kills were inserted
// the multiple defs for a regPair are in different locations.
// If we generate any kills, we will mark all currentLiveVars as being preferenced
// to avoid the killed registers. This is somewhat conservative.
+//
+// This method can add kills even if killMask is RBM_NONE, if this tree is one of the
+// special cases that signals that we can't permit callee save registers to hold GC refs.
bool LinearScan::buildKillPositionsForNode(GenTree* tree, LsraLocation currentLoc, regMaskTP killMask)
{
- bool isCallKill = ((killMask == RBM_INT_CALLEE_TRASH) || (killMask == RBM_CALLEE_TRASH));
+ bool insertedKills = false;
+
if (killMask != RBM_NONE)
{
// The killMask identifies a set of registers that will be used during codegen.
addRefsForPhysRegMask(killMask, currentLoc, RefTypeKill, true);
// TODO-CQ: It appears to be valuable for both fp and int registers to avoid killing the callee
- // save regs on infrequently exectued paths. However, it results in a large number of asmDiffs,
+ // save regs on infrequently executed paths. However, it results in a large number of asmDiffs,
// many of which appear to be regressions (because there is more spill on the infrequently path),
// but are not really because the frequent path becomes smaller. Validating these diffs will need
// to be done before making this change.
{
continue;
}
- Interval* interval = getIntervalForLocalVar(varIndex);
+ Interval* interval = getIntervalForLocalVar(varIndex);
+ const bool isCallKill = ((killMask == RBM_INT_CALLEE_TRASH) || (killMask == RBM_CALLEE_TRASH));
+
if (isCallKill)
{
interval->preferCalleeSave = true;
}
}
- if (compiler->killGCRefs(tree))
- {
- RefPosition* pos = newRefPosition((Interval*)nullptr, currentLoc, RefTypeKillGCRefs, tree,
- (allRegs(TYP_REF) & ~RBM_ARG_REGS));
- }
- return true;
+ insertedKills = true;
}
- return false;
+ if (compiler->killGCRefs(tree))
+ {
+ RefPosition* pos =
+ newRefPosition((Interval*)nullptr, currentLoc, RefTypeKillGCRefs, tree, (allRegs(TYP_REF) & ~RBM_ARG_REGS));
+ insertedKills = true;
+ }
+
+ return insertedKills;
}
//----------------------------------------------------------------------------
VARSET_TP liveLargeVectors(VarSetOps::UninitVal());
bool doLargeVectorRestore = false;
#endif // FEATURE_PARTIAL_SIMD_CALLEE_SAVE
+
+ // Call this even when killMask is RBM_NONE, as we have to check for some special cases
+ buildKillPositionsForNode(tree, currentLoc + 1, killMask);
+
if (killMask != RBM_NONE)
{
- buildKillPositionsForNode(tree, currentLoc + 1, killMask);
#if FEATURE_PARTIAL_SIMD_CALLEE_SAVE
if (enregisterLocalVars && ((killMask & RBM_FLT_CALLEE_TRASH) != RBM_NONE))
{
assert(dstCount == 0);
break;
+ case GT_START_PREEMPTGC:
+ // This kills GC refs in callee save regs
+ srcCount = 0;
+ assert(dstCount == 0);
+ BuildDefsWithKills(tree, 0, RBM_NONE, RBM_NONE);
+ break;
+
case GT_PROF_HOOK:
srcCount = 0;
assert(dstCount == 0);