The actual stack pointer checking code (controlled by COMPlus_JitStackChecks) had
gotten lost in the transition between JIT32 and RyuJIT. I fixed the "on return from
function" case for x86/x64, and the "around every call site" case for x86.
I removed the arm64 case because it's not easy to store SP to a
stack local or directly compare SP against a stack local without
a temporary. Also, for the fixed outgoing arg space ABIs (all but x86),
these checks don't seem too useful anyway, so I also removed the
arm case.
#endif // !_TARGET_X86_
#endif // !FEATURE_PUT_STRUCT_ARG_STK
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+ void genStackPointerCheck(bool doStackPointerCheck, unsigned lvaStackPointerVar);
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+
#ifdef DEBUG
GenTree* lastConsumedNode;
void genNumberOperandUse(GenTree* const operand, int& useNum) const;
BasicBlock* endLabel = nullptr;
unsigned stackAdjustment = 0;
-#ifdef DEBUG
- // Verify ESP
- if (compiler->opts.compStackCheckOnRet)
- {
- noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
- getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
-
- BasicBlock* esp_check = genCreateTempLabel();
- emitJumpKind jmpEqual = genJumpKindForOper(GT_EQ, CK_SIGNED);
- inst_JMP(jmpEqual, esp_check);
- getEmitter()->emitIns(INS_BREAKPOINT);
- genDefineTempLabel(esp_check);
- }
-#endif
-
noway_assert(isFramePointerUsed()); // localloc requires Frame Pointer to be established since SP changes
noway_assert(genStackLevel == 0); // Can't have anything on the stack
}
#endif
-#ifdef DEBUG
- // Update new ESP
- if (compiler->opts.compStackCheckOnRet)
- {
- noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
- getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
- }
-#endif
-
genProduceReg(tree);
}
BasicBlock* loop = nullptr;
unsigned stackAdjustment = 0;
-#ifdef DEBUG
- // Verify ESP
- if (compiler->opts.compStackCheckOnRet)
- {
- noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
- getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
-
- BasicBlock* esp_check = genCreateTempLabel();
- emitJumpKind jmpEqual = genJumpKindForOper(GT_EQ, CK_SIGNED);
- inst_JMP(jmpEqual, esp_check);
- getEmitter()->emitIns(INS_bkpt);
- genDefineTempLabel(esp_check);
- }
-#endif
-
noway_assert(isFramePointerUsed()); // localloc requires Frame Pointer to be established since SP changes
noway_assert(genStackLevel == 0); // Can't have anything on the stack
}
#endif
-#ifdef DEBUG
- // Update new ESP
- if (compiler->opts.compStackCheckOnRet)
- {
- noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
- getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, targetReg, compiler->lvaReturnEspCheck, 0);
- }
-#endif
-
genProduceReg(tree);
}
#if !defined(UNIX_AMD64_ABI)
assert((i > 0) || (regNum == varDsc->lvArgReg));
#endif // defined(UNIX_AMD64_ABI)
+
// Is the arg dead on entry to the method ?
if ((regArgMaskLive & genRegMask(regNum)) == 0)
continue;
}
#endif // defined(UNIX_AMD64_ABI)
+
// If the arg is dead on entry to the method, skip it
if (regArgTab[argNum].processed)
getEmitter()->emitIns_R_R_I(INS_mov, EA_8BYTE, destRegNum, nextRegNum, 1);
}
#endif // defined(_TARGET_ARM64_) && defined(FEATURE_SIMD)
+
// Mark the rest of the argument registers corresponding to this multi-reg type as
// being processed and no longer live.
for (int regSlot = 1; regSlot < argRegCount; regSlot++)
noway_assert(!isFramePointerUsed() || loOffs != 0);
#endif // !defined(_TARGET_AMD64_)
+
// printf(" Untracked tmp at [EBP-%04X]\n", -stkOffs);
hasUntrLcl = true;
#endif // _TARGET_X86_
-#ifdef DEBUG
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
if (compiler->opts.compStackCheckOnRet)
{
- noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
- getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
+ noway_assert(compiler->lvaReturnSpCheck != 0xCCCCCCCC &&
+ compiler->lvaTable[compiler->lvaReturnSpCheck].lvDoNotEnregister &&
+ compiler->lvaTable[compiler->lvaReturnSpCheck].lvOnFrame);
+ getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnSpCheck, 0);
}
-#endif
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
getEmitter()->emitEndProlog();
compiler->unwindEndProlog();
}
}
#endif // PROFILING_SUPPORTED
+
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+ bool doStackPointerCheck = compiler->opts.compStackCheckOnRet;
+
+#if FEATURE_EH_FUNCLETS
+ // Don't do stack pointer check at the return from a funclet; only for the main function.
+ if (compiler->funCurrentFunc()->funKind != FUNC_ROOT)
+ {
+ doStackPointerCheck = false;
+ }
+#else // !FEATURE_EH_FUNCLETS
+ // Don't generate stack checks for x86 finally/filter EH returns: these are not invoked
+ // with the same SP as the main function. See also CodeGen::genEHFinallyOrFilterRet().
+ if ((compiler->compCurBB->bbJumpKind == BBJ_EHFINALLYRET) || (compiler->compCurBB->bbJumpKind == BBJ_EHFILTERRET))
+ {
+ doStackPointerCheck = false;
+ }
+#endif // !FEATURE_EH_FUNCLETS
+
+ genStackPointerCheck(doStackPointerCheck, compiler->lvaReturnSpCheck);
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
}
+
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+//------------------------------------------------------------------------
+// genStackPointerCheck: Generate code to check the stack pointer against a saved value.
+// This is a debug check.
+//
+// Arguments:
+// doStackPointerCheck - If true, do the stack pointer check, otherwise do nothing.
+// lvaStackPointerVar - The local variable number that holds the value of the stack pointer
+// we are comparing against.
+//
+// Return Value:
+// None
+//
+void CodeGen::genStackPointerCheck(bool doStackPointerCheck, unsigned lvaStackPointerVar)
+{
+ if (doStackPointerCheck)
+ {
+ // The local must be a do-not-enregister, on-frame slot, and must have been
+ // allocated (0xCCCCCCCC is the uninitialized-var sentinel).
+ noway_assert(lvaStackPointerVar != 0xCCCCCCCC && compiler->lvaTable[lvaStackPointerVar].lvDoNotEnregister &&
+ compiler->lvaTable[lvaStackPointerVar].lvOnFrame);
+ getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, lvaStackPointerVar, 0);
+
+ // If SP equals the saved value, jump over the breakpoint; reaching
+ // INS_BREAKPOINT therefore indicates the stack pointer was corrupted.
+ BasicBlock* sp_check = genCreateTempLabel();
+ getEmitter()->emitIns_J(INS_je, sp_check);
+ instGen(INS_BREAKPOINT);
+ genDefineTempLabel(sp_check);
+ }
+}
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
// You have to be careful if you create basic blocks from now on
compiler->fgSafeBasicBlockCreation = false;
+#endif // DEBUG
+
+#if defined(DEBUG) && defined(_TARGET_X86_)
- // This stress mode is not compatible with fully interruptible GC
+ // Check stack pointer on call stress mode is not compatible with fully interruptible GC. REVIEW: why?
+ //
if (genInterruptible && compiler->opts.compStackCheckOnCall)
{
compiler->opts.compStackCheckOnCall = false;
}
- // This stress mode is not compatible with fully interruptible GC
- if (genInterruptible && compiler->opts.compStackCheckOnRet)
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+ // Check stack pointer on return stress mode is not compatible with fully interruptible GC. REVIEW: why?
+ // It is also not compatible with any function that makes a tailcall: we aren't smart enough to only
+ // insert the SP check in the non-tailcall returns.
+ //
+ if ((genInterruptible || compiler->compTailCallUsed) && compiler->opts.compStackCheckOnRet)
{
compiler->opts.compStackCheckOnRet = false;
}
-#endif // DEBUG
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
// Prepare the blocks for exception handling codegen: mark the blocks that needs labels.
genPrepForEHCodegen();
BasicBlock* endLabel = nullptr;
#ifdef DEBUG
- // Verify ESP
- if (compiler->opts.compStackCheckOnRet)
- {
- noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
- getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
-
- BasicBlock* esp_check = genCreateTempLabel();
- emitJumpKind jmpEqual = genJumpKindForOper(GT_EQ, CK_SIGNED);
- inst_JMP(jmpEqual, esp_check);
- getEmitter()->emitIns(INS_BREAKPOINT);
- genDefineTempLabel(esp_check);
- }
+ genStackPointerCheck(compiler->opts.compStackCheckOnRet, compiler->lvaReturnSpCheck);
#endif
noway_assert(isFramePointerUsed()); // localloc requires Frame Pointer to be established since SP changes
#endif
#ifdef DEBUG
- // Update new ESP
+ // Update local variable to reflect the new stack pointer.
if (compiler->opts.compStackCheckOnRet)
{
- noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
- compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
- getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
+ noway_assert(compiler->lvaReturnSpCheck != 0xCCCCCCCC &&
+ compiler->lvaTable[compiler->lvaReturnSpCheck].lvDoNotEnregister &&
+ compiler->lvaTable[compiler->lvaReturnSpCheck].lvOnFrame);
+ getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnSpCheck, 0);
}
#endif
}
}
+#if defined(DEBUG) && defined(_TARGET_X86_)
+ // Store the stack pointer so we can check it after the call.
+ if (compiler->opts.compStackCheckOnCall && call->gtCallType == CT_USER_FUNC)
+ {
+ noway_assert(compiler->lvaCallSpCheck != 0xCCCCCCCC &&
+ compiler->lvaTable[compiler->lvaCallSpCheck].lvDoNotEnregister &&
+ compiler->lvaTable[compiler->lvaCallSpCheck].lvOnFrame);
+ getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaCallSpCheck, 0);
+ }
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
bool fPossibleSyncHelperCall = false;
CorInfoHelpFunc helperNum = CORINFO_HELP_UNDEF;
gcInfo.gcMarkRegSetNpt(RBM_INTRET);
}
+#if defined(DEBUG) && defined(_TARGET_X86_)
+ if (compiler->opts.compStackCheckOnCall && call->gtCallType == CT_USER_FUNC)
+ {
+ noway_assert(compiler->lvaCallSpCheck != 0xCCCCCCCC &&
+ compiler->lvaTable[compiler->lvaCallSpCheck].lvDoNotEnregister &&
+ compiler->lvaTable[compiler->lvaCallSpCheck].lvOnFrame);
+ if (!fCallerPop && (stackArgBytes != 0))
+ {
+ // ECX is trashed, so can be used to compute the expected SP. We saved the value of SP
+ // after pushing all the stack arguments, but the caller popped the arguments, so we need
+ // to do some math to figure a good comparison.
+ getEmitter()->emitIns_R_R(INS_mov, EA_4BYTE, REG_ARG_0, REG_SPBASE);
+ getEmitter()->emitIns_R_I(INS_sub, EA_4BYTE, REG_ARG_0, stackArgBytes);
+ getEmitter()->emitIns_S_R(INS_cmp, EA_4BYTE, REG_ARG_0, compiler->lvaCallSpCheck, 0);
+ }
+ else
+ {
+ getEmitter()->emitIns_S_R(INS_cmp, EA_4BYTE, REG_SPBASE, compiler->lvaCallSpCheck, 0);
+ }
+
+ BasicBlock* sp_check = genCreateTempLabel();
+ getEmitter()->emitIns_J(INS_je, sp_check);
+ instGen(INS_BREAKPOINT);
+ genDefineTempLabel(sp_check);
+ }
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
#if !FEATURE_EH_FUNCLETS
//-------------------------------------------------------------------------
// Create a label for tracking of region protected by the monitor in synchronized methods.
#ifdef DEBUG
assert(!codeGen->isGCTypeFixed());
opts.compGcChecks = (JitConfig.JitGCChecks() != 0) || compStressCompile(STRESS_GENERIC_VARN, 5);
+#endif
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
enum
{
STACK_CHECK_ON_RETURN = 0x1,
STACK_CHECK_ON_CALL = 0x2,
- STACK_CHECK_ALL = 0x3,
+ STACK_CHECK_ALL = 0x3
};
DWORD dwJitStackChecks = JitConfig.JitStackChecks();
{
dwJitStackChecks = STACK_CHECK_ALL;
}
- opts.compStackCheckOnRet = (dwJitStackChecks & DWORD(STACK_CHECK_ON_RETURN)) != 0;
+ opts.compStackCheckOnRet = (dwJitStackChecks & DWORD(STACK_CHECK_ON_RETURN)) != 0;
+#if defined(_TARGET_X86_)
opts.compStackCheckOnCall = (dwJitStackChecks & DWORD(STACK_CHECK_ON_CALL)) != 0;
-#endif
+#endif // defined(_TARGET_X86_)
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
#if MEASURE_MEM_ALLOC
s_dspMemStats = (JitConfig.DisplayMemStats() != 0);
unsigned lvaPromotedStructAssemblyScratchVar;
#endif // _TARGET_ARM_
-#ifdef DEBUG
- unsigned lvaReturnEspCheck; // confirms ESP not corrupted on return
- unsigned lvaCallEspCheck; // confirms ESP not corrupted after a call
-#endif
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+ unsigned lvaReturnSpCheck; // Stores SP to confirm it is not corrupted on return.
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+
+#if defined(DEBUG) && defined(_TARGET_X86_)
+
+ unsigned lvaCallSpCheck; // Stores SP to confirm it is not corrupted after every call.
+
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
unsigned lvaGenericsContextUseCount;
#endif
#ifdef DEBUG
- bool compGcChecks; // Check arguments and return values to ensure they are sane
- bool compStackCheckOnRet; // Check ESP on return to ensure it is correct
- bool compStackCheckOnCall; // Check ESP after every call to ensure it is correct
-
+ bool compGcChecks; // Check arguments and return values to ensure they are sane
#endif
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+ bool compStackCheckOnRet; // Check stack pointer on return to ensure it is correct.
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+
+#if defined(DEBUG) && defined(_TARGET_X86_)
+
+ bool compStackCheckOnCall; // Check stack pointer after call to ensure it is correct. Only for x86.
+
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
bool compNeedSecurityCheck; // This flag really means where or not a security object needs
// to be allocated on the stack.
// It will be set to true in the following cases:
}
}
}
+#endif // DEBUG
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
if (opts.compStackCheckOnRet)
{
- lvaReturnEspCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("ReturnEspCheck"));
- lvaTable[lvaReturnEspCheck].lvType = TYP_INT;
+ lvaReturnSpCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("ReturnSpCheck"));
+ lvaTable[lvaReturnSpCheck].lvType = TYP_I_IMPL;
}
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+#if defined(DEBUG) && defined(_TARGET_X86_)
if (opts.compStackCheckOnCall)
{
- lvaCallEspCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("CallEspCheck"));
- lvaTable[lvaCallEspCheck].lvType = TYP_INT;
+ lvaCallSpCheck = lvaGrabTempWithImplicitUse(false DEBUGARG("CallSpCheck"));
+ lvaTable[lvaCallSpCheck].lvType = TYP_I_IMPL;
}
-#endif // DEBUG
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
/* Filter out unimported BBs */