From 43e3118e5f0473088461ff1d7db4ab2a2f25f7d2 Mon Sep 17 00:00:00 2001
From: Bruce Forstall
Date: Fri, 26 Oct 2018 00:06:38 -0700
Subject: [PATCH] Fix SP check for x64/x86, remove for arm32/arm64

The actual checking had gotten lost between JIT32 and RyuJIT. I fixed the
"on return from function" case for x86/x64, and the "around every call site"
case for x86.

I removed the arm64 case because it's not easy to store SP to a stack local
or directly compare SP against a stack local without a temporary. Also, for
the fixed outgoing arg space ABIs (all but x86), these checks don't seem too
useful anyway, so I also removed the arm case.

Commit migrated from https://github.com/dotnet/coreclr/commit/8cbbce04c80f03f90370231847865f620150f628
---
 src/coreclr/src/jit/codegen.h         |  4 +++
 src/coreclr/src/jit/codegenarm.cpp    | 28 ---------------
 src/coreclr/src/jit/codegenarm64.cpp  | 28 ---------------
 src/coreclr/src/jit/codegencommon.cpp | 68 +++++++++++++++++++++++++++++++----
 src/coreclr/src/jit/codegenlinear.cpp | 20 ++++++++---
 src/coreclr/src/jit/codegenxarch.cpp  | 63 ++++++++++++++++++++++----------
 src/coreclr/src/jit/compiler.cpp      | 10 ++++--
 src/coreclr/src/jit/compiler.h        | 32 ++++++++++++-----
 src/coreclr/src/jit/morph.cpp         | 14 +++++---
 9 files changed, 166 insertions(+), 101 deletions(-)

diff --git a/src/coreclr/src/jit/codegen.h b/src/coreclr/src/jit/codegen.h
index fa12670..a0452de 100644
--- a/src/coreclr/src/jit/codegen.h
+++ b/src/coreclr/src/jit/codegen.h
@@ -1185,6 +1185,10 @@ protected:
 #endif // !_TARGET_X86_
 #endif // !FEATURE_PUT_STRUCT_ARG_STK
 
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+    void genStackPointerCheck(bool doStackPointerCheck, unsigned lvaStackPointerVar);
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+
 #ifdef DEBUG
     GenTree* lastConsumedNode;
     void genNumberOperandUse(GenTree* const operand, int& useNum) const;
diff --git a/src/coreclr/src/jit/codegenarm.cpp b/src/coreclr/src/jit/codegenarm.cpp
index 1e6044f..21267a6 100644
--- a/src/coreclr/src/jit/codegenarm.cpp
+++ b/src/coreclr/src/jit/codegenarm.cpp
@@ -277,23 +277,6 @@ void CodeGen::genLclHeap(GenTree* tree)
     BasicBlock* endLabel        = nullptr;
     unsigned    stackAdjustment = 0;
 
-#ifdef DEBUG
-    // Verify ESP
-    if (compiler->opts.compStackCheckOnRet)
-    {
-        noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
-        getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
-
-        BasicBlock*  esp_check = genCreateTempLabel();
-        emitJumpKind jmpEqual  = genJumpKindForOper(GT_EQ, CK_SIGNED);
-        inst_JMP(jmpEqual, esp_check);
-        getEmitter()->emitIns(INS_BREAKPOINT);
-        genDefineTempLabel(esp_check);
-    }
-#endif
-
     noway_assert(isFramePointerUsed()); // localloc requires Frame Pointer to be established since SP changes
     noway_assert(genStackLevel == 0);   // Can't have anything on the stack
 
@@ -504,17 +487,6 @@ BAILOUT:
     }
 #endif
 
-#ifdef DEBUG
-    // Update new ESP
-    if (compiler->opts.compStackCheckOnRet)
-    {
-        noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
-        getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
-    }
-#endif
-
     genProduceReg(tree);
 }
 
diff --git a/src/coreclr/src/jit/codegenarm64.cpp b/src/coreclr/src/jit/codegenarm64.cpp
index b2a793b..affef95 100644
--- a/src/coreclr/src/jit/codegenarm64.cpp
+++ b/src/coreclr/src/jit/codegenarm64.cpp
@@ -1845,23 +1845,6 @@ void CodeGen::genLclHeap(GenTree* tree)
     BasicBlock* loop            = nullptr;
     unsigned    stackAdjustment = 0;
 
-#ifdef DEBUG
-    // Verify ESP
-    if (compiler->opts.compStackCheckOnRet)
-    {
-        noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
-        getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
-
-        BasicBlock*  esp_check = genCreateTempLabel();
-        emitJumpKind jmpEqual  = genJumpKindForOper(GT_EQ, CK_SIGNED);
-        inst_JMP(jmpEqual, esp_check);
-        getEmitter()->emitIns(INS_bkpt);
-        genDefineTempLabel(esp_check);
-    }
-#endif
-
     noway_assert(isFramePointerUsed()); // localloc requires Frame Pointer to be established since SP changes
     noway_assert(genStackLevel == 0);   // Can't have anything on the stack
 
@@ -2115,17 +2098,6 @@ BAILOUT:
     }
 #endif
 
-#ifdef DEBUG
-    // Update new ESP
-    if (compiler->opts.compStackCheckOnRet)
-    {
-        noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
-        getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, targetReg, compiler->lvaReturnEspCheck, 0);
-    }
-#endif
-
     genProduceReg(tree);
 }
 
diff --git a/src/coreclr/src/jit/codegencommon.cpp b/src/coreclr/src/jit/codegencommon.cpp
index 0d4cd7d..79ca6c0 100644
--- a/src/coreclr/src/jit/codegencommon.cpp
+++ b/src/coreclr/src/jit/codegencommon.cpp
@@ -3691,6 +3691,7 @@ void CodeGen::genFnPrologCalleeRegArgs(regNumber xtraReg, bool* pXtraRegClobbere
 #if !defined(UNIX_AMD64_ABI)
         assert((i > 0) || (regNum == varDsc->lvArgReg));
 #endif // defined(UNIX_AMD64_ABI)
+
         // Is the arg dead on entry to the method ?
 
         if ((regArgMaskLive & genRegMask(regNum)) == 0)
@@ -3916,6 +3917,7 @@ void CodeGen::genFnPrologCalleeRegArgs(regNumber xtraReg, bool* pXtraRegClobbere
             continue;
         }
 #endif // defined(UNIX_AMD64_ABI)
+
         // If the arg is dead on entry to the method, skip it
 
         if (regArgTab[argNum].processed)
@@ -4543,6 +4545,7 @@ void CodeGen::genFnPrologCalleeRegArgs(regNumber xtraReg, bool* pXtraRegClobbere
                 getEmitter()->emitIns_R_R_I(INS_mov, EA_8BYTE, destRegNum, nextRegNum, 1);
             }
 #endif // defined(_TARGET_ARM64_) && defined(FEATURE_SIMD)
+
             // Mark the rest of the argument registers corresponding to this multi-reg type as
             // being processed and no longer live.
             for (int regSlot = 1; regSlot < argRegCount; regSlot++)
@@ -8035,6 +8038,7 @@ void CodeGen::genFnProlog()
 
                 noway_assert(!isFramePointerUsed() || loOffs != 0);
 #endif // !defined(_TARGET_AMD64_)
+
                 // printf("  Untracked tmp at [EBP-%04X]\n", -stkOffs);
 
                 hasUntrLcl = true;
@@ -8581,15 +8585,15 @@ void CodeGen::genFnProlog()
 
 #endif // _TARGET_X86_
 
-#ifdef DEBUG
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
     if (compiler->opts.compStackCheckOnRet)
     {
-        noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
-        getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
+        noway_assert(compiler->lvaReturnSpCheck != 0xCCCCCCCC &&
+                     compiler->lvaTable[compiler->lvaReturnSpCheck].lvDoNotEnregister &&
+                     compiler->lvaTable[compiler->lvaReturnSpCheck].lvOnFrame);
+        getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnSpCheck, 0);
     }
-#endif
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
 
     getEmitter()->emitEndProlog();
     compiler->unwindEndProlog();
@@ -11927,4 +11931,56 @@ void CodeGen::genReturn(GenTree* treeNode)
         }
     }
 #endif // PROFILING_SUPPORTED
+
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+    bool doStackPointerCheck = compiler->opts.compStackCheckOnRet;
+
+#if FEATURE_EH_FUNCLETS
+    // Don't do stack pointer check at the return from a funclet; only for the main function.
+    if (compiler->funCurrentFunc()->funKind != FUNC_ROOT)
+    {
+        doStackPointerCheck = false;
+    }
+#else  // !FEATURE_EH_FUNCLETS
+    // Don't generate stack checks for x86 finally/filter EH returns: these are not invoked
+    // with the same SP as the main function. See also CodeGen::genEHFinallyOrFilterRet().
+    if ((compiler->compCurBB->bbJumpKind == BBJ_EHFINALLYRET) || (compiler->compCurBB->bbJumpKind == BBJ_EHFILTERRET))
+    {
+        doStackPointerCheck = false;
+    }
+#endif // !FEATURE_EH_FUNCLETS
+
+    genStackPointerCheck(doStackPointerCheck, compiler->lvaReturnSpCheck);
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
 }
+
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+//------------------------------------------------------------------------
+// genStackPointerCheck: Generate code to check the stack pointer against a saved value.
+// This is a debug check.
+//
+// Arguments:
+//    doStackPointerCheck - If true, do the stack pointer check, otherwise do nothing.
+//    lvaStackPointerVar  - The local variable number that holds the value of the stack pointer
+//                          we are comparing against.
+//
+// Return Value:
+//    None
+//
+void CodeGen::genStackPointerCheck(bool doStackPointerCheck, unsigned lvaStackPointerVar)
+{
+    if (doStackPointerCheck)
+    {
+        noway_assert(lvaStackPointerVar != 0xCCCCCCCC && compiler->lvaTable[lvaStackPointerVar].lvDoNotEnregister &&
+                     compiler->lvaTable[lvaStackPointerVar].lvOnFrame);
+        getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, lvaStackPointerVar, 0);
+
+        BasicBlock* sp_check = genCreateTempLabel();
+        getEmitter()->emitIns_J(INS_je, sp_check);
+        instGen(INS_BREAKPOINT);
+        genDefineTempLabel(sp_check);
+    }
+}
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
diff --git a/src/coreclr/src/jit/codegenlinear.cpp b/src/coreclr/src/jit/codegenlinear.cpp
index 9647600..f474b88 100644
--- a/src/coreclr/src/jit/codegenlinear.cpp
+++ b/src/coreclr/src/jit/codegenlinear.cpp
@@ -41,19 +41,31 @@ void CodeGen::genCodeForBBlist()
 
     // You have to be careful if you create basic blocks from now on
     compiler->fgSafeBasicBlockCreation = false;
+#endif // DEBUG
+
+#if defined(DEBUG) && defined(_TARGET_X86_)
 
-    // This stress mode is not compatible with fully interruptible GC
+    // Check stack pointer on call stress mode is not compatible with fully interruptible GC. REVIEW: why?
+    //
     if (genInterruptible && compiler->opts.compStackCheckOnCall)
     {
        compiler->opts.compStackCheckOnCall = false;
     }
 
-    // This stress mode is not compatible with fully interruptible GC
-    if (genInterruptible && compiler->opts.compStackCheckOnRet)
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+    // Check stack pointer on return stress mode is not compatible with fully interruptible GC. REVIEW: why?
+    // It is also not compatible with any function that makes a tailcall: we aren't smart enough to only
+    // insert the SP check in the non-tailcall returns.
+    //
+    if ((genInterruptible || compiler->compTailCallUsed) && compiler->opts.compStackCheckOnRet)
     {
         compiler->opts.compStackCheckOnRet = false;
     }
-#endif // DEBUG
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
 
     // Prepare the blocks for exception handling codegen: mark the blocks that needs labels.
     genPrepForEHCodegen();
diff --git a/src/coreclr/src/jit/codegenxarch.cpp b/src/coreclr/src/jit/codegenxarch.cpp
index 54ff6dc..007cc85 100644
--- a/src/coreclr/src/jit/codegenxarch.cpp
+++ b/src/coreclr/src/jit/codegenxarch.cpp
@@ -2152,20 +2152,7 @@ void CodeGen::genLclHeap(GenTree* tree)
     BasicBlock* endLabel = nullptr;
 
 #ifdef DEBUG
-    // Verify ESP
-    if (compiler->opts.compStackCheckOnRet)
-    {
-        noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
-        getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
-
-        BasicBlock*  esp_check = genCreateTempLabel();
-        emitJumpKind jmpEqual  = genJumpKindForOper(GT_EQ, CK_SIGNED);
-        inst_JMP(jmpEqual, esp_check);
-        getEmitter()->emitIns(INS_BREAKPOINT);
-        genDefineTempLabel(esp_check);
-    }
+    genStackPointerCheck(compiler->opts.compStackCheckOnRet, compiler->lvaReturnSpCheck);
 #endif
 
     noway_assert(isFramePointerUsed()); // localloc requires Frame Pointer to be established since SP changes
@@ -2473,13 +2460,13 @@ BAILOUT:
 #endif
 
 #ifdef DEBUG
-    // Update new ESP
+    // Update local variable to reflect the new stack pointer.
     if (compiler->opts.compStackCheckOnRet)
     {
-        noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
-                     compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
-        getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
+        noway_assert(compiler->lvaReturnSpCheck != 0xCCCCCCCC &&
+                     compiler->lvaTable[compiler->lvaReturnSpCheck].lvDoNotEnregister &&
+                     compiler->lvaTable[compiler->lvaReturnSpCheck].lvOnFrame);
+        getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnSpCheck, 0);
     }
 #endif
 
@@ -5167,6 +5154,17 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
         }
     }
 
+#if defined(DEBUG) && defined(_TARGET_X86_)
+    // Store the stack pointer so we can check it after the call.
+    if (compiler->opts.compStackCheckOnCall && call->gtCallType == CT_USER_FUNC)
+    {
+        noway_assert(compiler->lvaCallSpCheck != 0xCCCCCCCC &&
+                     compiler->lvaTable[compiler->lvaCallSpCheck].lvDoNotEnregister &&
+                     compiler->lvaTable[compiler->lvaCallSpCheck].lvOnFrame);
+        getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, REG_SPBASE, compiler->lvaCallSpCheck, 0);
+    }
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
     bool            fPossibleSyncHelperCall = false;
     CorInfoHelpFunc helperNum               = CORINFO_HELP_UNDEF;
 
@@ -5481,6 +5479,33 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
         gcInfo.gcMarkRegSetNpt(RBM_INTRET);
     }
 
+#if defined(DEBUG) && defined(_TARGET_X86_)
+    if (compiler->opts.compStackCheckOnCall && call->gtCallType == CT_USER_FUNC)
+    {
+        noway_assert(compiler->lvaCallSpCheck != 0xCCCCCCCC &&
+                     compiler->lvaTable[compiler->lvaCallSpCheck].lvDoNotEnregister &&
+                     compiler->lvaTable[compiler->lvaCallSpCheck].lvOnFrame);
+        if (!fCallerPop && (stackArgBytes != 0))
+        {
+            // ECX is trashed, so can be used to compute the expected SP. We saved the value of SP
+            // after pushing all the stack arguments, but the caller popped the arguments, so we need
+            // to do some math to figure a good comparison.
+            getEmitter()->emitIns_R_R(INS_mov, EA_4BYTE, REG_ARG_0, REG_SPBASE);
+            getEmitter()->emitIns_R_I(INS_sub, EA_4BYTE, REG_ARG_0, stackArgBytes);
+            getEmitter()->emitIns_S_R(INS_cmp, EA_4BYTE, REG_ARG_0, compiler->lvaCallSpCheck, 0);
+        }
+        else
+        {
+            getEmitter()->emitIns_S_R(INS_cmp, EA_4BYTE, REG_SPBASE, compiler->lvaCallSpCheck, 0);
+        }
+
+        BasicBlock* sp_check = genCreateTempLabel();
+        getEmitter()->emitIns_J(INS_je, sp_check);
+        instGen(INS_BREAKPOINT);
+        genDefineTempLabel(sp_check);
+    }
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
 #if !FEATURE_EH_FUNCLETS
     //-------------------------------------------------------------------------
     // Create a label for tracking of region protected by the monitor in synchronized methods.
diff --git a/src/coreclr/src/jit/compiler.cpp b/src/coreclr/src/jit/compiler.cpp
index 6d7079d..67465f7 100644
--- a/src/coreclr/src/jit/compiler.cpp
+++ b/src/coreclr/src/jit/compiler.cpp
@@ -3486,12 +3486,14 @@ void Compiler::compInitOptions(JitFlags* jitFlags)
 #ifdef DEBUG
     assert(!codeGen->isGCTypeFixed());
     opts.compGcChecks = (JitConfig.JitGCChecks() != 0) || compStressCompile(STRESS_GENERIC_VARN, 5);
+#endif
 
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
     enum
     {
         STACK_CHECK_ON_RETURN = 0x1,
         STACK_CHECK_ON_CALL   = 0x2,
-        STACK_CHECK_ALL       = 0x3,
+        STACK_CHECK_ALL       = 0x3
     };
 
     DWORD dwJitStackChecks = JitConfig.JitStackChecks();
@@ -3499,9 +3501,11 @@
     {
         dwJitStackChecks = STACK_CHECK_ALL;
     }
-    opts.compStackCheckOnRet = (dwJitStackChecks & DWORD(STACK_CHECK_ON_RETURN)) != 0;
+    opts.compStackCheckOnRet  = (dwJitStackChecks & DWORD(STACK_CHECK_ON_RETURN)) != 0;
+#if defined(_TARGET_X86_)
     opts.compStackCheckOnCall = (dwJitStackChecks & DWORD(STACK_CHECK_ON_CALL)) != 0;
-#endif
+#endif // defined(_TARGET_X86_)
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
 
 #if MEASURE_MEM_ALLOC
     s_dspMemStats = (JitConfig.DisplayMemStats() != 0);
diff --git a/src/coreclr/src/jit/compiler.h b/src/coreclr/src/jit/compiler.h
index eb6255d..c064017 100644
--- a/src/coreclr/src/jit/compiler.h
+++ b/src/coreclr/src/jit/compiler.h
@@ -2856,10 +2856,17 @@ public:
     unsigned lvaPromotedStructAssemblyScratchVar;
 #endif // _TARGET_ARM_
 
-#ifdef DEBUG
-    unsigned lvaReturnEspCheck; // confirms ESP not corrupted on return
-    unsigned lvaCallEspCheck;   // confirms ESP not corrupted after a call
-#endif
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+    unsigned lvaReturnSpCheck; // Stores SP to confirm it is not corrupted on return.
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+
+#if defined(DEBUG) && defined(_TARGET_X86_)
+
+    unsigned lvaCallSpCheck; // Stores SP to confirm it is not corrupted after every call.
+
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
 
     unsigned lvaGenericsContextUseCount;
 
@@ -8264,12 +8271,21 @@
 #endif
 
 #ifdef DEBUG
-    bool compGcChecks;         // Check arguments and return values to ensure they are sane
-    bool compStackCheckOnRet;  // Check ESP on return to ensure it is correct
-    bool compStackCheckOnCall; // Check ESP after every call to ensure it is correct
-
+    bool compGcChecks; // Check arguments and return values to ensure they are sane
 #endif
 
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
+
+    bool compStackCheckOnRet; // Check stack pointer on return to ensure it is correct.
+
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
+
+#if defined(DEBUG) && defined(_TARGET_X86_)
+
+    bool compStackCheckOnCall; // Check stack pointer after call to ensure it is correct. Only for x86.
+
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
+
     bool compNeedSecurityCheck; // This flag really means where or not a security object needs
                                 // to be allocated on the stack.
                                 // It will be set to true in the following cases:
diff --git a/src/coreclr/src/jit/morph.cpp b/src/coreclr/src/jit/morph.cpp
index 1f6de01..80de2e8 100644
--- a/src/coreclr/src/jit/morph.cpp
+++ b/src/coreclr/src/jit/morph.cpp
@@ -16694,19 +16694,23 @@ void Compiler::fgMorph()
             }
         }
     }
+#endif // DEBUG
 
+#if defined(DEBUG) && defined(_TARGET_XARCH_)
     if (opts.compStackCheckOnRet)
     {
-        lvaReturnEspCheck                  = lvaGrabTempWithImplicitUse(false DEBUGARG("ReturnEspCheck"));
-        lvaTable[lvaReturnEspCheck].lvType = TYP_INT;
+        lvaReturnSpCheck                  = lvaGrabTempWithImplicitUse(false DEBUGARG("ReturnSpCheck"));
+        lvaTable[lvaReturnSpCheck].lvType = TYP_I_IMPL;
     }
+#endif // defined(DEBUG) && defined(_TARGET_XARCH_)
 
+#if defined(DEBUG) && defined(_TARGET_X86_)
     if (opts.compStackCheckOnCall)
    {
-        lvaCallEspCheck                  = lvaGrabTempWithImplicitUse(false DEBUGARG("CallEspCheck"));
-        lvaTable[lvaCallEspCheck].lvType = TYP_INT;
+        lvaCallSpCheck                  = lvaGrabTempWithImplicitUse(false DEBUGARG("CallSpCheck"));
+        lvaTable[lvaCallSpCheck].lvType = TYP_I_IMPL;
     }
-#endif // DEBUG
+#endif // defined(DEBUG) && defined(_TARGET_X86_)
 
     /* Filter out unimported BBs */
 
-- 
2.7.4
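For readers less familiar with this debug stress mode: the checks re-enabled by the patch
are driven by the JitStackChecks configuration read in compInitOptions (bit 0x1 = check on
return, bit 0x2 = check after every call, the latter x86 only), and the emitted code simply
compares the live stack pointer against a value stashed in a frame local (lvaReturnSpCheck /
lvaCallSpCheck), breaking into the debugger on mismatch. The standalone C++ sketch below
models only that idea; the type and function names are invented for illustration and are not
JIT source.

    // Conceptual model of the SP check emitted by genStackPointerCheck (illustration only).
    // The JIT emits machine code of the shape "cmp SP, [frame slot]; je ok; breakpoint; ok:".
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    struct FrameDebugState
    {
        uintptr_t savedSp = 0; // plays the role of the lvaReturnSpCheck / lvaCallSpCheck local
    };

    // Prolog side (or right after pushing x86 call arguments): remember the stack pointer.
    inline void SaveStackPointer(FrameDebugState& state, uintptr_t currentSp)
    {
        state.savedSp = currentSp;
    }

    // Return side (or right after a call returns): trap if the stack pointer has drifted.
    inline void CheckStackPointer(const FrameDebugState& state, uintptr_t currentSp)
    {
        if (currentSp != state.savedSp)
        {
            std::fprintf(stderr, "SP check failed: expected 0x%llx, got 0x%llx\n",
                         (unsigned long long)state.savedSp, (unsigned long long)currentSp);
            std::abort(); // the JIT emits a breakpoint instruction here instead
        }
    }

    int main()
    {
        FrameDebugState state;
        uintptr_t fakeSp = 0x7ffdf000;     // stand-in for the real stack pointer
        SaveStackPointer(state, fakeSp);
        CheckStackPointer(state, fakeSp);  // passes: SP unchanged across the "call"
        // CheckStackPointer(state, fakeSp - 4); // would abort: simulated SP corruption
        return 0;
    }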