? "<none>"
: GetRegName(hdrdecoder.GetStackBaseRegister()));
+#ifdef _TARGET_AMD64_
gcPrintf("Wants Report Only Leaf: %u\n", hdrdecoder.WantsReportOnlyLeaf());
-
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ gcPrintf("Has tailcalls: %u\n", hdrdecoder.HasTailCalls());
+#endif // _TARGET_AMD64_
#ifdef FIXED_STACK_PARAMETER_SCRATCH_AREA
gcPrintf("Size of parameter area: %x\n", hdrdecoder.GetSizeOfStackParameterArea());
#endif
(GcInfoDecoderFlags)( DECODE_SECURITY_OBJECT
| DECODE_CODE_LENGTH
| DECODE_VARARG
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ | DECODE_HAS_TAILCALLS
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
+
| DECODE_INTERRUPTIBILITY),
offset);
m_StackBaseRegister = NO_STACK_BASE_REGISTER;
m_SizeOfEditAndContinuePreservedArea = NO_SIZE_OF_EDIT_AND_CONTINUE_PRESERVED_AREA;
m_ReversePInvokeFrameSlot = NO_REVERSE_PINVOKE_FRAME;
+#ifdef _TARGET_AMD64_
m_WantsReportOnlyLeaf = false;
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ m_HasTailCalls = false;
+#endif // _TARGET_AMD64_
m_IsVarArg = false;
m_pLastInterruptibleRange = NULL;
m_SizeOfEditAndContinuePreservedArea = slots;
}
+#ifdef _TARGET_AMD64_
void GcInfoEncoder::SetWantsReportOnlyLeaf()
{
m_WantsReportOnlyLeaf = true;
}
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+void GcInfoEncoder::SetHasTailCalls()
+{
+ m_HasTailCalls = true;
+}
+#endif // _TARGET_AMD64_
#ifdef FIXED_STACK_PARAMETER_SCRATCH_AREA
void GcInfoEncoder::SetSizeOfStackOutgoingAndScratchArea( UINT32 size )
UINT32 hasReversePInvokeFrame = (m_ReversePInvokeFrameSlot != NO_REVERSE_PINVOKE_FRAME);
BOOL slimHeader = (!m_IsVarArg && !hasSecurityObject && !hasGSCookie && (m_PSPSymStackSlot == NO_PSP_SYM) &&
- !hasContextParamType && !m_WantsReportOnlyLeaf && (m_InterruptibleRanges.Count() == 0) && !hasReversePInvokeFrame &&
+ !hasContextParamType && (m_InterruptibleRanges.Count() == 0) && !hasReversePInvokeFrame &&
((m_StackBaseRegister == NO_STACK_BASE_REGISTER) || (NORMALIZE_STACK_BASE_REGISTER(m_StackBaseRegister) == 0))) &&
(m_SizeOfEditAndContinuePreservedArea == NO_SIZE_OF_EDIT_AND_CONTINUE_PRESERVED_AREA) &&
+#ifdef _TARGET_AMD64_
+ !m_WantsReportOnlyLeaf &&
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ !m_HasTailCalls &&
+#endif // _TARGET_AMD64_
!IsStructReturnKind(m_ReturnKind);
// All new code is generated for the latest GCINFO_VERSION.
GCINFO_WRITE(m_Info1, ((m_PSPSymStackSlot != NO_PSP_SYM) ? 1 : 0), 1, FlagsSize);
GCINFO_WRITE(m_Info1, m_contextParamType, 2, FlagsSize);
GCINFO_WRITE(m_Info1, ((m_StackBaseRegister != NO_STACK_BASE_REGISTER) ? 1 : 0), 1, FlagsSize);
+#ifdef _TARGET_AMD64_
GCINFO_WRITE(m_Info1, (m_WantsReportOnlyLeaf ? 1 : 0), 1, FlagsSize);
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ GCINFO_WRITE(m_Info1, (m_HasTailCalls ? 1 : 0), 1, FlagsSize);
+#endif // _TARGET_AMD64_
GCINFO_WRITE(m_Info1, ((m_SizeOfEditAndContinuePreservedArea != NO_SIZE_OF_EDIT_AND_CONTINUE_PRESERVED_AREA) ? 1 : 0), 1, FlagsSize);
GCINFO_WRITE(m_Info1, (hasReversePInvokeFrame ? 1 : 0), 1, FlagsSize);
virtual bool IsGcSafe(EECodeInfo *pCodeInfo,
DWORD dwRelOffset) = 0;
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+virtual bool HasTailCalls(EECodeInfo *pCodeInfo) = 0;
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
+
#if defined(_TARGET_AMD64_) && defined(_DEBUG)
/*
Locates the end of the last interruptible region in the given code range.
bool IsGcSafe( EECodeInfo *pCodeInfo,
DWORD dwRelOffset);
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+virtual
+bool HasTailCalls(EECodeInfo *pCodeInfo);
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
+
#if defined(_TARGET_AMD64_) && defined(_DEBUG)
/*
Locates the end of the last interruptible region in the given code range.
DECODE_PROLOG_LENGTH = 0x400, // length of the prolog (used to avoid reporting generics context)
DECODE_EDIT_AND_CONTINUE = 0x800,
DECODE_REVERSE_PINVOKE_VAR = 0x1000,
- DECODE_RETURN_KIND = 0x2000
+ DECODE_RETURN_KIND = 0x2000,
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ DECODE_HAS_TAILCALLS = 0x4000,
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
};
enum GcInfoHeaderFlags
GC_INFO_HAS_GENERICS_INST_CONTEXT_MD = 0x20,
GC_INFO_HAS_GENERICS_INST_CONTEXT_THIS = 0x30,
GC_INFO_HAS_STACK_BASE_REGISTER = 0x40,
+#ifdef _TARGET_AMD64_
GC_INFO_WANTS_REPORT_ONLY_LEAF = 0x80,
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ GC_INFO_HAS_TAILCALLS = 0x80,
+#endif // _TARGET_AMD64_
GC_INFO_HAS_EDIT_AND_CONTINUE_PRESERVED_SLOTS = 0x100,
GC_INFO_REVERSE_PINVOKE_FRAME = 0x200,
bool HasMethodTableGenericsInstContext();
bool GetIsVarArg();
bool WantsReportOnlyLeaf();
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ bool HasTailCalls();
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
ReturnKind GetReturnKind();
UINT32 GetCodeLength();
UINT32 GetStackBaseRegister();
bool m_IsVarArg;
bool m_GenericSecretParamIsMD;
bool m_GenericSecretParamIsMT;
+#ifdef _TARGET_AMD64_
bool m_WantsReportOnlyLeaf;
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ bool m_HasTailCalls;
+#endif // _TARGET_AMD64_
INT32 m_SecurityObjectStackSlot;
INT32 m_GSCookieStackSlot;
INT32 m_ReversePInvokeFrameStackSlot;
hasPSPSymStackSlot,
hasGenericsInstContextStackSlot,
hasStackBaseregister,
- wantsReportOnlyLeaf,
+        wantsReportOnlyLeaf (AMD64 only),
+        hasTailCalls (ARM/ARM64 only),
hasSizeOfEditAndContinuePreservedArea
hasReversePInvokeFrame,
- ReturnKind (Fat: 4 bits)
// Number of slots preserved during EnC remap
void SetSizeOfEditAndContinuePreservedArea( UINT32 size );
+#ifdef _TARGET_AMD64_
// Used to only report a frame once for the leaf function/funclet
// instead of once for each live function/funclet on the stack.
// Called only by RyuJIT (not JIT64)
void SetWantsReportOnlyLeaf();
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ void SetHasTailCalls();
+#endif // _TARGET_AMD64_
#ifdef FIXED_STACK_PARAMETER_SCRATCH_AREA
void SetSizeOfStackOutgoingAndScratchArea( UINT32 size );
GcInfoArrayList<LifetimeTransition, 64> m_LifetimeTransitions;
bool m_IsVarArg;
+#if defined(_TARGET_AMD64_)
bool m_WantsReportOnlyLeaf;
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ bool m_HasTailCalls;
+#endif // _TARGET_AMD64_
INT32 m_SecurityObjectStackSlot;
INT32 m_GSCookieStackSlot;
UINT32 m_GSCookieValidRangeStart;
/* Assume that we not fully interruptible */
genInterruptible = false;
+#ifdef _TARGET_ARMARCH_
+ hasTailCalls = false;
+#endif // _TARGET_ARMARCH_
#ifdef DEBUG
genInterruptibleUsed = false;
genCurDispOffset = (unsigned)-1;
if (jmpEpilog)
{
+#ifdef _TARGET_ARMARCH_
+ hasTailCalls = true;
+#endif // _TARGET_ARMARCH_
+
noway_assert(block->bbJumpKind == BBJ_RETURN);
noway_assert(block->bbTreeList != nullptr);
m_cgInterruptible = value;
}
+#ifdef _TARGET_ARMARCH_
+ __declspec(property(get = getHasTailCalls, put = setHasTailCalls)) bool hasTailCalls;
+ bool getHasTailCalls()
+ {
+ return m_cgHasTailCalls;
+ }
+ void setHasTailCalls(bool value)
+ {
+ m_cgHasTailCalls = value;
+ }
+#endif // _TARGET_ARMARCH_
+
private:
bool m_cgInterruptible;
+#ifdef _TARGET_ARMARCH_
+ bool m_cgHasTailCalls;
+#endif // _TARGET_ARMARCH_
// The following will be set to true if we've determined that we need to
// generate a full-blown pointer register map for the current method.
codeGen->setInterruptible(value);
}
+#ifdef _TARGET_ARMARCH_
+ __declspec(property(get = getHasTailCalls, put = setHasTailCalls)) bool hasTailCalls;
+ bool getHasTailCalls()
+ {
+ return codeGen->hasTailCalls;
+ }
+ void setHasTailCalls(bool value)
+ {
+ codeGen->setHasTailCalls(value);
+ }
+#endif // _TARGET_ARMARCH_
+
#if DOUBLE_ALIGN
const bool genDoubleAlign()
{
}
}
+#ifdef _TARGET_AMD64_
void SetWantsReportOnlyLeaf()
{
m_gcInfoEncoder->SetWantsReportOnlyLeaf();
printf("Set WantsReportOnlyLeaf.\n");
}
}
+#elif defined(_TARGET_ARMARCH_)
+ void SetHasTailCalls()
+ {
+ m_gcInfoEncoder->SetHasTailCalls();
+ if (m_doLogging)
+ {
+ printf("Set HasTailCalls.\n");
+ }
+ }
+#endif // _TARGET_AMD64_
void SetSizeOfStackOutgoingAndScratchArea(UINT32 size)
{
#endif // !_TARGET_AMD64_
}
+#ifdef _TARGET_AMD64_
if (compiler->ehAnyFunclets())
{
// Set this to avoid double-reporting the parent frame (unlike JIT64)
gcInfoEncoderWithLog->SetWantsReportOnlyLeaf();
}
+#endif // _TARGET_AMD64_
+
#endif // FEATURE_EH_FUNCLETS
+#ifdef _TARGET_ARMARCH_
+ if (compiler->codeGen->hasTailCalls)
+ {
+ gcInfoEncoderWithLog->SetHasTailCalls();
+ }
+#endif // _TARGET_ARMARCH_
+
#if FEATURE_FIXED_OUT_ARGS
// outgoing stack area size
gcInfoEncoderWithLog->SetSizeOfStackOutgoingAndScratchArea(compiler->lvaOutgoingArgSpaceSize);
set(RYUJIT_LINK_LIBRARIES
utilcodestaticnohost
- gcinfo
+ gcinfo_arm
)
if(CLR_CMAKE_PLATFORM_UNIX)
return gcInfoDecoder.IsInterruptible();
}
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+bool EECodeManager::HasTailCalls( EECodeInfo *pCodeInfo)
+{
+ CONTRACTL {
+ NOTHROW;
+ GC_NOTRIGGER;
+ } CONTRACTL_END;
+
+ GCInfoToken gcInfoToken = pCodeInfo->GetGCInfoToken();
+
+ GcInfoDecoder gcInfoDecoder(
+ gcInfoToken,
+ DECODE_HAS_TAILCALLS,
+ 0
+ );
+
+ return gcInfoDecoder.HasTailCalls();
+}
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
#if defined(_TARGET_AMD64_) && defined(_DEBUG)
m_GenericSecretParamIsMD = (headerFlags & GC_INFO_HAS_GENERICS_INST_CONTEXT_MASK) == GC_INFO_HAS_GENERICS_INST_CONTEXT_MD;
m_GenericSecretParamIsMT = (headerFlags & GC_INFO_HAS_GENERICS_INST_CONTEXT_MASK) == GC_INFO_HAS_GENERICS_INST_CONTEXT_MT;
int hasStackBaseRegister = headerFlags & GC_INFO_HAS_STACK_BASE_REGISTER;
+#ifdef _TARGET_AMD64_
m_WantsReportOnlyLeaf = ((headerFlags & GC_INFO_WANTS_REPORT_ONLY_LEAF) != 0);
+#elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ m_HasTailCalls = ((headerFlags & GC_INFO_HAS_TAILCALLS) != 0);
+#endif // _TARGET_AMD64_
int hasSizeOfEditAndContinuePreservedArea = headerFlags & GC_INFO_HAS_EDIT_AND_CONTINUE_PRESERVED_SLOTS;
int hasReversePInvokeFrame = false;
return m_IsVarArg;
}
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+bool GcInfoDecoder::HasTailCalls()
+{
+ _ASSERTE( m_Flags & DECODE_HAS_TAILCALLS );
+ return m_HasTailCalls;
+}
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
+
bool GcInfoDecoder::WantsReportOnlyLeaf()
{
+ // Only AMD64 with JIT64 can return false here.
+#ifdef _TARGET_AMD64_
return m_WantsReportOnlyLeaf;
+#else
+ return true;
+#endif
}
UINT32 GcInfoDecoder::GetCodeLength()
return GetCodeManager()->IsGcSafe(&codeInfo, GetRelOffset());
}
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+bool CrawlFrame::HasTailCalls()
+{
+ CONTRACTL {
+ NOTHROW;
+ GC_NOTRIGGER;
+ SUPPORTS_DAC;
+ } CONTRACTL_END;
+
+ return GetCodeManager()->HasTailCalls(&codeInfo);
+}
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
+
inline void CrawlFrame::GotoNextFrame()
{
CONTRACTL {
*/
bool IsGcSafe();
+#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
+ bool HasTailCalls();
+#endif // _TARGET_ARM_ || _TARGET_ARM64_
PREGDISPLAY GetRegisterSet()
{
// For this, we may need JIT support to do so.
notJittedCase = true;
}
+ else if (pCF->HasTailCalls())
+ {
+        // Do not hijack functions that have tail calls, since there are two problems:
+        // 1. When a function that tail calls another one is hijacked, the LR may be
+        //    stored at a different location in the stack frame of the tail call target.
+        //    So just by performing a tail call, the hijacked location becomes invalid and
+        //    unhijacking would corrupt the stack by writing to that location.
+        // 2. There is a small window after the caller pops LR from the stack in its
+        //    epilog and before the tail called function pushes LR in its prolog when
+        //    the hijacked return address would not be on the stack and so we would
+        //    not be able to unhijack.
+ notJittedCase = true;
+ }
else
{
// This is the case of IP being inside the method body and LR is
WorkingDir=JIT\Methodical\tailcall_v4\hijacking
Expected=0
MaxAllowedDurationSeconds=600
-Categories=EXPECTED_FAIL;4122;LONG_RUNNING
+Categories=EXPECTED_PASS;LONG_RUNNING
HostStyle=0
[smallFrame.cmd_7667]
JIT/jit64/opt/cse/HugeField2/HugeField2.sh
CoreMangLib/cti/system/string/StringFormat1/StringFormat1.sh
CoreMangLib/cti/system/string/StringFormat2/StringFormat2.sh
-JIT/Methodical/tailcall_v4/hijacking/hijacking.sh