ULONG fn__ThePreStubPatchLabel;
ULONG fn__PrecodeFixupThunk;
+#ifdef TARGET_ARM64
+ ULONG fn__PrecodeCompactedFixupThunk;
+#endif // TARGET_ARM64
#ifdef FEATURE_PREJIT
ULONG fn__StubDispatchFixupStub;
ULONG fn__StubDispatchFixupPatchLabel;
b C_FUNC(ThePreStub)
NESTED_END PrecodeFixupThunk, _TEXT
+
+NESTED_ENTRY PrecodeCompactedFixupThunk, _TEXT, NoHandler
+ // x11 = CompactedFixupPrecode *
+ // On Exit
+ // x12 = MethodDesc*
+ PROLOG_WITH_TRANSITION_BLOCK
+
+ mov x0, x11
+
+ bl C_FUNC(PreStubGetMethodDescForCompactedFixup)
+
+ mov METHODDESC_REGISTER, x0
+
+ EPILOG_WITH_TRANSITION_BLOCK_TAILCALL
+ b C_FUNC(ThePreStub)
+NESTED_END PrecodeCompactedFixupThunk, _TEXT
// ------------------------------------------------------------------
NESTED_ENTRY ThePreStub, _TEXT, NoHandler
#define HAS_FIXUP_PRECODE 1
#define HAS_FIXUP_PRECODE_CHUNKS 1
+#define HAS_COMPACTED_FIXUP_PRECODE_CHUNKS 1 // Depends on HAS_FIXUP_PRECODE
+
// ThisPtrRetBufPrecode one is necessary for closed delegates over static methods with return buffer
#define HAS_THISPTR_RETBUF_PRECODE 1
// this is the offset by which it should be decremented to arrive at the callsite.
#define STACKWALK_CONTROLPC_ADJUST_OFFSET 4
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+#define LDRX12_IMM_OPCODE 0x5800000c // https://developer.arm.com/documentation/ddi0487/ga C6.2.133
+#define LDRX12_IMM_OPCODE_MASK 0xff00001f
+#define LDRX12_IMM_OFFSET_SHIFT 3
+#define LDRX12_IMM_OFFSET_MASK 0x00ffffe0
+#define LDRX12_IMM_OFFSET_ALIGNMENT 0x00000004 // Same as instruction size
+#define LDRX12_IMM_OFFSET_MAX 0x00100000
+#define COMPACTED_FIXUP_PRECODE_ADR 0x1000000b
+#define COMPACTED_FIXUP_PRECODE_BR 0xd61f0180
+
+static_assert_no_msg((LDRX12_IMM_OPCODE_MASK & LDRX12_IMM_OPCODE) == LDRX12_IMM_OPCODE);
+static_assert_no_msg(LDRX12_IMM_OFFSET_MASK == ~LDRX12_IMM_OPCODE_MASK);
+static_assert_no_msg(((LDRX12_IMM_OFFSET_MASK >> LDRX12_IMM_OFFSET_SHIFT) + LDRX12_IMM_OFFSET_ALIGNMENT) >> 1 == LDRX12_IMM_OFFSET_MAX);
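+
+// Example: a byte offset of 0x28 shifts to (0x28 << LDRX12_IMM_OFFSET_SHIFT) == 0x140, which OR'ed with
+// LDRX12_IMM_OPCODE gives 0x5800014c, i.e. "ldr x12, #0x28" with imm19 == 10 in bits 23:5.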
+
+#define COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES 0
+
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
inline
ARG_SLOT FPSpillToR8(void* pSpillSlot)
{
};
typedef DPTR(FixupPrecode) PTR_FixupPrecode;
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+EXTERN_C VOID STDCALL PrecodeCompactedFixupThunk();
+
+class CompactedFixupPrecodeChunk
+{
+/*
+fixup0:
+ adr x11, #0
+ ldr x12, target0
+ br x12
+
+fixup1:
+ adr x11, #0
+ ldr x12, target1 // offset = 20*(count - i - 1) + 16 + align (bytes) => count - i = (offset - 16 - align)/20 + 1
+ br x12
+
+fixup2:
+ adr x11, #0
+ ldr x12, target2
+ br x12
+
+align:
+ // count%2 ? nop : nothing
+
+pMethodDescChunk: // Pointer used to locate the MethodDescChunk.
+ nop // This field and the targets must be 8-byte aligned.
+ nop
+
+target2: // Targets are stored in reverse order so that the ldr offset is a linear function of (count - i),
+ nop // and therefore (count - i) can be recovered from the ldr offset. Knowing (count - i) makes it possible
+ nop // to find pMethodDescChunk and to read the count value from it.
+
+target1:
+ nop
+ nop
+
+target0:
+ nop
+ nop
+
+index0_2: // m_MethodDescChunkIndex, i.e. the offset of the MethodDesc within the MethodDescChunk.
+ nop // Indexes make the MethodDesc lookup O(1) at the cost of 1 byte per method.
+ // They are optional; without them that byte is saved and the MethodDesc is computed in O(n).
+*/
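+
+// For example, with count == 3 as drawn above: fixup1 (i == 1) has its ldr at byte 16 and target1 at
+// byte 56, so the encoded ldr offset is 40 == 20*(count - i - 1) + 16 + 4 (one alignment nop because
+// count is odd); see Code::Init and Code::GetBackIndex below.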
+ struct Base { // One per chunk
+ TADDR m_pMethodDescChunk;
+ };
+
+ struct Target { // Targets in reverse order
+ TADDR m_pTarget;
+ };
+
+ struct MethodDescIndex {
+ BYTE m_MethodDescChunkIndex;
+ };
+
+public:
+ struct Code {
+ static const int Type = 0x0B;
+
+ // adr x11, #0
+ // ldr x12, m_pTargetLocation ; =m_pTarget
+ // br x12
+ UINT32 m_rgCode[3];
+
+ void Init(int count, int i, MethodDescChunk* pMethodDescChunk, LoaderAllocator* pLoaderAllocator, int iMethodDescChunkIndex);
+ void Init(CompactedFixupPrecodeChunk::Code* pPrecodeRX, MethodDesc* pMD, LoaderAllocator* pLoaderAllocator);
+
+ static UINT32 EncodeLDRx12(int offset)
+ {
+ _ASSERTE((offset & (LDRX12_IMM_OFFSET_ALIGNMENT - 1)) == 0); // Offset should be a multiple of 4
+ _ASSERTE(0 <= offset && offset < LDRX12_IMM_OFFSET_MAX); // Negative offsets are not supported
+
+ UINT32 res = offset << LDRX12_IMM_OFFSET_SHIFT;
+
+ _ASSERTE((res & LDRX12_IMM_OPCODE_MASK) == 0);
+
+ return (UINT32) res | LDRX12_IMM_OPCODE;
+ }
+
+ static int DecodeLDRx12(UINT32 opcode)
+ {
+ _ASSERTE((opcode & LDRX12_IMM_OPCODE_MASK) == LDRX12_IMM_OPCODE);
+
+ int res = (opcode & LDRX12_IMM_OFFSET_MASK) >> LDRX12_IMM_OFFSET_SHIFT;
+ _ASSERTE(0 <= res && res < LDRX12_IMM_OFFSET_MAX); // Negative offsets are not supported
+
+ return res;
+ }
+
+ int GetBackIndex() // Calculate (count - index) from ldr
+ {
+ // count - i = (offset - 8 - 8 - align)/20 + 1
+ int res = DecodeLDRx12(m_rgCode[1]);
+ const int CT_Size = sizeof(Code) + sizeof(Target); // Size of one compacted fixup precode, excluding MethodDescIndex
+ const int pMDC_Size = sizeof(Base); // Size of the shared pMethodDescChunk field
+ const int align_Size = sizeof(m_rgCode[0]); // Size of the alignment nop
+ const int instr_Size = sizeof(m_rgCode[2]); // Size of one instruction
+
+ res -= pMDC_Size + 2*instr_Size; // The 2 instructions are the ldr and br of the current precode
+
+ res = (res % CT_Size != 0) ? res - align_Size : res;
+ _ASSERTE(CT_Size > align_Size);
+ _ASSERTE((res % CT_Size) == 0);
+
+ return res / CT_Size + 1;
+ }
+
+ PCODE GetTarget()
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+
+ return ((Target*) (GetTargetAddr()))->m_pTarget;
+ }
+
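+ // Address of this precode's Target slot, i.e. the literal that the ldr x12 instruction loads.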
+ PCODE GetTargetAddr()
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+
+ return dac_cast<TADDR>(&m_rgCode[1]) + DecodeLDRx12(m_rgCode[1]);
+ }
+
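+ // Step back over the (count - i - 1) == GetBackIndex() - 1 Target slots that precede this precode's
+ // slot (targets are stored in reverse order) and over the Base in front of them.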
+ TADDR GetBase() // Pointer to central pMethodDescChunk
+ {
+ LIMITED_METHOD_CONTRACT;
+ SUPPORTS_DAC;
+
+ return GetTargetAddr() - (GetBackIndex() - 1) * (sizeof(Target)) - sizeof(Base);
+ }
+
+ PTR_MethodDescChunk GetMethodDescChunk()
+ {
+ return PTR_MethodDescChunk(((Base*) GetBase())->m_pMethodDescChunk);
+ }
+
+ int GetCount();
+ TADDR GetMethodDesc();
+ MethodDescIndex* GetMethodDescIndex();
+
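+ // Size of the writeable region that Init touches: from this precode's code through its Target slot
+ // (and its MethodDescIndex, when indexes are enabled).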
+ size_t GetSizeRW()
+ {
+ LIMITED_METHOD_CONTRACT;
+
+#if COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+ return dac_cast<TADDR>(GetMethodDescIndex()) + sizeof(MethodDescIndex) - dac_cast<TADDR>(this);
+#else // COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+ return GetTargetAddr() + sizeof(Target) - dac_cast<TADDR>(this);
+#endif // COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+ }
+
+#ifndef DACCESS_COMPILE
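+ // Point the Target slot back at PrecodeCompactedFixupThunk so that the next call goes through the prestub again.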
+ void ResetTargetInterlocked()
+ {
+ CONTRACTL
+ {
+ THROWS;
+ GC_NOTRIGGER;
+ }
+ CONTRACTL_END;
+
+ ExecutableWriterHolder<Target> precodeWriterHolder((Target*) GetTargetAddr(), sizeof(Target));
+ InterlockedExchange64((LONGLONG*)&precodeWriterHolder.GetRW()->m_pTarget, (TADDR)GetEEFuncEntryPoint(PrecodeCompactedFixupThunk));
+ }
+
+ BOOL SetTargetInterlocked(TADDR target, TADDR expected)
+ {
+ CONTRACTL
+ {
+ THROWS;
+ GC_NOTRIGGER;
+ }
+ CONTRACTL_END;
+
+ ExecutableWriterHolder<Target> precodeWriterHolder((Target *) GetTargetAddr(), sizeof(Target));
+ return (TADDR)InterlockedCompareExchange64(
+ (LONGLONG*)&precodeWriterHolder.GetRW()->m_pTarget, (TADDR)target, (TADDR)expected) == expected;
+ }
+#endif // !DACCESS_COMPILE
+
+ static BOOL IsCompactedFixupPrecodeByASM(PCODE addr)
+ {
+ PTR_DWORD pInstr = dac_cast<PTR_DWORD>(PCODEToPINSTR(addr));
+ return
+ (pInstr[0] == COMPACTED_FIXUP_PRECODE_ADR) &&
+ ((pInstr[1] & LDRX12_IMM_OPCODE_MASK) == LDRX12_IMM_OPCODE) &&
+ (pInstr[2] == COMPACTED_FIXUP_PRECODE_BR);
+ }
+
+#ifdef DACCESS_COMPILE
+ void EnumMemoryRegions(CLRDataEnumMemoryFlags flags);
+#endif
+ };
+
+ static SIZE_T Size(int count) {
+ const int align_Size = sizeof(Code::m_rgCode[0]) * (count & 1); // alignment nop size
+
+ return count * (sizeof(Code) + sizeof(Target) + sizeof(MethodDescIndex) * COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES) + sizeof(Base) + align_Size;
+ };
+
+ static void Init(TADDR pPrecodeChunk, MethodDescChunk* pMDChunk, LoaderAllocator* pLoaderAllocator);
+};
+
+typedef DPTR(CompactedFixupPrecodeChunk::Code) PTR_CompactedFixupPrecode;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
// Precode to shuffle this and retbuf for closed delegates over static methods with return buffer
struct ThisPtrRetBufPrecode {
}
#endif
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (CompactedFixupPrecodeChunk::Code::IsCompactedFixupPrecodeByASM(pCode))
+ {
+ PCODE pTarget = dac_cast<PTR_CompactedFixupPrecode>(pInstr)->GetTarget();
+
+ if (isJump(pTarget))
+ {
+ pTarget = decodeJump(pTarget);
+ }
+
+ return pTarget == (TADDR)PrecodeCompactedFixupThunk;
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
// StubPrecode
if (pInstr[0] == 0x10000089 && // adr x9, #16
pInstr[1] == 0xA940312A && // ldp x10,x12,[x9]
#endif // !DACCESS_COMPILE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+void CompactedFixupPrecodeChunk::Code::Init(int count, int i, MethodDescChunk* pMethodDescChunk, LoaderAllocator* pLoaderAllocator, int iMethodDescChunkIndex)
+{
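+ // Distance from the ldr (m_rgCode[1]) to this precode's Target slot: the ldr and br of this precode,
+ // the Code blocks of the following (count - i - 1) precodes, the alignment nop when count is odd,
+ // the shared Base, and the Target slots of those same (count - i - 1) precodes (targets are stored
+ // in reverse order). See the layout comment in CompactedFixupPrecodeChunk.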
+ int align = (count & 1) * sizeof(m_rgCode[0]);
+ int offset = (sizeof(Code) + sizeof(Target)) * (count - i - 1) + 2 * sizeof(m_rgCode[0]) + sizeof(Base) + align;
+
+ m_rgCode[0] = COMPACTED_FIXUP_PRECODE_ADR;
+ m_rgCode[1] = EncodeLDRx12(offset);
+ m_rgCode[2] = COMPACTED_FIXUP_PRECODE_BR;
+
+ _ASSERTE(offset == DecodeLDRx12(m_rgCode[1]));
+
+ if (pLoaderAllocator != NULL)
+ {
+ ((Target*)GetTargetAddr())->m_pTarget = GetEEFuncEntryPoint(PrecodeCompactedFixupThunk);
+ }
+
+ if (((Base*) GetBase())->m_pMethodDescChunk == NULL)
+ ((Base*) GetBase())->m_pMethodDescChunk = dac_cast<TADDR>(pMethodDescChunk);
+
+#if COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+ _ASSERTE(FitsInU1(iMethodDescChunkIndex));
+ ((MethodDescIndex*)GetMethodDescIndex())->m_MethodDescChunkIndex = static_cast<BYTE>(iMethodDescChunkIndex);
+#endif // COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+}
+
+void CompactedFixupPrecodeChunk::Code::Init(CompactedFixupPrecodeChunk::Code* pPrecodeRX, MethodDesc* pMD, LoaderAllocator* pLoaderAllocator)
+{
+ WRAPPER_NO_CONTRACT;
+
+ MethodDescChunk *pChunk = pMD->GetMethodDescChunk();
+
+ _ASSERTE((dac_cast<TADDR>(this) - pChunk->GetTemporaryEntryPoints()) % sizeof(Code) == 0);
+
+ Init(pChunk->GetCount(), (dac_cast<TADDR>(this) - pChunk->GetTemporaryEntryPoints()) / sizeof(Code), pChunk, pLoaderAllocator, pMD->GetMethodDescIndex());
+
+ _ASSERTE(pPrecodeRX->GetMethodDesc() == (TADDR)pMD);
+}
+
+void CompactedFixupPrecodeChunk::Init(TADDR pPrecodeChunk, MethodDescChunk* pChunk, LoaderAllocator* pLoaderAllocator)
+{
+ int count = pChunk->GetCount();
+ MethodDesc* pMD = pChunk->GetFirstMethodDesc();
+ TADDR pPrecode = pPrecodeChunk;
+ int iMethodDescChunkIndex = 0;
+
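+ // iMethodDescChunkIndex accumulates each MethodDesc's byte offset from the first one in the chunk;
+ // Code::Init stores it in MethodDesc::ALIGNMENT units so that it fits in a single byte.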
+ for (int i = 0; i < count; i++) {
+ ((CompactedFixupPrecodeChunk::Code*)pPrecode)->Init(count, i, pChunk, pLoaderAllocator, iMethodDescChunkIndex / MethodDesc::ALIGNMENT);
+ pPrecode += sizeof(CompactedFixupPrecodeChunk::Code);
+ iMethodDescChunkIndex += pMD->SizeOf();
+ pMD = PTR_MethodDesc(dac_cast<TADDR>(pMD) + pMD->SizeOf());
+ }
+}
+
+int CompactedFixupPrecodeChunk::Code::GetCount()
+{
+ return GetMethodDescChunk()->GetCount();
+}
+
+#if COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
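+// Index slot of precode i: step past this precode's own Target slot, the i == GetCount() - GetBackIndex()
+// Target slots that follow it (targets are in reverse order), and the i index bytes that precede index_i.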
+CompactedFixupPrecodeChunk::MethodDescIndex* CompactedFixupPrecodeChunk::Code::GetMethodDescIndex()
+{
+ return (MethodDescIndex*) (GetTargetAddr() + sizeof(Target) + (sizeof(Target) + sizeof(MethodDescIndex)) * (GetCount() - GetBackIndex()));
+}
+
+TADDR CompactedFixupPrecodeChunk::Code::GetMethodDesc()
+{
+ return dac_cast<TADDR>(GetMethodDescChunk()->GetFirstMethodDesc()) + GetMethodDescIndex()->m_MethodDescChunkIndex * MethodDesc::ALIGNMENT;
+}
+#else // COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
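+// Without indexes the MethodDesc is found by walking (GetCount() - GetBackIndex()) MethodDescs forward
+// from the first one in the chunk, so the lookup is O(n) in the chunk size.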
+TADDR CompactedFixupPrecodeChunk::Code::GetMethodDesc()
+{
+ TADDR pMD = dac_cast<TADDR>(GetMethodDescChunk()->GetFirstMethodDesc());
+ const int index = GetCount() - GetBackIndex();
+
+ for (int i = 0; i < index; i++) {
+ pMD += PTR_MethodDesc(pMD)->SizeOf();
+ }
+
+ return pMD;
+}
+#endif // COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+
+#ifdef DACCESS_COMPILE
+void CompactedFixupPrecodeChunk::Code::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
+{
+ SUPPORTS_DAC;
+ DacEnumMemoryRegion(dac_cast<TADDR>(this), sizeof(Code));
+ DacEnumMemoryRegion(GetTargetAddr(), sizeof(Target));
+#if COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+ DacEnumMemoryRegion(dac_cast<TADDR>(GetMethodDescIndex()), sizeof(MethodDescIndex));
+#endif // COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+
+ DacEnumMemoryRegion(GetBase(), sizeof(TADDR));
+}
+#endif // DACCESS_COMPILE
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
void UpdateRegDisplayFromCalleeSavedRegisters(REGDISPLAY * pRD, CalleeSavedRegisters * pCalleeSaved)
{
LIMITED_METHOD_CONTRACT;
PCODE target = NULL;
bool setTargetAfterAddingToHashTable = false;
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ _ASSERTE(type != PRECODE_COMPACTED_FIXUP);
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
if (type != GetDefaultType(pMD) &&
// Always use stable entrypoint for LCG. If the cached precode pointed directly to JITed code,
// we would not be able to reuse it when the DynamicMethodDesc got reused for a new DynamicMethod.
MethodDescChunk* pChunk = GetMethodDescChunk();
_ASSERTE(pChunk->HasTemporaryEntryPoints());
- int lo = 0, hi = pChunk->GetCount() - 1;
+ int lo = 0;
- // Find the temporary entrypoint in the chunk by binary search
- while (lo < hi)
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS && !COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+ if (((Precode*)pChunk->GetTemporaryEntryPoints())->GetType() == PRECODE_COMPACTED_FIXUP)
{
- int mid = (lo + hi) / 2;
+ // Without per-precode indexes the MethodDesc is located by a linear search over the chunk
+ TADDR pMethodDesc = dac_cast<TADDR>(pChunk->GetFirstMethodDesc());
- TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(mid);
+ for (; lo < pChunk->GetCount() && PTR_MethodDesc(pMethodDesc) != this; lo++)
+ pMethodDesc += PTR_MethodDesc(pMethodDesc)->SizeOf();
+ }
+ else
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS && !COMPACTED_FIXUP_PRECODE_CHUNK_HAS_INDEXES
+ {
+ int hi = pChunk->GetCount() - 1;
- MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint);
- if (PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD))
- return pEntryPoint;
+ // Find the temporary entrypoint in the chunk by binary search
+ while (lo < hi)
+ {
+ int mid = (lo + hi) / 2;
- if (PTR_HOST_TO_TADDR(this) > PTR_HOST_TO_TADDR(pMD))
- lo = mid + 1;
- else
- hi = mid - 1;
- }
+ TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(mid);
+
+ MethodDesc * pMD = MethodDesc::GetMethodDescFromStubAddr(pEntryPoint);
+ if (PTR_HOST_TO_TADDR(this) == PTR_HOST_TO_TADDR(pMD))
+ return pEntryPoint;
- _ASSERTE(lo == hi);
+ if (PTR_HOST_TO_TADDR(this) > PTR_HOST_TO_TADDR(pMD))
+ lo = mid + 1;
+ else
+ hi = mid - 1;
+ }
+
+ _ASSERTE(lo == hi);
+ }
TADDR pEntryPoint = pChunk->GetTemporaryEntryPoint(lo);
}
// Allocate the precode if necessary
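+ // A compacted fixup precode already satisfies a request for a regular fixup precode,
+ // so no new precode is allocated in that case.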
- if (requiredType != availableType)
+ if (requiredType != availableType
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ && !(requiredType == PRECODE_FIXUP && availableType == PRECODE_COMPACTED_FIXUP)
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ )
{
// code:Precode::AllocateTemporaryEntryPoints should always create precode of the right type for dynamic methods.
// If we took this path for dynamic methods, the precode may leak since we may allocate it in domain-neutral loader heap.
#ifdef HAS_FIXUP_PRECODE
case PRECODE_FIXUP:
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
#endif // HAS_THISPTR_RETBUF_PRECODE
case PRECODE_FIXUP:
return sizeof(FixupPrecode);
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+ return sizeof(CompactedFixupPrecodeChunk::Code);
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
return sizeof(ThisPtrRetBufPrecode);
target = AsFixupPrecode()->GetTarget();
break;
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+ target = AsCompactedFixupPrecode()->GetTarget();
+ break;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
target = AsThisPtrRetBufPrecode()->GetTarget();
pMD = AsFixupPrecode()->GetMethodDesc();
break;
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+ pMD = AsCompactedFixupPrecode()->GetMethodDesc();
+ break;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
pMD = AsThisPtrRetBufPrecode()->GetMethodDesc();
}
#endif // HAS_FIXUP_PRECODE_CHUNKS
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (pMDfromPrecode == NULL)
+ {
+ _ASSERTE(GetType() != PRECODE_COMPACTED_FIXUP);
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
return FALSE;
}
return TRUE;
#endif
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (IsPointingTo(target, GetEEFuncEntryPoint(PrecodeCompactedFixupThunk)))
+ return TRUE;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
#ifdef FEATURE_PREJIT
Module *pZapModule = GetMethodDesc()->GetZapModule();
if (pZapModule != NULL)
return PTR_Precode(temporaryEntryPoints + index * sizeof(FixupPrecode));
}
#endif
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (t == PRECODE_COMPACTED_FIXUP)
+ {
+ return PTR_Precode(temporaryEntryPoints + index * sizeof(CompactedFixupPrecodeChunk::Code));
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
SIZE_T oneSize = SizeOfTemporaryEntryPoint(t);
return PTR_Precode(temporaryEntryPoints + index * oneSize);
}
WRAPPER_NO_CONTRACT;
SUPPORTS_DAC;
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (t == PRECODE_COMPACTED_FIXUP)
+ {
+ // The combination of ARM64-specific compacted fixup precodes and AMD64-specific jump stubs is not supported
+ _ASSERTE(!preallocateJumpStubs);
+ return CompactedFixupPrecodeChunk::Size(count);
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_FIXUP_PRECODE_CHUNKS
if (t == PRECODE_FIXUP)
{
SIZE_T size;
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ _ASSERTE(t != PRECODE_COMPACTED_FIXUP);
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_FIXUP_PRECODE_CHUNKS
if (t == PRECODE_FIXUP)
{
((FixupPrecode*)this)->Init((FixupPrecode*)pPrecodeRX, pMD, pLoaderAllocator);
break;
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+ ((CompactedFixupPrecodeChunk::Code*)this)->Init((CompactedFixupPrecodeChunk::Code*)pPrecodeRX, pMD, pLoaderAllocator);
+ break;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
((ThisPtrRetBufPrecode*)this)->Init(pMD, pLoaderAllocator);
break;
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+ AsCompactedFixupPrecode()->ResetTargetInterlocked();
+ break;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
default:
UnexpectedPrecodeType("Precode::ResetTargetInterlocked", precodeType);
break;
break;
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+ ret = AsCompactedFixupPrecode()->SetTargetInterlocked(target, expected);
+ break;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
ret = AsThisPtrRetBufPrecode()->SetTargetInterlocked(target, expected);
}
else
#endif
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (t == PRECODE_COMPACTED_FIXUP)
+ {
+ // The writeable size the Init method accesses is dynamic, depending on
+ // the CompactedFixupPrecodeChunk layout.
+ size = ((CompactedFixupPrecodeChunk::Code*)this)->GetSizeRW();
+ }
+ else
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
{
size = Precode::SizeOf(t);
}
}
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ // Prefer the more compact fixup precode when possible
+ if (!pFirstMD->RequiresMethodDescCallingConvention(count > 1))
+ {
+ t = PRECODE_COMPACTED_FIXUP;
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
SIZE_T totalSize = SizeOfTemporaryEntryPoints(t, preallocateJumpStubs, count);
#ifdef HAS_COMPACT_ENTRYPOINTS
TADDR temporaryEntryPoints = (TADDR)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(totalSize, AlignOf(t)));
ExecutableWriterHolder<void> entryPointsWriterHolder((void*)temporaryEntryPoints, totalSize);
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (t == PRECODE_COMPACTED_FIXUP)
+ {
+ TADDR entryPoint = temporaryEntryPoints;
+ TADDR entryPointRW = (TADDR)entryPointsWriterHolder.GetRW();
+
+ ((CompactedFixupPrecodeChunk *)entryPointRW)->Init(entryPoint, pChunk, pLoaderAllocator);
+
+#ifdef FEATURE_PERFMAP
+ PerfMap::LogStubs(__FUNCTION__, "PRECODE_COMPACTED_FIXUP", (PCODE)temporaryEntryPoints, totalSize);
+#endif
+ ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, totalSize);
+
+ return temporaryEntryPoints;
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
#ifdef HAS_FIXUP_PRECODE_CHUNKS
if (t == PRECODE_FIXUP)
{
}
#endif
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (t == PRECODE_COMPACTED_FIXUP)
+ {
+ AsCompactedFixupPrecode()->EnumMemoryRegions(flags);
+ return;
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
DacEnumMemoryRegion(GetStart(), SizeOf(t));
}
#endif
#ifdef HAS_FIXUP_PRECODE
PRECODE_FIXUP = FixupPrecode::Type,
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ PRECODE_COMPACTED_FIXUP = CompactedFixupPrecodeChunk::Code::Type,
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
#ifdef HAS_THISPTR_RETBUF_PRECODE
PRECODE_THISPTR_RETBUF = ThisPtrRetBufPrecode::Type,
#endif // HAS_THISPTR_RETBUF_PRECODE
}
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ CompactedFixupPrecodeChunk::Code* AsCompactedFixupPrecode()
+ {
+ LIMITED_METHOD_CONTRACT;
+ SUPPORTS_DAC;
+
+ return dac_cast<PTR_CompactedFixupPrecode>(this);
+ }
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
#ifdef HAS_THISPTR_RETBUF_PRECODE
ThisPtrRetBufPrecode* AsThisPtrRetBufPrecode()
{
_ASSERTE (align >= sizeof(void*));
#endif // TARGET_ARM && HAS_COMPACT_ENTRYPOINTS
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ // Reached only when allocating a whole CompactedFixupPrecode chunk.
+ // The chunk has to be pointer-aligned because it holds pointers to the MethodDescChunk and to the targets.
+ if (t == PRECODE_COMPACTED_FIXUP)
+ _ASSERTE(align >= alignof(void*));
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
return align;
}
// Always do consistency check in debug
if (fSpeculative INDEBUG(|| TRUE))
{
- if (!IS_ALIGNED(pInstr, PRECODE_ALIGNMENT) || !IsValidType(PTR_Precode(pInstr)->GetType()))
+ if ((!IS_ALIGNED(pInstr, PRECODE_ALIGNMENT) || !IsValidType(PTR_Precode(pInstr)->GetType()))
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ && !(IS_ALIGNED(pInstr, LDRX12_IMM_OFFSET_ALIGNMENT) && // Instruction-size alignment is sufficient for compacted precodes
+ PTR_Precode(pInstr)->GetType() == PRECODE_COMPACTED_FIXUP)
+#endif
+ )
{
if (fSpeculative) return NULL;
_ASSERTE(!"Precode::GetPrecodeFromEntryPoint: Unexpected code in precode");
LIMITED_METHOD_DAC_CONTRACT;
#ifdef HAS_FIXUP_PRECODE_CHUNKS
_ASSERTE(t != PRECODE_FIXUP);
+#endif
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ _ASSERTE(t != PRECODE_COMPACTED_FIXUP);
#endif
return ALIGN_UP(SizeOf(t), AlignOf(t));
}
#endif // defined (HAS_COMPACT_ENTRYPOINTS) && defined (TARGET_ARM)
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
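+// Called from PrecodeCompactedFixupThunk: the thunk passes the precode address (taken from x11) in x0;
+// resolve the owning MethodDesc by walking back to the chunk's pMethodDescChunk.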
+extern "C" MethodDesc * STDCALL PreStubGetMethodDescForCompactedFixup(PCODE pCode)
+{
+ _ASSERTE(CompactedFixupPrecodeChunk::Code::IsCompactedFixupPrecodeByASM(pCode));
+
+ return PTR_MethodDesc(PTR_CompactedFixupPrecode(pCode)->GetMethodDesc());
+}
+
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
//=============================================================================
// This function generates the real code when from Preemptive mode.
// It is specifically designed to work with the UnmanagedCallersOnlyAttribute.
}
#endif //HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ if (pMD->HasPrecode() && pMD->GetPrecode()->GetType() == PRECODE_COMPACTED_FIXUP
+ && pMD->IsNativeCodeStableAfterInit())
+ {
+ PCODE pDirectTarget = pMD->IsFCall() ? ECall::GetFCallImpl(pMD) : pMD->GetNativeCode();
+ if (pDirectTarget != NULL)
+ pCode = pDirectTarget;
+ }
+#endif //HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE)
{
CORCOMPILE_EXTERNAL_METHOD_THUNK * pThunk = (CORCOMPILE_EXTERNAL_METHOD_THUNK *)pIndirection;
break;
#endif // HAS_FIXUP_PRECODE
+#if HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+ case PRECODE_COMPACTED_FIXUP:
+ break;
+#endif // HAS_COMPACTED_FIXUP_PRECODE_CHUNKS
+
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
break;