From: Brian Sullivan Date: Sat, 22 Apr 2017 11:16:25 +0000 (-0700) Subject: Two improvements for LCG jump stubs (dynamic methods): (dotnet/coreclr#9160) X-Git-Tag: submit/tizen/20210909.063632~11030^2~7158 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=20d286eb04fa7e9bdf946c128257d41fb62adda9;p=platform%2Fupstream%2Fdotnet%2Fruntime.git Two improvements for LCG jump stubs (dynamic methods): (dotnet/coreclr#9160) When requesting a jump stub block for LCG, ask for a block of 4 jump stubs instead of 32 Enable caching and cache lookup for LCG jump stubs Commit migrated from https://github.com/dotnet/coreclr/commit/ea25e8d23626f1f5734016e7c9acb83546688a8e --- diff --git a/src/coreclr/src/vm/codeman.cpp b/src/coreclr/src/vm/codeman.cpp index 73b4c06..db9d786 100644 --- a/src/coreclr/src/vm/codeman.cpp +++ b/src/coreclr/src/vm/codeman.cpp @@ -3177,8 +3177,8 @@ void EEJitManager::RemoveJitData (CodeHeader * pCHdr, size_t GCinfo_len, size_t LCGMethodResolver * pResolver = pMD->AsDynamicMethodDesc()->GetLCGMethodResolver(); - // Clear the pointer only if it matches what we are about to free. There may be cases where the JIT is reentered and - // the method JITed multiple times. + // Clear the pointer only if it matches what we are about to free. + // There can be cases where the JIT is reentered and we JITed the method multiple times. 
if (pResolver->m_recordCodePointer == codeStart) pResolver->m_recordCodePointer = NULL; } @@ -4885,7 +4885,7 @@ void ExecutionManager::Unload(LoaderAllocator *pLoaderAllocator) */ StackwalkCache::Invalidate(pLoaderAllocator); - JumpStubCache * pJumpStubCache = (JumpStubCache *)pLoaderAllocator->m_pJumpStubCache; + JumpStubCache * pJumpStubCache = (JumpStubCache *) pLoaderAllocator->m_pJumpStubCache; if (pJumpStubCache != NULL) { delete pJumpStubCache; @@ -4954,18 +4954,33 @@ PCODE ExecutionManager::jumpStub(MethodDesc* pMD, PCODE target, PCODE jumpStub = NULL; if (pLoaderAllocator == NULL) + { pLoaderAllocator = pMD->GetLoaderAllocatorForCode(); + } _ASSERTE(pLoaderAllocator != NULL); - bool isLCG = pMD && pMD->IsLCGMethod(); + bool isLCG = pMD && pMD->IsLCGMethod(); + LCGMethodResolver * pResolver = nullptr; + JumpStubCache * pJumpStubCache = (JumpStubCache *) pLoaderAllocator->m_pJumpStubCache; - CrstHolder ch(&m_JumpStubCrst); + if (isLCG) + { + pResolver = pMD->AsDynamicMethodDesc()->GetLCGMethodResolver(); + pJumpStubCache = pResolver->m_pJumpStubCache; + } - JumpStubCache * pJumpStubCache = (JumpStubCache *)pLoaderAllocator->m_pJumpStubCache; + CrstHolder ch(&m_JumpStubCrst); if (pJumpStubCache == NULL) { pJumpStubCache = new JumpStubCache(); - pLoaderAllocator->m_pJumpStubCache = pJumpStubCache; + if (isLCG) + { + pResolver->m_pJumpStubCache = pJumpStubCache; + } + else + { + pLoaderAllocator->m_pJumpStubCache = pJumpStubCache; + } } if (isLCG) @@ -5018,9 +5033,19 @@ PCODE ExecutionManager::getNextJumpStub(MethodDesc* pMD, PCODE target, POSTCONDITION(RETVAL != NULL); } CONTRACT_END; - BYTE * jumpStub = NULL; - bool isLCG = pMD && pMD->IsLCGMethod(); - JumpStubBlockHeader ** ppHead = isLCG ? 
&(pMD->AsDynamicMethodDesc()->GetLCGMethodResolver()->m_jumpStubBlock) : &(((JumpStubCache *)(pLoaderAllocator->m_pJumpStubCache))->m_pBlocks); + DWORD numJumpStubs = DEFAULT_JUMPSTUBS_PER_BLOCK; // a block of 32 JumpStubs + BYTE * jumpStub = NULL; + bool isLCG = pMD && pMD->IsLCGMethod(); + JumpStubCache * pJumpStubCache = (JumpStubCache *) pLoaderAllocator->m_pJumpStubCache; + + if (isLCG) + { + LCGMethodResolver * pResolver; + pResolver = pMD->AsDynamicMethodDesc()->GetLCGMethodResolver(); + pJumpStubCache = pResolver->m_pJumpStubCache; + } + + JumpStubBlockHeader ** ppHead = &(pJumpStubCache->m_pBlocks); JumpStubBlockHeader * curBlock = *ppHead; // allocate a new jumpstub from 'curBlock' if it is not fully allocated @@ -5039,7 +5064,6 @@ PCODE ExecutionManager::getNextJumpStub(MethodDesc* pMD, PCODE target, goto DONE; } } - curBlock = curBlock->m_next; } @@ -5047,6 +5071,24 @@ PCODE ExecutionManager::getNextJumpStub(MethodDesc* pMD, PCODE target, if (isLCG) { + // For LCG we request a small block of 4 jumpstubs, because we can not share them + // with any other methods and very frequently our method only needs one jump stub. + // Using 4 gives a request size of (32 + 4*12) or 80 bytes. + // Also note that request sizes are rounded up to a multiple of 16. + // The request size is calculated into 'blockSize' in allocJumpStubBlock. + // For x64 the value of BACK_TO_BACK_JUMP_ALLOCATE_SIZE is 12 bytes + // and the sizeof(JumpStubBlockHeader) is 32. + // + + numJumpStubs = 4; + +#ifdef _TARGET_AMD64_ + // Note that these values are not requirements, instead we are + // just confirming the values that are mentioned in the comments. 
+ _ASSERTE(BACK_TO_BACK_JUMP_ALLOCATE_SIZE == 12); + _ASSERTE(sizeof(JumpStubBlockHeader) == 32); +#endif + // Increment counter of LCG jump stub block allocations m_LCG_JumpStubBlockAllocCount++; } @@ -5056,9 +5098,12 @@ PCODE ExecutionManager::getNextJumpStub(MethodDesc* pMD, PCODE target, m_normal_JumpStubBlockAllocCount++; } - // allocJumpStubBlock will allocate from the LoaderCodeHeap for normal methods and HostCodeHeap for LCG methods - // this can throw an OM exception - curBlock = ExecutionManager::GetEEJitManager()->allocJumpStubBlock(pMD, DEFAULT_JUMPSTUBS_PER_BLOCK, loAddr, hiAddr, pLoaderAllocator); + // allocJumpStubBlock will allocate from the LoaderCodeHeap for normal methods + // and will allocate from a HostCodeHeap for LCG methods. + // + // note that this can throw an OOM exception + + curBlock = ExecutionManager::GetEEJitManager()->allocJumpStubBlock(pMD, numJumpStubs, loAddr, hiAddr, pLoaderAllocator); jumpStub = (BYTE *) curBlock + sizeof(JumpStubBlockHeader) + ((size_t) curBlock->m_used * BACK_TO_BACK_JUMP_ALLOCATE_SIZE); @@ -5078,25 +5123,16 @@ DONE: emitBackToBackJump(jumpStub, (void*) target); - if (isLCG) - { - // always get a new jump stub for LCG method - // We don't share jump stubs among different LCG methods so that the jump stubs used - // by every LCG method can be cleaned up individually - // There is not much benefit to share jump stubs within one LCG method anyway. 
- } - else - { - JumpStubCache * pJumpStubCache = (JumpStubCache *)pLoaderAllocator->m_pJumpStubCache; - _ASSERTE(pJumpStubCache != NULL); + // We always add the new jumpstub to the jumpStubCache + // + _ASSERTE(pJumpStubCache != NULL); - JumpStubEntry entry; + JumpStubEntry entry; - entry.m_target = target; - entry.m_jumpStub = (PCODE)jumpStub; + entry.m_target = target; + entry.m_jumpStub = (PCODE)jumpStub; - pJumpStubCache->m_Table.Add(entry); - } + pJumpStubCache->m_Table.Add(entry); curBlock->m_used++; // record that we have used up one more jumpStub in the block @@ -5556,9 +5592,9 @@ BOOL NativeImageJitManager::JitCodeToMethodInfo(RangeSection * pRangeSection, g_IBCLogger.LogMethodCodeAccess(*ppMethodDesc); } - //Get the function entry that corresponds to the real method desc. + // Get the function entry that corresponds to the real method desc. _ASSERTE((RelativePc >= RUNTIME_FUNCTION__BeginAddress(FunctionEntry))); - + if (pCodeInfo) { pCodeInfo->m_relOffset = (DWORD) diff --git a/src/coreclr/src/vm/codeman.h b/src/coreclr/src/vm/codeman.h index 5fbddea..9d7ed4d 100644 --- a/src/coreclr/src/vm/codeman.h +++ b/src/coreclr/src/vm/codeman.h @@ -1490,6 +1490,7 @@ private: static unsigned m_LCG_JumpStubBlockAllocCount; static unsigned m_LCG_JumpStubBlockFullCount; +public: struct JumpStubCache { JumpStubCache() diff --git a/src/coreclr/src/vm/dynamicmethod.cpp b/src/coreclr/src/vm/dynamicmethod.cpp index 3eec125..2a61f97 100644 --- a/src/coreclr/src/vm/dynamicmethod.cpp +++ b/src/coreclr/src/vm/dynamicmethod.cpp @@ -917,7 +917,7 @@ void LCGMethodResolver::Reset() m_DynamicStringLiterals = NULL; m_recordCodePointer = NULL; m_UsedIndCellList = NULL; - m_jumpStubBlock = NULL; + m_pJumpStubCache = NULL; m_next = NULL; m_Code = NULL; } @@ -1035,19 +1035,24 @@ void LCGMethodResolver::Destroy(BOOL fDomainUnload) m_recordCodePointer = NULL; } - JumpStubBlockHeader* current = m_jumpStubBlock; - JumpStubBlockHeader* next; - while (current) + if (m_pJumpStubCache != NULL) 
{ - next = current->m_next; + JumpStubBlockHeader* current = m_pJumpStubCache->m_pBlocks; + while (current) + { + JumpStubBlockHeader* next = current->m_next; + + HostCodeHeap *pHeap = current->GetHostCodeHeap(); + LOG((LF_BCL, LL_INFO1000, "Level3 - Resolver {0x%p} - Release reference to heap {%p, vt(0x%x)} \n", current, pHeap, *(size_t*)pHeap)); + pHeap->m_pJitManager->FreeCodeMemory(pHeap, current); - HostCodeHeap *pHeap = current->GetHostCodeHeap(); - LOG((LF_BCL, LL_INFO1000, "Level3 - Resolver {0x%p} - Release reference to heap {%p, vt(0x%x)} \n", current, pHeap, *(size_t*)pHeap)); - pHeap->m_pJitManager->FreeCodeMemory(pHeap, current); + current = next; + } + m_pJumpStubCache->m_pBlocks = NULL; - current = next; + delete m_pJumpStubCache; + m_pJumpStubCache = NULL; } - m_jumpStubBlock = NULL; if (m_managedResolver) { diff --git a/src/coreclr/src/vm/dynamicmethod.h b/src/coreclr/src/vm/dynamicmethod.h index a96200b..f9a92b0 100644 --- a/src/coreclr/src/vm/dynamicmethod.h +++ b/src/coreclr/src/vm/dynamicmethod.h @@ -107,6 +107,7 @@ class LCGMethodResolver : public DynamicResolver friend class ExecutionManager; friend class EEJitManager; friend class HostCodeHeap; + friend struct ExecutionManager::JumpStubCache; public: void Destroy(BOOL fDomainUnload = FALSE); @@ -162,7 +163,7 @@ private: ChunkAllocator m_jitTempData; DynamicStringLiteral* m_DynamicStringLiterals; IndCellList * m_UsedIndCellList; // list to keep track of all the indirection cells used by the jitted code - JumpStubBlockHeader* m_jumpStubBlock; + ExecutionManager::JumpStubCache * m_pJumpStubCache; }; // class LCGMethodResolver //---------------------------------------------------------------------------------------