1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
15 #include "stublink.inl"
17 #include "rtlfunctions.h"
19 #define S_BYTEPTR(x) S_SIZE_T((SIZE_T)(x))
21 #ifndef DACCESS_COMPILE
24 //************************************************************************
27 // There are two types of CodeElements: CodeRuns (a stream of uninterpreted
28 // code bytes) and LabelRefs (an instruction containing
30 //************************************************************************
33 enum CodeElementType {
// NOTE(review): the enclosing `struct CodeElement` declaration is elided from this
// view; the members below form the common header shared by CodeRun and LabelRef.
39 CodeElementType m_type; // kCodeRun or kLabelRef
40 CodeElement *m_next; // ptr to next CodeElement
42 // Used as workspace during Link(): holds the offset relative to
43 // the start of the final stub.
49 //************************************************************************
50 // CodeRun: A run of uninterrupted code bytes.
51 //************************************************************************
56 #define CODERUNSIZE 32
// A fixed-capacity run of raw code bytes; StubLinker appends bytes here until
// the buffer is full, then starts a new CodeRun (see AppendNewEmptyCodeRun).
59 struct CodeRun : public CodeElement
61 UINT m_numcodebytes; // how many bytes are actually used
62 BYTE m_codebytes[CODERUNSIZE];
65 //************************************************************************
66 // LabelRef: An instruction containing an embedded label reference
67 //************************************************************************
68 struct LabelRef : public CodeElement
70 // provides platform-specific information about the instruction
71 InstructionFormat *m_pInstructionFormat;
73 // a variation code (interpretation is specific to the InstructionFormat)
74 // typically used to customize an instruction (e.g. with a condition
81 // Workspace during the link phase
85 // Pointer to next LabelRef
// Singly-linked list threaded through all LabelRefs (head: m_pFirstLabelRef),
// walked by CalculateSize when growing reference sizes.
86 LabelRef *m_nextLabelRef;
90 //************************************************************************
91 // IntermediateUnwindInfo
92 //************************************************************************
94 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
98 // List of unwind operations, queued in StubLinker::m_pUnwindInfoList.
98 // List of unwind operations, queued in StubLinker::m_pUnwindInfoList.
99 struct IntermediateUnwindInfo
101 IntermediateUnwindInfo *pNext;
// Trailing flexible array; AllocUnwindInfo over-allocates the struct so extra
// UNWIND_CODE slots follow this first entry.
104 UNWIND_CODE rgUnwindCode[1]; // variable length, depends on first entry's UnwindOp
106 #endif // TARGET_AMD64
// Global, address-ordered list of heap segments that carry stub unwind info,
// protected by g_StubUnwindInfoHeapSegmentsCrst (see UnregisterUnwindInfoInLoaderHeapCallback).
109 StubUnwindInfoHeapSegment *g_StubHeapSegments;
110 CrstStatic g_StubUnwindInfoHeapSegmentsCrst;
111 #ifdef _DEBUG // for unit test
// Debug-only alias so a unit test/debugger can locate the list head by name.
112 void *__DEBUG__g_StubHeapSegments = &g_StubHeapSegments;
117 // Callback registered via RtlInstallFunctionTableCallback. Called by
118 // RtlpLookupDynamicFunctionEntry to locate RUNTIME_FUNCTION entry for a PC
119 // found within a portion of a heap that contains stub code.
// Resolves ControlPc to the RUNTIME_FUNCTION of the stub containing it, or
// reports "no entry" (see comment at the end). Context is the cookie passed to
// RtlInstallFunctionTableCallback: an encoded StubUnwindInfoHeapSegment*.
122 FindStubFunctionEntry (
123 BIT64_ONLY(IN ULONG64 ControlPc)
124 NOT_BIT64(IN ULONG ControlPc),
136 CONSISTENCY_CHECK(DYNFNTABLE_STUB == IdentifyDynamicFunctionTableTypeFromContext(Context));
138 StubUnwindInfoHeapSegment *pStubHeapSegment = (StubUnwindInfoHeapSegment*)DecodeDynamicFunctionTableContext(Context);
141 // The RUNTIME_FUNCTION entry contains ULONG offsets relative to the
142 // segment base. Stub::EmitUnwindInfo ensures that this cast is valid.
144 ULONG RelativeAddress = (ULONG)((BYTE*)ControlPc - pStubHeapSegment->pbBaseAddress);
146 LOG((LF_STUBS, LL_INFO100000, "ControlPc %p, RelativeAddress 0x%x, pStubHeapSegment %p, pStubHeapSegment->pbBaseAddress %p\n",
150 pStubHeapSegment->pbBaseAddress));
153 // Search this segment's list of stubs for an entry that includes the
154 // segment-relative offset.
156 for (StubUnwindInfoHeader *pHeader = pStubHeapSegment->pUnwindHeaderList;
158 pHeader = pHeader->pNext)
160 // The entry points are in increasing address order.
161 if (RelativeAddress >= RUNTIME_FUNCTION__BeginAddress(&pHeader->FunctionEntry))
163 T_RUNTIME_FUNCTION *pCurFunction = &pHeader->FunctionEntry;
164 T_RUNTIME_FUNCTION *pPrevFunction = NULL;
166 LOG((LF_STUBS, LL_INFO100000, "pCurFunction %p, pCurFunction->BeginAddress 0x%x, pCurFunction->EndAddress 0x%x\n",
168 RUNTIME_FUNCTION__BeginAddress(pCurFunction),
169 RUNTIME_FUNCTION__EndAddress(pCurFunction, (TADDR)pStubHeapSegment->pbBaseAddress)));
// Sanity: each entry is non-empty and entries do not overlap.
171 CONSISTENCY_CHECK((RUNTIME_FUNCTION__EndAddress(pCurFunction, (TADDR)pStubHeapSegment->pbBaseAddress) > RUNTIME_FUNCTION__BeginAddress(pCurFunction)));
172 CONSISTENCY_CHECK((!pPrevFunction || RUNTIME_FUNCTION__EndAddress(pPrevFunction, (TADDR)pStubHeapSegment->pbBaseAddress) <= RUNTIME_FUNCTION__BeginAddress(pCurFunction)));
174 // The entry points are in increasing address order. They're
175 // also contiguous, so after we're sure it's after the start of
176 // the first function (checked above), we only need to test
178 if (RelativeAddress < RUNTIME_FUNCTION__EndAddress(pCurFunction, (TADDR)pStubHeapSegment->pbBaseAddress))
180 CONSISTENCY_CHECK((RelativeAddress >= RUNTIME_FUNCTION__BeginAddress(pCurFunction)));
188 // Return NULL to indicate that there is no RUNTIME_FUNCTION/unwind
189 // information for this offset.
// EnumPageRegions callback: for each heap region being torn down, finds every
// stub unwind-info segment overlapping [pvAllocationBase, +cbReserved), deletes
// its EE function table and unlinks+frees the segment. Always returns false so
// enumeration continues over all regions.
195 bool UnregisterUnwindInfoInLoaderHeapCallback (PVOID pvArgs, PVOID pvAllocationBase, SIZE_T cbReserved)
205 // There may be multiple StubUnwindInfoHeapSegment's associated with a region.
208 LOG((LF_STUBS, LL_INFO1000, "Looking for stub unwind info for LoaderHeap segment %p size %p\n", pvAllocationBase, cbReserved));
// Serialize against concurrent mutation of the global segment list.
210 CrstHolder crst(&g_StubUnwindInfoHeapSegmentsCrst);
212 StubUnwindInfoHeapSegment *pStubHeapSegment;
213 for (StubUnwindInfoHeapSegment **ppPrevStubHeapSegment = &g_StubHeapSegments;
214 (pStubHeapSegment = *ppPrevStubHeapSegment); )
216 LOG((LF_STUBS, LL_INFO10000, " have unwind info for address %p size %p\n", pStubHeapSegment->pbBaseAddress, pStubHeapSegment->cbSegment));
218 // If heap region ends before stub segment
219 if ((BYTE*)pvAllocationBase + cbReserved <= pStubHeapSegment->pbBaseAddress)
221 // The list is ordered, so address range is between segments
225 // The given heap segment base address may fall within a prereserved
226 // region that was given to the heap when the heap was constructed, so
227 // pvAllocationBase may be > pbBaseAddress. Also, there could be
228 // multiple segments for each heap region, so pvAllocationBase may be
229 // < pbBaseAddress. So...there is no meaningful relationship between
230 // pvAllocationBase and pbBaseAddress.
232 // If heap region starts before end of stub segment
233 if ((BYTE*)pvAllocationBase < pStubHeapSegment->pbBaseAddress + pStubHeapSegment->cbSegment)
// Overlap implies full containment: the segment must not extend past the region.
235 _ASSERTE((BYTE*)pvAllocationBase + cbReserved <= pStubHeapSegment->pbBaseAddress + pStubHeapSegment->cbSegment);
237 DeleteEEFunctionTable(pStubHeapSegment);
239 if (pStubHeapSegment->pUnwindInfoTable != 0)
240 delete pStubHeapSegment->pUnwindInfoTable;
// Unlink from the list before freeing; ppPrevStubHeapSegment stays put so the
// next iteration examines the element that replaced this one.
242 *ppPrevStubHeapSegment = pStubHeapSegment->pNext;
244 delete pStubHeapSegment;
248 ppPrevStubHeapSegment = &pStubHeapSegment->pNext;
252 return false; // Keep enumerating
// Unregisters all stub unwind info associated with pHeap's page regions.
// Precondition: the heap was created with stub-unwind-info support enabled.
256 VOID UnregisterUnwindInfoInLoaderHeap (UnlockedLoaderHeap *pHeap)
262 PRECONDITION(pHeap->m_fPermitStubsWithUnwindInfo);
266 pHeap->EnumPageRegions(&UnregisterUnwindInfoInLoaderHeapCallback, NULL /* pvArgs */);
// Debug bookkeeping (presumably checked on heap destruction — elided here).
269 pHeap->m_fStubUnwindInfoUnregistered = TRUE;
// RAII holder for stubs that straddled a segment boundary and must be kept
// alive temporarily (see StubLinker::Link). The reservation list node is
// stored *inside* each parked stub's own memory (immediately after the Stub
// header), so no separate allocation is needed; the destructor releases every
// parked stub by dropping its refcount.
274 class StubUnwindInfoSegmentBoundaryReservationList
276 struct ReservationList
278 ReservationList *pNext;
// The node lives directly after the Stub header in the same allocation.
280 static ReservationList *FromStub (Stub *pStub)
282 return (ReservationList*)(pStub+1);
// Inverse of FromStub: recover the owning Stub from the embedded node.
287 return (Stub*)this - 1;
291 ReservationList *m_pList;
295 StubUnwindInfoSegmentBoundaryReservationList ()
297 LIMITED_METHOD_CONTRACT;
302 ~StubUnwindInfoSegmentBoundaryReservationList ()
304 LIMITED_METHOD_CONTRACT;
306 ReservationList *pList = m_pList;
309 ReservationList *pNext = pList->pNext;
// Stub memory is execute-protected; take a writable mapping to adjust the refcount.
311 ExecutableWriterHolder<Stub> stubWriterHolder(pList->GetStub(), sizeof(Stub));
312 stubWriterHolder.GetRW()->DecRef();
318 void AddStub (Stub *pStub)
320 LIMITED_METHOD_CONTRACT;
322 ReservationList *pList = ReservationList::FromStub(pStub);
324 ExecutableWriterHolder<ReservationList> listWriterHolder(pList, sizeof(ReservationList));
// Push onto the head of the singly-linked list.
325 listWriterHolder.GetRW()->pNext = m_pList;
331 #endif // STUBLINKER_GENERATES_UNWIND_INFO
334 //************************************************************************
336 //************************************************************************
338 //---------------------------------------------------------------
340 //---------------------------------------------------------------
// Initializes all linker state to empty: no code elements, no labels, no
// label refs, no target method, and (when unwind info is generated) empty
// unwind bookkeeping.
341 StubLinker::StubLinker()
350 m_pCodeElements = NULL;
351 m_pFirstCodeLabel = NULL;
352 m_pFirstLabelRef = NULL;
353 m_pPatchLabel = NULL;
354 m_pTargetMethod = NULL;
359 m_cCalleeSavedRegs = 0;
361 m_fPushArgRegs = FALSE;
363 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
365 m_pUnwindInfoCheckLabel = NULL;
368 m_pUnwindInfoList = NULL;
370 m_fHaveFramePointer = FALSE;
// NOTE(review): m_cCalleeSavedRegs is also zeroed above, outside this #ifdef;
// the duplicate here appears to be for a different target's field set — the
// elided preprocessor context between these lines would confirm.
376 m_cCalleeSavedRegs = 0;
379 #endif // STUBLINKER_GENERATES_UNWIND_INFO
384 //---------------------------------------------------------------
385 // Append code bytes.
386 //---------------------------------------------------------------
384 //---------------------------------------------------------------
385 // Append code bytes.
// Copies numBytes from pBytes into the current CodeRun, filling it to
// capacity (CODERUNSIZE) and appending fresh CodeRuns as needed until all
// bytes are consumed.
386 //---------------------------------------------------------------
387 VOID StubLinker::EmitBytes(const BYTE *pBytes, UINT numBytes)
396 CodeElement *pLastCodeElement = GetLastCodeElement();
397 while (numBytes != 0) {
399 if (pLastCodeElement != NULL &&
400 pLastCodeElement->m_type == CodeElement::kCodeRun) {
401 CodeRun *pCodeRun = (CodeRun*)pLastCodeElement;
402 UINT numbytessrc = numBytes;
403 UINT numbytesdst = CODERUNSIZE - pCodeRun->m_numcodebytes;
// Source doesn't fit in the remaining space: fill this run completely and loop.
404 if (numbytesdst <= numbytessrc) {
405 CopyMemory(&(pCodeRun->m_codebytes[pCodeRun->m_numcodebytes]),
408 pCodeRun->m_numcodebytes = CODERUNSIZE;
// Force a new CodeRun to be appended on the next iteration.
409 pLastCodeElement = NULL;
410 pBytes += numbytesdst;
411 numBytes -= numbytesdst;
// (elided else) Everything fits in the current run: copy and finish.
413 CopyMemory(&(pCodeRun->m_codebytes[pCodeRun->m_numcodebytes]),
416 pCodeRun->m_numcodebytes += numbytessrc;
417 pBytes += numbytessrc;
422 pLastCodeElement = AppendNewEmptyCodeRun();
428 //---------------------------------------------------------------
429 // Append code bytes.
430 //---------------------------------------------------------------
// Appends a single byte, writing directly into the current CodeRun when it
// has room; otherwise falls back to the general EmitBytes path.
431 VOID StubLinker::Emit8 (unsigned __int8 val)
440 CodeRun *pCodeRun = GetLastCodeRunIfAny();
441 if (pCodeRun && (CODERUNSIZE - pCodeRun->m_numcodebytes) >= sizeof(val)) {
442 *((unsigned __int8 *)(pCodeRun->m_codebytes + pCodeRun->m_numcodebytes)) = val;
443 pCodeRun->m_numcodebytes += sizeof(val);
445 EmitBytes((BYTE*)&val, sizeof(val));
449 //---------------------------------------------------------------
450 // Append code bytes.
451 //---------------------------------------------------------------
// Appends a 16-bit value (unaligned store) into the current CodeRun when it
// fits; otherwise routes through EmitBytes.
452 VOID StubLinker::Emit16(unsigned __int16 val)
461 CodeRun *pCodeRun = GetLastCodeRunIfAny();
462 if (pCodeRun && (CODERUNSIZE - pCodeRun->m_numcodebytes) >= sizeof(val)) {
463 SET_UNALIGNED_16(pCodeRun->m_codebytes + pCodeRun->m_numcodebytes, val);
464 pCodeRun->m_numcodebytes += sizeof(val);
466 EmitBytes((BYTE*)&val, sizeof(val));
470 //---------------------------------------------------------------
471 // Append code bytes.
472 //---------------------------------------------------------------
// Appends a 32-bit value (unaligned store) into the current CodeRun when it
// fits; otherwise routes through EmitBytes.
473 VOID StubLinker::Emit32(unsigned __int32 val)
482 CodeRun *pCodeRun = GetLastCodeRunIfAny();
483 if (pCodeRun && (CODERUNSIZE - pCodeRun->m_numcodebytes) >= sizeof(val)) {
484 SET_UNALIGNED_32(pCodeRun->m_codebytes + pCodeRun->m_numcodebytes, val);
485 pCodeRun->m_numcodebytes += sizeof(val);
487 EmitBytes((BYTE*)&val, sizeof(val));
491 //---------------------------------------------------------------
492 // Append code bytes.
493 //---------------------------------------------------------------
// Appends a 64-bit value (unaligned store) into the current CodeRun when it
// fits; otherwise routes through EmitBytes.
494 VOID StubLinker::Emit64(unsigned __int64 val)
503 CodeRun *pCodeRun = GetLastCodeRunIfAny();
504 if (pCodeRun && (CODERUNSIZE - pCodeRun->m_numcodebytes) >= sizeof(val)) {
505 SET_UNALIGNED_64(pCodeRun->m_codebytes + pCodeRun->m_numcodebytes, val);
506 pCodeRun->m_numcodebytes += sizeof(val);
508 EmitBytes((BYTE*)&val, sizeof(val));
512 //---------------------------------------------------------------
513 // Append pointer value.
514 //---------------------------------------------------------------
// Appends a pointer-sized value (the pointer itself, not the pointee) using
// an unaligned store; falls back to EmitBytes when the run is too full.
515 VOID StubLinker::EmitPtr(const VOID *val)
524 CodeRun *pCodeRun = GetLastCodeRunIfAny();
525 if (pCodeRun && (CODERUNSIZE - pCodeRun->m_numcodebytes) >= sizeof(val)) {
526 SET_UNALIGNED_PTR(pCodeRun->m_codebytes + pCodeRun->m_numcodebytes, (UINT_PTR)val);
527 pCodeRun->m_numcodebytes += sizeof(val);
529 EmitBytes((BYTE*)&val, sizeof(val));
534 //---------------------------------------------------------------
535 // Create a new undefined label. Label must be assigned to a code
536 // location using EmitLabel() prior to final linking.
537 // Throws COM+ exception on failure.
538 //---------------------------------------------------------------
// Allocates a fresh, internal, relative, as-yet-unplaced label from the quick
// heap and pushes it onto the label list. Caller must EmitLabel() it before Link().
539 CodeLabel* StubLinker::NewCodeLabel()
548 CodeLabel *pCodeLabel = (CodeLabel*)(m_quickHeap.Alloc(sizeof(CodeLabel)));
549 _ASSERTE(pCodeLabel); // QuickHeap throws exceptions rather than returning NULL
550 pCodeLabel->m_next = m_pFirstCodeLabel;
551 pCodeLabel->m_fExternal = FALSE;
552 pCodeLabel->m_fAbsolute = FALSE;
553 pCodeLabel->i.m_pCodeRun = NULL; // NULL marks "not yet emitted"
554 m_pFirstCodeLabel = pCodeLabel;
// Same as NewCodeLabel, but the reference will be fixed up with the label's
// absolute address rather than a PC-relative offset (see EmitStub's fixupval).
560 CodeLabel* StubLinker::NewAbsoluteCodeLabel()
569 CodeLabel *pCodeLabel = NewCodeLabel();
570 pCodeLabel->m_fAbsolute = TRUE;
575 //---------------------------------------------------------------
576 // Sets the label to point to the current "instruction pointer".
577 // It is invalid to call EmitLabel() twice on
579 //---------------------------------------------------------------
575 //---------------------------------------------------------------
576 // Sets the label to point to the current "instruction pointer".
577 // It is invalid to call EmitLabel() twice on
// the same label. The label records its position as (CodeRun, local offset)
// so Link() can compute its final global offset later.
579 //---------------------------------------------------------------
580 VOID StubLinker::EmitLabel(CodeLabel* pCodeLabel)
589 _ASSERTE(!(pCodeLabel->m_fExternal)); //can't emit an external label
590 _ASSERTE(pCodeLabel->i.m_pCodeRun == NULL); //must only emit label once
591 CodeRun *pLastCodeRun = GetLastCodeRunIfAny();
// No current CodeRun (empty stub or last element is a LabelRef): start one.
593 pLastCodeRun = AppendNewEmptyCodeRun();
595 pCodeLabel->i.m_pCodeRun = pLastCodeRun;
596 pCodeLabel->i.m_localOffset = pLastCodeRun->m_numcodebytes;
600 //---------------------------------------------------------------
601 // Combines NewCodeLabel() and EmitLabel() for convenience.
602 // Throws COM+ exception on failure.
603 //---------------------------------------------------------------
// Convenience: allocate a label and immediately pin it at the current position.
604 CodeLabel* StubLinker::EmitNewCodeLabel()
613 CodeLabel* label = NewCodeLabel();
619 //---------------------------------------------------------------
620 // Creates & emits the patch offset label for the stub
621 //---------------------------------------------------------------
// Records the stub's patch-offset label (consumed by EmitStub via
// SetPatchOffset). Re-emitting simply overwrites with the latest label.
622 VOID StubLinker::EmitPatchLabel()
632 // Note that it's OK to have re-emit the patch label,
633 // just use the later one.
636 m_pPatchLabel = EmitNewCodeLabel();
639 //---------------------------------------------------------------
640 // Returns final location of label as an offset from the start
641 // of the stub. Can only be called after linkage.
642 //---------------------------------------------------------------
// Final stub-relative offset of an internal label: its CodeRun's global
// offset (valid only after Link's layout pass) plus its offset within the run.
643 UINT32 StubLinker::GetLabelOffset(CodeLabel *pLabel)
652 _ASSERTE(!(pLabel->m_fExternal));
653 return pLabel->i.m_localOffset + pLabel->i.m_pCodeRun->m_globaloffset;
657 //---------------------------------------------------------------
658 // Create a new label to an external address.
659 // Throws COM+ exception on failure.
660 //---------------------------------------------------------------
// Allocates a label that refers to a fixed address outside the stub
// (m_fExternal=TRUE); such labels are never EmitLabel()'d and resolve to
// e.m_pExternalAddress during linking.
661 CodeLabel* StubLinker::NewExternalCodeLabel(LPVOID pExternalAddress)
668 PRECONDITION(CheckPointer(pExternalAddress));
672 CodeLabel *pCodeLabel = (CodeLabel*)(m_quickHeap.Alloc(sizeof(CodeLabel)));
673 _ASSERTE(pCodeLabel); // QuickHeap throws exceptions rather than returning NULL
674 pCodeLabel->m_next = m_pFirstCodeLabel;
675 pCodeLabel->m_fExternal = TRUE;
676 pCodeLabel->m_fAbsolute = FALSE;
677 pCodeLabel->e.m_pExternalAddress = pExternalAddress;
678 m_pFirstCodeLabel = pCodeLabel;
682 //---------------------------------------------------------------
683 // Set the target method for Instantiating stubs.
684 //---------------------------------------------------------------
// Records the MethodDesc that an instantiating stub targets; EmitStub later
// writes it into the stub via SetInstantiatedMethodDesc.
685 void StubLinker::SetTargetMethod(PTR_MethodDesc pMD)
691 PRECONDITION(pMD != NULL);
694 m_pTargetMethod = pMD;
698 //---------------------------------------------------------------
699 // Append an instruction containing a reference to a label.
701 // target - the label being referenced.
702 // instructionFormat - a platform-specific InstructionFormat object
703 // that gives properties about the reference.
704 // variationCode - uninterpreted data passed to the pInstructionFormat methods.
705 //---------------------------------------------------------------
// Appends a label-referencing instruction as a LabelRef code element and
// also threads it onto m_pFirstLabelRef so CalculateSize can iterate refs.
// instructionFormat is captured by pointer — it must outlive the StubLinker.
706 VOID StubLinker::EmitLabelRef(CodeLabel* target, const InstructionFormat & instructionFormat, UINT variationCode)
715 LabelRef *pLabelRef = (LabelRef *)(m_quickHeap.Alloc(sizeof(LabelRef)));
716 _ASSERTE(pLabelRef); // m_quickHeap throws an exception rather than returning NULL
717 pLabelRef->m_type = LabelRef::kLabelRef;
// Const is cast away here; the stored InstructionFormat is only queried
// (CanReach/GetSizeOf*/EmitInstruction) by the link phase.
718 pLabelRef->m_pInstructionFormat = (InstructionFormat*)&instructionFormat;
719 pLabelRef->m_variationCode = variationCode;
720 pLabelRef->m_target = target;
722 pLabelRef->m_nextLabelRef = m_pFirstLabelRef;
723 m_pFirstLabelRef = pLabelRef;
725 AppendCodeElement(pLabelRef);
734 //---------------------------------------------------------------
735 // Internal helper routine.
736 //---------------------------------------------------------------
// Returns the most recent code element if it is a CodeRun, else NULL (e.g.
// when the stub is empty or the last element is a LabelRef).
737 CodeRun *StubLinker::GetLastCodeRunIfAny()
746 CodeElement *pLastCodeElem = GetLastCodeElement();
747 if (pLastCodeElem == NULL || pLastCodeElem->m_type != CodeElement::kCodeRun) {
750 return (CodeRun*)pLastCodeElem;
755 //---------------------------------------------------------------
756 // Internal helper routine.
757 //---------------------------------------------------------------
// Allocates an empty CodeRun from the quick heap and appends it to the
// element list, returning it so the caller can start writing bytes.
758 CodeRun *StubLinker::AppendNewEmptyCodeRun()
767 CodeRun *pNewCodeRun = (CodeRun*)(m_quickHeap.Alloc(sizeof(CodeRun)));
768 _ASSERTE(pNewCodeRun); // QuickHeap throws exceptions rather than returning NULL
769 pNewCodeRun->m_type = CodeElement::kCodeRun;
770 pNewCodeRun->m_numcodebytes = 0;
771 AppendCodeElement(pNewCodeRun);
776 //---------------------------------------------------------------
777 // Internal helper routine.
778 //---------------------------------------------------------------
// Pushes an element onto m_pCodeElements. Note the list is built head-first,
// so it holds elements in *reverse* emission order (CalculateSize relies on this).
779 VOID StubLinker::AppendCodeElement(CodeElement *pCodeElement)
788 pCodeElement->m_next = m_pCodeElements;
789 m_pCodeElements = pCodeElement;
794 //---------------------------------------------------------------
795 // Is the current LabelRef's size big enough to reach the target?
796 //---------------------------------------------------------------
794 //---------------------------------------------------------------
795 // Is the current LabelRef's size big enough to reach the target?
// External targets are checked against their absolute address; internal
// targets against the tentative relative displacement computed from the
// layout pass's global offsets.
796 //---------------------------------------------------------------
797 static BOOL LabelCanReach(LabelRef *pLabelRef)
806 InstructionFormat *pIF = pLabelRef->m_pInstructionFormat;
808 if (pLabelRef->m_target->m_fExternal)
810 return pLabelRef->m_pInstructionFormat->CanReach(
811 pLabelRef->m_refsize, pLabelRef->m_variationCode, TRUE, (INT_PTR)pLabelRef->m_target->e.m_pExternalAddress);
815 UINT targetglobaloffset = pLabelRef->m_target->i.m_pCodeRun->m_globaloffset +
816 pLabelRef->m_target->i.m_localOffset;
// The displacement is measured from the instruction's "hot spot" (the point
// the CPU considers the reference origin), not from its first byte.
817 UINT srcglobaloffset = pLabelRef->m_globaloffset +
818 pIF->GetHotSpotOffset(pLabelRef->m_refsize,
819 pLabelRef->m_variationCode);
820 INT offset = (INT)(targetglobaloffset - srcglobaloffset);
822 return pLabelRef->m_pInstructionFormat->CanReach(
823 pLabelRef->m_refsize, pLabelRef->m_variationCode, FALSE, offset);
827 //---------------------------------------------------------------
828 // Generate the actual stub. The returned stub has a refcount of 1.
829 // No other methods (other than the destructor) should be called
830 // after calling Link().
832 // Throws COM+ exception on failure.
833 //---------------------------------------------------------------
// Lays out the emitted elements (CalculateSize), allocates the Stub on pHeap,
// and writes the final bytes (EmitStub). Returns the stub with refcount 1.
// The ReservedStubs holder parks any allocation whose unwind info straddles a
// segment boundary (see EmitStub/EmitUnwindInfo) and retries; parked stubs
// are released when the holder destructs.
834 Stub *StubLinker::Link(LoaderHeap *pHeap, DWORD flags)
836 STANDARD_VM_CONTRACT;
839 int size = CalculateSize(&globalsize);
841 _ASSERTE(!pHeap || pHeap->IsExecutable());
843 StubHolder<Stub> pStub;
845 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
846 StubUnwindInfoSegmentBoundaryReservationList ReservedStubs;
851 pStub = Stub::NewStub(
855 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
856 , UnwindInfoSize(globalsize)
859 ASSERT(pStub != NULL);
861 bool fSuccess = EmitStub(pStub, globalsize, size, pHeap);
863 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
// EmitStub failed due to a segment-boundary straddle: keep this allocation
// alive (so the next attempt lands elsewhere) and loop to allocate again.
870 ReservedStubs.AddStub(pStub);
871 pStub.SuppressRelease();
874 CONSISTENCY_CHECK_MSG(fSuccess, ("EmitStub should always return true"));
878 return pStub.Extract();
// Computes the final layout of the stub. Performs an iterative relaxation:
// tentatively assigns each LabelRef its smallest allowed encoding, lays out
// all elements, then grows any ref that cannot reach its target and repeats
// until stable. On return, every element's m_globaloffset/m_dataoffset is
// final; *pGlobalSize receives the (data-aligned) code size and the return
// value is code + data size.
881 int StubLinker::CalculateSize(int* pGlobalSize)
890 _ASSERTE(pGlobalSize);
892 #if defined(_DEBUG) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
// Debug-only: materialize the pending unwind-info check subfunction before layout.
893 if (m_pUnwindInfoCheckLabel)
895 EmitLabel(m_pUnwindInfoCheckLabel);
896 EmitUnwindInfoCheckSubfunction();
897 m_pUnwindInfoCheckLabel = NULL;
902 // Don't want any undefined labels
903 for (CodeLabel *pCodeLabel = m_pFirstCodeLabel;
905 pCodeLabel = pCodeLabel->m_next) {
906 if ((!(pCodeLabel->m_fExternal)) && pCodeLabel->i.m_pCodeRun == NULL) {
907 _ASSERTE(!"Forgot to define a label before asking StubLinker to link.");
912 //-------------------------------------------------------------------
913 // Tentatively set all of the labelref sizes to their smallest possible
915 //-------------------------------------------------------------------
916 for (LabelRef *pLabelRef = m_pFirstLabelRef;
918 pLabelRef = pLabelRef->m_nextLabelRef) {
// m_allowedSizes is a bitmask of permitted encodings; pick the lowest set bit.
920 for (UINT bitmask = 1; bitmask <= InstructionFormat::kMax; bitmask = bitmask << 1) {
921 if (pLabelRef->m_pInstructionFormat->m_allowedSizes & bitmask) {
922 pLabelRef->m_refsize = bitmask;
931 BOOL fSomethingChanged;
933 fSomethingChanged = FALSE;
936 // Layout each code element.
939 CodeElement *pCodeElem;
940 for (pCodeElem = m_pCodeElements; pCodeElem; pCodeElem = pCodeElem->m_next) {
942 switch (pCodeElem->m_type) {
943 case CodeElement::kCodeRun:
944 globalsize += ((CodeRun*)pCodeElem)->m_numcodebytes;
947 case CodeElement::kLabelRef: {
948 LabelRef *pLabelRef = (LabelRef*)pCodeElem;
949 globalsize += pLabelRef->m_pInstructionFormat->GetSizeOfInstruction( pLabelRef->m_refsize,
950 pLabelRef->m_variationCode );
951 datasize += pLabelRef->m_pInstructionFormat->GetSizeOfData( pLabelRef->m_refsize,
952 pLabelRef->m_variationCode );
960 // Record a temporary global offset; this is actually
961 // wrong by a fixed value. We'll fix up after we know the
962 // size of the entire stub.
963 pCodeElem->m_globaloffset = 0 - globalsize;
965 // also record the data offset. Note the link-list we walk is in
966 // *reverse* order so we visit the last instruction first
967 // so what we record now is in fact the offset from the *end* of
968 // the data block. We fix it up later.
969 pCodeElem->m_dataoffset = 0 - datasize;
972 // Now fix up the global offsets.
973 for (pCodeElem = m_pCodeElements; pCodeElem; pCodeElem = pCodeElem->m_next) {
974 pCodeElem->m_globaloffset += globalsize;
975 pCodeElem->m_dataoffset += datasize;
979 // Now, iterate thru the LabelRef's and check if any of them
980 // have to be resized.
981 for (LabelRef *pLabelRef = m_pFirstLabelRef;
983 pLabelRef = pLabelRef->m_nextLabelRef) {
986 if (!LabelCanReach(pLabelRef)) {
987 fSomethingChanged = TRUE;
// Start searching at the next size up from the current (inadequate) one.
989 UINT bitmask = pLabelRef->m_refsize << 1;
990 // Find the next largest size.
991 // (we could be smarter about this and eliminate intermediate
992 // sizes based on the tentative offset.)
993 for (; bitmask <= InstructionFormat::kMax; bitmask = bitmask << 1) {
994 if (pLabelRef->m_pInstructionFormat->m_allowedSizes & bitmask) {
995 pLabelRef->m_refsize = bitmask;
1000 if (bitmask > InstructionFormat::kMax) {
1001 // CANNOT REACH target even with kMax
1002 _ASSERTE(!"Stub instruction cannot reach target: must choose a different instruction!");
1009 } while (fSomethingChanged); // Keep iterating until all LabelRef's can reach
1012 // We now have the correct layout write out the stub.
1014 // Compute stub code+data size after aligning data correctly
1015 if(globalsize % DATA_ALIGNMENT)
1016 globalsize += (DATA_ALIGNMENT - (globalsize % DATA_ALIGNMENT));
1018 *pGlobalSize = globalsize;
1019 return globalsize + datasize;
// Writes the laid-out elements into the stub's blob via a writable mapping
// (the stub itself is execute-protected), resolves every LabelRef fixup,
// zero-fills the code tail up to the data area, stamps stub metadata
// (instantiated MethodDesc or patch offset), emits unwind info when present,
// and flushes the instruction cache. Returns true on success; the only
// failure path visible here is EmitUnwindInfo's segment-straddle case, which
// Link() handles by retrying (see Link).
1022 bool StubLinker::EmitStub(Stub* pStub, int globalsize, int totalSize, LoaderHeap* pHeap)
1024 STANDARD_VM_CONTRACT;
1026 BYTE *pCode = (BYTE*)(pStub->GetBlob()); // RX (final) address, used for fixup math
1028 ExecutableWriterHolder<Stub> stubWriterHolder(pStub, sizeof(Stub) + totalSize);
1029 Stub *pStubRW = stubWriterHolder.GetRW();
1031 BYTE *pCodeRW = (BYTE*)(pStubRW->GetBlob()); // RW alias actually written to
1032 BYTE *pDataRW = pCodeRW+globalsize; // start of data area
1034 int lastCodeOffset = 0;
1036 // Write out each code element.
1037 for (CodeElement* pCodeElem = m_pCodeElements; pCodeElem; pCodeElem = pCodeElem->m_next) {
1040 switch (pCodeElem->m_type) {
1041 case CodeElement::kCodeRun:
1042 CopyMemory(pCodeRW + pCodeElem->m_globaloffset,
1043 ((CodeRun*)pCodeElem)->m_codebytes,
1044 ((CodeRun*)pCodeElem)->m_numcodebytes);
1045 currOffset = pCodeElem->m_globaloffset + ((CodeRun *)pCodeElem)->m_numcodebytes;
1048 case CodeElement::kLabelRef: {
1049 LabelRef *pLabelRef = (LabelRef*)pCodeElem;
1050 InstructionFormat *pIF = pLabelRef->m_pInstructionFormat;
// Displacements are computed against the final RX addresses, not the RW alias.
1053 LPBYTE srcglobaladdr = pCode +
1054 pLabelRef->m_globaloffset +
1055 pIF->GetHotSpotOffset(pLabelRef->m_refsize,
1056 pLabelRef->m_variationCode);
1057 LPBYTE targetglobaladdr;
1058 if (!(pLabelRef->m_target->m_fExternal)) {
1059 targetglobaladdr = pCode +
1060 pLabelRef->m_target->i.m_pCodeRun->m_globaloffset +
1061 pLabelRef->m_target->i.m_localOffset;
1063 targetglobaladdr = (LPBYTE)(pLabelRef->m_target->e.m_pExternalAddress);
// Absolute labels get the raw address; others get a hot-spot-relative delta.
1065 if ((pLabelRef->m_target->m_fAbsolute)) {
1066 fixupval = (__int64)(size_t)targetglobaladdr;
1068 fixupval = (__int64)(targetglobaladdr - srcglobaladdr);
1070 pLabelRef->m_pInstructionFormat->EmitInstruction(
1071 pLabelRef->m_refsize,
1073 pCode + pCodeElem->m_globaloffset,
1074 pCodeRW + pCodeElem->m_globaloffset,
1075 pLabelRef->m_variationCode,
1076 pDataRW + pCodeElem->m_dataoffset);
1079 pCodeElem->m_globaloffset +
1080 pLabelRef->m_pInstructionFormat->GetSizeOfInstruction( pLabelRef->m_refsize,
1081 pLabelRef->m_variationCode );
// Elements are visited in reverse emission order, so track the max end offset.
1088 lastCodeOffset = (currOffset > lastCodeOffset) ? currOffset : lastCodeOffset;
1091 // Fill in zeros at the end, if necessary
1092 if (lastCodeOffset < globalsize)
1093 ZeroMemory(pCodeRW + lastCodeOffset, globalsize - lastCodeOffset);
1096 // Set additional stub data.
1097 // - Fill in the target method for the Instantiating stub.
1099 // - Fill in patch offset, if we have one
1100 // Note that these offsets are relative to the start of the stub,
1101 // not the code, so you'll have to add sizeof(Stub) to get to the
1103 if (pStubRW->IsInstantiatingStub())
1105 _ASSERTE(m_pTargetMethod != NULL);
1106 _ASSERTE(m_pPatchLabel == NULL);
1107 pStubRW->SetInstantiatedMethodDesc(m_pTargetMethod);
1109 LOG((LF_CORDB, LL_INFO100, "SL::ES: InstantiatedMethod fd:0x%x\n",
1110 pStub->GetInstantiatedMethodDesc()));
1112 else if (m_pPatchLabel != NULL)
1114 UINT32 uLabelOffset = GetLabelOffset(m_pPatchLabel);
1115 _ASSERTE(FitsIn<USHORT>(uLabelOffset));
1116 pStubRW->SetPatchOffset(static_cast<USHORT>(uLabelOffset));
1118 LOG((LF_CORDB, LL_INFO100, "SL::ES: patch offset:0x%x\n",
1119 pStub->GetPatchOffset()));
1122 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
1123 if (pStub->HasUnwindInfo())
1125 if (!EmitUnwindInfo(pStub, pStubRW, globalsize, pHeap))
1128 #endif // STUBLINKER_GENERATES_UNWIND_INFO
// Required before executing freshly written code on architectures with
// incoherent I/D caches; harmless elsewhere.
1132 FlushInstructionCache(GetCurrentProcess(), pCode, globalsize);
1135 _ASSERTE(m_fDataOnly || DbgIsExecutable(pCode, globalsize));
1141 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
1142 #if defined(TARGET_AMD64)
1144 // See RtlVirtualUnwind in base\ntos\rtl\amd64\exdsptch.c
1144 // See RtlVirtualUnwind in base\ntos\rtl\amd64\exdsptch.c
// Compile-time proof that the kRAX..kR15 register ordinals match the OS
// unwinder's register numbering, i.e. the index of each Rxx slot in CONTEXT
// relative to Rax. UNWIND_CODE::OpInfo encodes registers with these numbers.
1146 static_assert_no_msg(kRAX == (offsetof(CONTEXT, Rax) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1147 static_assert_no_msg(kRCX == (offsetof(CONTEXT, Rcx) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1148 static_assert_no_msg(kRDX == (offsetof(CONTEXT, Rdx) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1149 static_assert_no_msg(kRBX == (offsetof(CONTEXT, Rbx) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1150 static_assert_no_msg(kRBP == (offsetof(CONTEXT, Rbp) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1151 static_assert_no_msg(kRSI == (offsetof(CONTEXT, Rsi) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1152 static_assert_no_msg(kRDI == (offsetof(CONTEXT, Rdi) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1153 static_assert_no_msg(kR8 == (offsetof(CONTEXT, R8 ) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1154 static_assert_no_msg(kR9 == (offsetof(CONTEXT, R9 ) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1155 static_assert_no_msg(kR10 == (offsetof(CONTEXT, R10) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1156 static_assert_no_msg(kR11 == (offsetof(CONTEXT, R11) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1157 static_assert_no_msg(kR12 == (offsetof(CONTEXT, R12) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1158 static_assert_no_msg(kR13 == (offsetof(CONTEXT, R13) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1159 static_assert_no_msg(kR14 == (offsetof(CONTEXT, R14) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
1160 static_assert_no_msg(kR15 == (offsetof(CONTEXT, R15) - offsetof(CONTEXT, Rax)) / sizeof(ULONG64));
// Records a nonvolatile-register save at an SP-relative offset. Uses the
// compact UWOP_SAVE_NONVOL (scaled 8-byte FrameOffset in one extra slot)
// when the scaled offset round-trips; otherwise UWOP_SAVE_NONVOL_FAR with
// the full 32-bit offset split across two extra slots (low word, high word).
1162 VOID StubLinker::UnwindSavedReg (UCHAR reg, ULONG SPRelativeOffset)
1164 USHORT FrameOffset = (USHORT)(SPRelativeOffset / 8);
1166 if ((ULONG)FrameOffset == SPRelativeOffset)
1168 UNWIND_CODE *pUnwindCode = AllocUnwindInfo(UWOP_SAVE_NONVOL);
1169 pUnwindCode->OpInfo = reg;
1170 pUnwindCode[1].FrameOffset = FrameOffset;
1174 UNWIND_CODE *pUnwindCode = AllocUnwindInfo(UWOP_SAVE_NONVOL_FAR);
1175 pUnwindCode->OpInfo = reg;
1176 pUnwindCode[1].FrameOffset = (USHORT)SPRelativeOffset;
1177 pUnwindCode[2].FrameOffset = (USHORT)(SPRelativeOffset >> 16);
// Records a push of a nonvolatile register: grows the tracked frame size by
// one slot and emits UWOP_PUSH_NONVOL. Pushes after the frame pointer is
// established are not recorded (the elided early-return handles that case).
1181 VOID StubLinker::UnwindPushedReg (UCHAR reg)
1183 m_stackSize += sizeof(void*);
1185 if (m_fHaveFramePointer)
1188 UNWIND_CODE *pUnwindCode = AllocUnwindInfo(UWOP_PUSH_NONVOL);
1189 pUnwindCode->OpInfo = reg;
// Records a stack allocation (sub rsp, N): adds to the overflow-checked
// frame-size tally, then emits UWOP_ALLOC_SMALL when the size encodes as
// (OpInfo*8 + 8), else UWOP_ALLOC_LARGE with one extra slot (scaled) or two
// (full 32-bit) depending on whether the size fits a USHORT. Not recorded
// once a frame pointer is in place (elided early-return).
1192 VOID StubLinker::UnwindAllocStack (SHORT FrameSizeIncrement)
1200 if (! ClrSafeInt<SHORT>::addition(m_stackSize, FrameSizeIncrement, m_stackSize))
1201 COMPlusThrowArithmetic();
1203 if (m_fHaveFramePointer)
1206 UCHAR OpInfo = (UCHAR)((FrameSizeIncrement - 8) / 8);
1208 if (OpInfo*8 + 8 == FrameSizeIncrement)
1210 UNWIND_CODE *pUnwindCode = AllocUnwindInfo(UWOP_ALLOC_SMALL);
1211 pUnwindCode->OpInfo = OpInfo;
1215 USHORT FrameOffset = (USHORT)FrameSizeIncrement;
1216 bool fNeedExtraSlot = ((ULONG)FrameOffset != (ULONG)FrameSizeIncrement);
1218 UNWIND_CODE *pUnwindCode = AllocUnwindInfo(UWOP_ALLOC_LARGE, fNeedExtraSlot ? 1 : 0);
1220 pUnwindCode->OpInfo = fNeedExtraSlot ? 1 : 0;
1222 pUnwindCode[1].FrameOffset = FrameOffset;
1225 pUnwindCode[2].FrameOffset = (USHORT)(FrameSizeIncrement >> 16);
// Records establishment of a frame pointer (UWOP_SET_FPREG). May only happen
// once per stub; afterwards pushes/allocs are measured against the FP and
// the other Unwind* methods stop recording.
1229 VOID StubLinker::UnwindSetFramePointer (UCHAR reg)
1231 _ASSERTE(!m_fHaveFramePointer);
1233 UNWIND_CODE *pUnwindCode = AllocUnwindInfo(UWOP_SET_FPREG);
1234 pUnwindCode->OpInfo = reg;
1236 m_fHaveFramePointer = TRUE;
// Allocates an IntermediateUnwindInfo node (with room for the op's intrinsic
// extra slots plus nExtraSlots more), queues it on m_pUnwindInfoList, tags
// the code position (current CodeRun + offset) the op applies to, and
// returns the first UNWIND_CODE slot with UnwindOp pre-filled. Caller sets
// OpInfo and any extra-slot payload.
1239 UNWIND_CODE *StubLinker::AllocUnwindInfo (UCHAR Op, UCHAR nExtraSlots /*= 0*/)
1247 _ASSERTE(Op < sizeof(UnwindOpExtraSlotTable));
1249 UCHAR nSlotsAlloc = UnwindOpExtraSlotTable[Op] + nExtraSlots;
1251 IntermediateUnwindInfo *pUnwindInfo = (IntermediateUnwindInfo*)m_quickHeap.Alloc( sizeof(IntermediateUnwindInfo)
1252 + nSlotsAlloc * sizeof(UNWIND_CODE));
// +1 accounts for the UNWIND_CODE already embedded in IntermediateUnwindInfo.
1253 m_nUnwindSlots += 1 + nSlotsAlloc;
1255 pUnwindInfo->pNext = m_pUnwindInfoList;
1256 m_pUnwindInfoList = pUnwindInfo;
1258 UNWIND_CODE *pUnwindCode = &pUnwindInfo->rgUnwindCode[0];
1260 pUnwindCode->UnwindOp = Op;
// The op is anchored to the current emission point; a CodeRun must exist.
1262 CodeRun *pCodeRun = GetLastCodeRunIfAny();
1263 _ASSERTE(pCodeRun != NULL);
1265 pUnwindInfo->pCodeRun = pCodeRun;
1266 pUnwindInfo->LocalOffset = pCodeRun->m_numcodebytes;
1268 EmitUnwindInfoCheck();
1272 #endif // defined(TARGET_AMD64)
// In/out argument bundle for FindBlockCallback (members elided from this
// view; usage in EmitUnwindInfo shows pCode in, pBlockBase/cbBlockSize out).
1274 struct FindBlockArgs
// EnumPageRegions callback: when the probe address (pArgs->pCode) lies inside
// this region, reports the region's base and reserved size back through pArgs.
1281 bool FindBlockCallback (PTR_VOID pvArgs, PTR_VOID pvAllocationBase, SIZE_T cbReserved)
1290 FindBlockArgs* pArgs = (FindBlockArgs*)pvArgs;
1291 if (pArgs->pCode >= pvAllocationBase && (pArgs->pCode < ((BYTE *)pvAllocationBase + cbReserved)))
1293 pArgs->pBlockBase = (BYTE*)pvAllocationBase;
1294 pArgs->cbBlockSize = cbReserved;
// Emits OS unwind data for a freshly linked stub: fills in the
// StubUnwindInfoHeader (UNWIND_INFO + RUNTIME_FUNCTION) that precedes the
// stub, then registers the stub's heap segment with the OS via a dynamic
// function table and publishes it to the ETW unwind-info table.
//   pStubRX    - stub via its executable (read-execute) mapping
//   pStubRW    - same stub via its writable mapping
//   globalsize - total emitted code size in bytes
//   pHeap      - loader heap the stub was allocated from
// Returns false to ask the caller to retry with a new allocation when the
// stub straddles a segment boundary (see comment below); presumably returns
// true on success — the return statements are outside this view, so confirm
// against the full source.
1301 bool StubLinker::EmitUnwindInfo(Stub* pStubRX, Stub* pStubRW, int globalsize, LoaderHeap* pHeap)
1303 STANDARD_VM_CONTRACT;
1305 BYTE *pCode = (BYTE*)(pStubRX->GetEntryPoint());
1308 // Determine the lower bound of the address space containing the stub.
1311 FindBlockArgs findBlockArgs;
1312 findBlockArgs.pCode = pCode;
1313 findBlockArgs.pBlockBase = NULL;
1315 pHeap->EnumPageRegions(&FindBlockCallback, &findBlockArgs);
1317 if (findBlockArgs.pBlockBase == NULL)
1319 // REVISIT_TODO better exception
1323 BYTE *pbRegionBaseAddress = findBlockArgs.pBlockBase;
// Segment size is configurable (debug/test hook) in one build flavor,
// fixed at DYNAMIC_FUNCTION_TABLE_MAX_RANGE in the other.
1326 static SIZE_T MaxSegmentSize = -1;
1327 if (MaxSegmentSize == (SIZE_T)-1)
1328 MaxSegmentSize = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_MaxStubUnwindInfoSegmentSize, DYNAMIC_FUNCTION_TABLE_MAX_RANGE);
1330 const SIZE_T MaxSegmentSize = DYNAMIC_FUNCTION_TABLE_MAX_RANGE;
1334 // The RUNTIME_FUNCTION offsets are ULONGs. If the region size is >
1335 // UINT32_MAX, then we'll shift the base address to the next 4gb and
1336 // register a separate function table.
1338 // But...RtlInstallFunctionTableCallback has a 2gb restriction...so
1339 // make that INT32_MAX.
1342 StubUnwindInfoHeader *pHeader = pStubRW->GetUnwindInfoHeader();
1343 _ASSERTE(IS_ALIGNED(pHeader, sizeof(void*)));
// Walk pbBaseAddress forward in MaxSegmentSize steps until the header's
// offset from it fits in a function-table segment.
1345 BYTE *pbBaseAddress = pbRegionBaseAddress;
1347 while ((size_t)((BYTE*)pHeader - pbBaseAddress) > MaxSegmentSize)
1349 pbBaseAddress += MaxSegmentSize;
1353 // If the unwind info/code straddle a 2gb boundary, then we're stuck.
1354 // Rather than add a lot more bit twiddling code to deal with this
1355 // exceptionally rare case, we'll signal the caller to keep this allocation
1356 // temporarily and allocate another. This repeats until we eventually get
1357 // an allocation that doesn't straddle a 2gb boundary. Afterwards the old
1358 // allocations are freed.
1361 if ((size_t)(pCode + globalsize - pbBaseAddress) > MaxSegmentSize)
1366 // Ensure that the first RUNTIME_FUNCTION struct ends up pointer aligned,
1367 // so that the StubUnwindInfoHeader struct is aligned. UNWIND_INFO
1368 // includes one UNWIND_CODE.
1369 _ASSERTE(IS_ALIGNED(pStubRX, sizeof(void*)));
1370 _ASSERTE(0 == (offsetof(StubUnwindInfoHeader, FunctionEntry) % sizeof(void*)));
1372 StubUnwindInfoHeader * pUnwindInfoHeader = pStubRW->GetUnwindInfoHeader();
// --- AMD64 path: translate the IntermediateUnwindInfo list into the
// Windows x64 UNWIND_INFO/UNWIND_CODE layout. ---
1376 UNWIND_CODE *pDestUnwindCode = &pUnwindInfoHeader->UnwindInfo.UnwindCode[0];
1378 UNWIND_CODE *pDestUnwindCodeLimit = (UNWIND_CODE*)pStubRW->GetUnwindInfoHeaderSuffix();
1381 UINT FrameRegister = 0;
1384 // Resolve the unwind operation offsets, and fill in the UNWIND_INFO and
1385 // RUNTIME_FUNCTION structs preceding the stub. The unwind codes are recorded
1386 // in decreasing address order.
1389 for (IntermediateUnwindInfo *pUnwindInfoList = m_pUnwindInfoList; pUnwindInfoList != NULL; pUnwindInfoList = pUnwindInfoList->pNext)
1391 UNWIND_CODE *pUnwindCode = &pUnwindInfoList->rgUnwindCode[0];
1392 UCHAR op = pUnwindCode[0].UnwindOp;
1394 if (UWOP_SET_FPREG == op)
1396 FrameRegister = pUnwindCode[0].OpInfo;
1400 // Compute number of slots used by this encoding.
1405 if (UWOP_ALLOC_LARGE == op)
1407 nSlots = 2 + pUnwindCode[0].OpInfo;
1411 _ASSERTE(UnwindOpExtraSlotTable[op] != (UCHAR)-1);
1412 nSlots = 1 + UnwindOpExtraSlotTable[op];
1416 // Compute offset and ensure that it will fit in the encoding.
1419 SIZE_T CodeOffset = pUnwindInfoList->pCodeRun->m_globaloffset
1420 + pUnwindInfoList->LocalOffset;
1422 if (CodeOffset != (SIZE_T)(UCHAR)CodeOffset)
1424 // REVISIT_TODO better exception
1429 // Copy the encoding data, overwrite the new offset, and advance
1430 // to the next encoding.
1433 _ASSERTE(pDestUnwindCode + nSlots <= pDestUnwindCodeLimit);
1435 CopyMemory(pDestUnwindCode, pUnwindCode, nSlots * sizeof(UNWIND_CODE));
1437 pDestUnwindCode->CodeOffset = (UCHAR)CodeOffset;
1439 pDestUnwindCode += nSlots;
1443 // Fill in the UNWIND_INFO struct
1445 UNWIND_INFO *pUnwindInfo = &pUnwindInfoHeader->UnwindInfo;
1446 _ASSERTE(IS_ALIGNED(pUnwindInfo, sizeof(ULONG)));
1448 // PrologueSize may be 0 if all unwind directives at offset 0.
1449 SIZE_T PrologueSize = m_pUnwindInfoList->pCodeRun->m_globaloffset
1450 + m_pUnwindInfoList->LocalOffset;
1452 UINT nEntryPointSlots = m_nUnwindSlots;
1454 if ( PrologueSize != (SIZE_T)(UCHAR)PrologueSize
1455 || nEntryPointSlots > UCHAR_MAX)
1457 // REVISIT_TODO better exception
1461 _ASSERTE(nEntryPointSlots);
1463 pUnwindInfo->Version = 1;
1464 pUnwindInfo->Flags = 0;
1465 pUnwindInfo->SizeOfProlog = (UCHAR)PrologueSize;
1466 pUnwindInfo->CountOfUnwindCodes = (UCHAR)nEntryPointSlots;
1467 pUnwindInfo->FrameRegister = FrameRegister;
1468 pUnwindInfo->FrameOffset = 0;
1471 // Fill in the RUNTIME_FUNCTION struct for this prologue.
1473 PT_RUNTIME_FUNCTION pCurFunction = &pUnwindInfoHeader->FunctionEntry;
1474 _ASSERTE(IS_ALIGNED(pCurFunction, sizeof(ULONG)));
// All RUNTIME_FUNCTION fields are 32-bit offsets from pbBaseAddress;
// overflow-checked arithmetic (S_UINT32) guards each computation.
1476 S_UINT32 sBeginAddress = S_BYTEPTR(pCode) - S_BYTEPTR(pbBaseAddress);
1477 if (sBeginAddress.IsOverflow())
1478 COMPlusThrowArithmetic();
1479 pCurFunction->BeginAddress = sBeginAddress.Value();
1481 S_UINT32 sEndAddress = S_BYTEPTR(pCode) + S_BYTEPTR(globalsize) - S_BYTEPTR(pbBaseAddress);
1482 if (sEndAddress.IsOverflow())
1483 COMPlusThrowArithmetic();
1484 pCurFunction->EndAddress = sEndAddress.Value();
1486 S_UINT32 sTemp = S_BYTEPTR(pUnwindInfo) - S_BYTEPTR(pbBaseAddress);
1487 if (sTemp.IsOverflow())
1488 COMPlusThrowArithmetic();
1489 RUNTIME_FUNCTION__SetUnwindInfoAddress(pCurFunction, sTemp.Value());
1490 #elif defined(TARGET_ARM)
// --- ARM (Thumb) path: emit compactly-encoded .xdata unwind bytes. ---
1492 // Fill in the RUNTIME_FUNCTION struct for this prologue.
1494 UNWIND_INFO *pUnwindInfo = &pUnwindInfoHeader->UnwindInfo;
1496 PT_RUNTIME_FUNCTION pCurFunction = &pUnwindInfoHeader->FunctionEntry;
1497 _ASSERTE(IS_ALIGNED(pCurFunction, sizeof(ULONG)));
1499 S_UINT32 sBeginAddress = S_BYTEPTR(pCode) - S_BYTEPTR(pbBaseAddress);
1500 if (sBeginAddress.IsOverflow())
1501 COMPlusThrowArithmetic();
1502 RUNTIME_FUNCTION__SetBeginAddress(pCurFunction, sBeginAddress.Value());
1504 S_UINT32 sTemp = S_BYTEPTR(pUnwindInfo) - S_BYTEPTR(pbBaseAddress);
1505 if (sTemp.IsOverflow())
1506 COMPlusThrowArithmetic();
1507 RUNTIME_FUNCTION__SetUnwindInfoAddress(pCurFunction, sTemp.Value());
1509 //Get the exact function Length. Cannot use globalsize as it is explicitly made to be
1511 CodeRun *pLastCodeElem = GetLastCodeRunIfAny();
1512 _ASSERTE(pLastCodeElem != NULL);
1514 int functionLength = pLastCodeElem->m_numcodebytes + pLastCodeElem->m_globaloffset;
1516 // cannot encode functionLength greater than (2 * 0xFFFFF)
1517 if (functionLength > 2 * 0xFFFFF)
1518 COMPlusThrowArithmetic();
1520 _ASSERTE(functionLength <= globalsize);
// Unwind codes start right after the leading DWORD header word.
1522 BYTE * pUnwindCodes = (BYTE *)pUnwindInfo + sizeof(DWORD);
1524 // Not emitting compact unwind info as there are very few (4) dynamic stubs with unwind info.
1525 // Benefit of the optimization does not outweigh the cost of adding the code for it.
1527 //UnwindInfo for prolog
1528 if (m_cbStackFrame != 0)
1530 if(m_cbStackFrame < 512)
1532 *pUnwindCodes++ = (BYTE)0xF8; // 16-bit sub/add sp,#x
1533 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 18);
1534 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 10);
1535 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 2);
1539 *pUnwindCodes++ = (BYTE)0xFA; // 32-bit sub/add sp,#x
1540 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 18);
1541 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 10);
1542 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 2);
1545 if(m_cbStackFrame >= 4096)
1547 // r4 register is used as param to checkStack function and must have been saved in prolog
1548 _ASSERTE(m_cCalleeSavedRegs > 0);
1549 *pUnwindCodes++ = (BYTE)0xFB; // nop 16 bit for bl r12
1550 *pUnwindCodes++ = (BYTE)0xFC; // nop 32 bit for movt r12, checkStack
1551 *pUnwindCodes++ = (BYTE)0xFC; // nop 32 bit for movw r12, checkStack
1553 // Ensure that mov r4, m_cbStackFrame fits in a 32-bit instruction
1554 if(m_cbStackFrame > 65535)
1555 COMPlusThrow(kNotSupportedException);
1556 *pUnwindCodes++ = (BYTE)0xFC; // nop 32 bit for mov r4, m_cbStackFrame
1560 // Unwind info generated will be incorrect when m_cCalleeSavedRegs = 0.
1561 // The unwind code will say that the size of push/pop instruction
1562 // size is 16bits when actually the opcode generated by
1563 // ThumbEmitPop & ThumbEmitPush will be 32bits.
1564 // Currently no stubs have m_cCalleeSavedRegs as 0
1565 // therefore just adding the assert.
1566 _ASSERTE(m_cCalleeSavedRegs > 0);
1568 if (m_cCalleeSavedRegs <= 4)
1570 *pUnwindCodes++ = (BYTE)(0xD4 + (m_cCalleeSavedRegs - 1)); // push/pop {r4-rX}
1574 _ASSERTE(m_cCalleeSavedRegs <= 8);
1575 *pUnwindCodes++ = (BYTE)(0xDC + (m_cCalleeSavedRegs - 5)); // push/pop {r4-rX}
1580 *pUnwindCodes++ = (BYTE)0x04; // push {r0-r3} / add sp,#16
1581 *pUnwindCodes++ = (BYTE)0xFD; // bx lr
1585 *pUnwindCodes++ = (BYTE)0xFF; // end
1588 ptrdiff_t epilogUnwindCodeIndex = 0;
1590 //epilog differs from prolog
1591 if(m_cbStackFrame >= 4096)
1593 //Index of the first unwind code of the epilog
1594 epilogUnwindCodeIndex = pUnwindCodes - (BYTE *)pUnwindInfo - sizeof(DWORD);
1596 *pUnwindCodes++ = (BYTE)0xF8; // sub/add sp,#x
1597 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 18);
1598 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 10);
1599 *pUnwindCodes++ = (BYTE)(m_cbStackFrame >> 2);
1601 if (m_cCalleeSavedRegs <= 4)
1603 *pUnwindCodes++ = (BYTE)(0xD4 + (m_cCalleeSavedRegs - 1)); // push/pop {r4-rX}
1607 *pUnwindCodes++ = (BYTE)(0xDC + (m_cCalleeSavedRegs - 5)); // push/pop {r4-rX}
1612 *pUnwindCodes++ = (BYTE)0x04; // push {r0-r3} / add sp,#16
1613 *pUnwindCodes++ = (BYTE)0xFD; // bx lr
1617 *pUnwindCodes++ = (BYTE)0xFF; // end
1622 // Number of 32-bit unwind codes
1623 size_t codeWordsCount = (ALIGN_UP((size_t)pUnwindCodes, sizeof(void*)) - (size_t)pUnwindInfo - sizeof(DWORD))/4;
1625 _ASSERTE(epilogUnwindCodeIndex < 32);
1627 //Check that MAX_UNWIND_CODE_WORDS is sufficient to store all unwind Codes
1628 _ASSERTE(codeWordsCount <= MAX_UNWIND_CODE_WORDS);
// Pack the .xdata header word: function length, epilog index, code words.
1630 *(DWORD *)pUnwindInfo =
1631 ((functionLength) / 2) |
1633 ((int)epilogUnwindCodeIndex << 23)|
1634 ((int)codeWordsCount << 28);
1636 #elif defined(TARGET_ARM64)
// --- ARM64 path: emit .xdata unwind codes shared by prolog and epilog. ---
1639 // If EmitProlog isn't called. This is a leaf function which doesn't need any unwindInfo
1640 T_RUNTIME_FUNCTION *pCurFunction = NULL;
1646 // Fill in the RUNTIME_FUNCTION struct for this prologue.
1648 UNWIND_INFO *pUnwindInfo = &(pUnwindInfoHeader->UnwindInfo);
1650 T_RUNTIME_FUNCTION *pCurFunction = &(pUnwindInfoHeader->FunctionEntry);
1652 _ASSERTE(IS_ALIGNED(pCurFunction, sizeof(void*)));
1654 S_UINT32 sBeginAddress = S_BYTEPTR(pCode) - S_BYTEPTR(pbBaseAddress);
1655 if (sBeginAddress.IsOverflow())
1656 COMPlusThrowArithmetic();
1658 S_UINT32 sTemp = S_BYTEPTR(pUnwindInfo) - S_BYTEPTR(pbBaseAddress);
1659 if (sTemp.IsOverflow())
1660 COMPlusThrowArithmetic();
1662 RUNTIME_FUNCTION__SetBeginAddress(pCurFunction, sBeginAddress.Value());
1663 RUNTIME_FUNCTION__SetUnwindInfoAddress(pCurFunction, sTemp.Value());
1665 CodeRun *pLastCodeElem = GetLastCodeRunIfAny();
1666 _ASSERTE(pLastCodeElem != NULL);
1668 int functionLength = pLastCodeElem->m_numcodebytes + pLastCodeElem->m_globaloffset;
1670 // .xdata has 18 bits for function length and it is to store the total length of the function in bytes, divided by 4
1671 // If the function is larger than 1M, then multiple pdata and xdata records must be used, which we don't support right now.
1672 if (functionLength > 4 * 0x3FFFF)
1673 COMPlusThrowArithmetic();
1675 _ASSERTE(functionLength <= globalsize);
1677 // No support for extended code words and/or extended epilog.
1678 // ASSERTION: first 10 bits of the pUnwindInfo, which holds the #codewords and #epilogcount, cannot be 0
1679 // And no space for exception scope data also means that no support for exceptions for the stubs
1680 // generated with this stublinker.
1681 BYTE * pUnwindCodes = (BYTE *)pUnwindInfo + sizeof(DWORD);
1684 // Emitting the unwind codes:
1685 // The unwind codes are emitted in Epilog order.
1687 // 6. Integer argument registers
1688 // Although we might be saving the argument registers in the prolog we don't need
1689 // to report them to the OS. (they are not expressible anyways)
1691 // 5. Floating point argument registers:
1692 // Similar to Integer argument registers, no reporting
1695 // 4. Set the frame pointer
1696 // ASSUMPTION: none of the Stubs generated with this stublinker change SP value outside of epilog and prolog
1697 // when that is the case we can skip reporting setting up the frame pointer
1699 // With skipping Step #4, #5 and #6 Prolog and Epilog becomes reversible. so they can share the unwind codes
1700 int epilogUnwindCodeIndex = 0;
1702 unsigned cStackFrameSizeInQWORDs = GetStackFrameSize()/16;
1705 *pUnwindCodes++ = (BYTE)(0x40 | (m_cbStackSpace>>3));
1707 // 2. Callee-saved registers
1709 if (m_cCalleeSavedRegs > 0)
1711 unsigned offset = 2 + m_cbStackSpace/8; // 2 is for fp,lr
1712 if ((m_cCalleeSavedRegs %2) ==1)
1715 *pUnwindCodes++ = (BYTE) (0xD0 | ((m_cCalleeSavedRegs-1)>>2));
1716 *pUnwindCodes++ = (BYTE) ((BYTE)((m_cCalleeSavedRegs-1) << 6) | ((offset + m_cCalleeSavedRegs - 1) & 0x3F));
1718 for (int i=(m_cCalleeSavedRegs/2)*2-2; i>=0; i-=2)
1723 *pUnwindCodes++ = 0xE6;
1728 *pUnwindCodes++ = 0xC8;
1729 *pUnwindCodes++ = (BYTE)(offset & 0x3F);
1736 // EmitProlog is supposed to reject frames larger than 504 bytes.
1737 // Assert that here.
1738 _ASSERTE(cStackFrameSizeInQWORDs <= 0x3F);
1739 if (cStackFrameSizeInQWORDs <= 0x1F)
1742 *pUnwindCodes++ = (BYTE)(cStackFrameSizeInQWORDs);
1747 *pUnwindCodes++ = (BYTE)(0xC0 | (cStackFrameSizeInQWORDs >> 8));
1748 *pUnwindCodes++ = (BYTE)(cStackFrameSizeInQWORDs);
// 0xE4 == "end" unwind code terminating the sequence.
1752 *pUnwindCodes++ = 0xE4;
1754 // Number of 32-bit unwind codes
1755 int codeWordsCount = (int)(ALIGN_UP((size_t)pUnwindCodes, sizeof(DWORD)) - (size_t)pUnwindInfo - sizeof(DWORD))/4;
1757 //Check that MAX_UNWIND_CODE_WORDS is sufficient to store all unwind Codes
1758 _ASSERTE(codeWordsCount <= MAX_UNWIND_CODE_WORDS);
1760 *(DWORD *)pUnwindInfo =
1761 ((functionLength) / 4) |
1762 (1 << 21) | // E bit
1763 (epilogUnwindCodeIndex << 22)|
1764 (codeWordsCount << 27);
1765 } // end else (!m_fProlog)
1767 PORTABILITY_ASSERT("StubLinker::EmitUnwindInfo");
1768 T_RUNTIME_FUNCTION *pCurFunction = NULL;
1772 // Get a StubUnwindInfoHeapSegment for this base address
1775 CrstHolder crst(&g_StubUnwindInfoHeapSegmentsCrst);
// Walk the sorted global segment list looking for an existing segment whose
// base matches pbBaseAddress; fall out with pStubHeapSegment == NULL if none.
1777 StubUnwindInfoHeapSegment *pStubHeapSegment;
1778 StubUnwindInfoHeapSegment **ppPrevStubHeapSegment;
1779 for (ppPrevStubHeapSegment = &g_StubHeapSegments;
1780 (pStubHeapSegment = *ppPrevStubHeapSegment);
1781 (ppPrevStubHeapSegment = &pStubHeapSegment->pNext))
1783 if (pbBaseAddress < pStubHeapSegment->pbBaseAddress)
1785 // The list is ordered, so address is between segments
1786 pStubHeapSegment = NULL;
1790 if (pbBaseAddress == pStubHeapSegment->pbBaseAddress)
1792 // Found an existing segment
1797 if (!pStubHeapSegment)
1800 // RtlInstallFunctionTableCallback will only accept a ULONG for the
1801 // region size. We've already checked above that the RUNTIME_FUNCTION
1802 // offsets will work relative to pbBaseAddress.
1805 SIZE_T cbSegment = findBlockArgs.cbBlockSize;
1807 if (cbSegment > MaxSegmentSize)
1808 cbSegment = MaxSegmentSize;
1810 NewHolder<StubUnwindInfoHeapSegment> pNewStubHeapSegment = new StubUnwindInfoHeapSegment();
1813 pNewStubHeapSegment->pbBaseAddress = pbBaseAddress;
1814 pNewStubHeapSegment->cbSegment = cbSegment;
1815 pNewStubHeapSegment->pUnwindHeaderList = NULL;
1817 pNewStubHeapSegment->pUnwindInfoTable = NULL;
1820 // Insert the new stub into list
1821 pNewStubHeapSegment->pNext = *ppPrevStubHeapSegment;
1822 *ppPrevStubHeapSegment = pNewStubHeapSegment;
// Holder ownership transferred to the global list; don't auto-delete.
1823 pNewStubHeapSegment.SuppressRelease();
1825 // Use new segment for the stub
1826 pStubHeapSegment = pNewStubHeapSegment;
1828 InstallEEFunctionTable(
1829 pNewStubHeapSegment,
1832 &FindStubFunctionEntry,
1833 pNewStubHeapSegment,
1838 // Link the new stub into the segment.
1841 pHeader->pNext = pStubHeapSegment->pUnwindHeaderList;
1842 pStubHeapSegment->pUnwindHeaderList = pHeader;
1845 // Publish Unwind info to ETW stack crawler
1846 UnwindInfoTable::AddToUnwindInfoTable(
1847 &pStubHeapSegment->pUnwindInfoTable, pCurFunction,
1848 (TADDR) pStubHeapSegment->pbBaseAddress,
1849 (TADDR) pStubHeapSegment->pbBaseAddress + pStubHeapSegment->cbSegment);
// Sanity: the OS lookup must now resolve pCode back to our FunctionEntry.
1853 _ASSERTE(pHeader->IsRegistered());
1854 _ASSERTE( &pHeader->FunctionEntry
1855 == FindStubFunctionEntry((ULONG64)pCode, EncodeDynamicFunctionTableContext(pStubHeapSegment, DYNFNTABLE_STUB)));
1860 #endif // STUBLINKER_GENERATES_UNWIND_INFO
// ARM: records the shape of the stub's prolog (callee-saved register count,
// stack frame size in bytes, whether r0-r3 are pushed) so EmitUnwindInfo can
// later encode matching unwind codes.
1863 void StubLinker::DescribeProlog(UINT cCalleeSavedRegs, UINT cbStackFrame, BOOL fPushArgRegs)
1866 m_cCalleeSavedRegs = cCalleeSavedRegs;
1867 m_cbStackFrame = cbStackFrame;
1868 m_fPushArgRegs = fPushArgRegs;
1870 #elif defined(TARGET_ARM64)
// ARM64: records the prolog layout (integer/vector argument register counts,
// callee-saved register count, extra stack space in bytes) used by
// EmitUnwindInfo and the frame-size helpers below.
1871 void StubLinker::DescribeProlog(UINT cIntRegArgs, UINT cVecRegArgs, UINT cCalleeSavedRegs, UINT cbStackSpace)
1874 m_cIntRegArgs = cIntRegArgs;
1875 m_cVecRegArgs = cVecRegArgs;
1876 m_cCalleeSavedRegs = cCalleeSavedRegs;
1877 m_cbStackSpace = cbStackSpace;
// Returns the SP-relative offset of the saved argument registers, assuming
// the prolog pushes them at the bottom of the frame, below fp/lr and the
// callee-saved registers. Valid only after DescribeProlog (asserted).
1880 UINT StubLinker::GetSavedRegArgsOffset()
1882 _ASSERTE(m_fProlog);
1883 // This is the offset from SP
1884 // We're assuming that the stublinker will push the arg registers to the bottom of the stack frame
1885 return m_cbStackSpace + (2+ m_cCalleeSavedRegs)*sizeof(void*); // 2 is for FP and LR
// Total stub frame size in bytes: explicit stack space plus slots for fp/lr,
// the callee-saved registers, and the saved integer/vector argument
// registers. Valid only after DescribeProlog (asserted).
1888 UINT StubLinker::GetStackFrameSize()
1890 _ASSERTE(m_fProlog);
1891 return m_cbStackSpace + (2 + m_cCalleeSavedRegs + m_cIntRegArgs + m_cVecRegArgs)*sizeof(void*);
1894 #endif // ifdef TARGET_ARM, elif defined(TARGET_ARM64)
1896 #endif // #ifndef DACCESS_COMPILE
1898 #ifndef DACCESS_COMPILE
1900 // Redeclaring the Stub type here and assert its size.
1901 // The size assertion is done here because of where CODE_SIZE_ALIGN
1902 // is defined - it is not included in all places where stublink.h
1905 static_assert_no_msg((sizeof(Stub) % CODE_SIZE_ALIGN) == 0);
1907 //-------------------------------------------------------------------
1908 // Inc the refcount.
1909 //-------------------------------------------------------------------
// Body of Stub::IncRef (signature not visible in this view — confirm against
// full source): atomically bumps the refcount of a live (kUsedStub) stub.
1919 _ASSERTE(m_signature == kUsedStub);
1920 InterlockedIncrement((LONG*)&m_refcount);
1923 //-------------------------------------------------------------------
1924 // Dec the refcount.
1925 //-------------------------------------------------------------------
// Body of Stub::DecRef (signature not visible in this view — confirm against
// full source): atomically drops the refcount; the decremented value is
// presumably used to trigger DeleteStub when it reaches zero — verify.
1935 _ASSERTE(m_signature == kUsedStub);
1936 int count = InterlockedDecrement((LONG*)&m_refcount);
// Tears down a stub: unregisters its unwind info (removing it from its
// StubUnwindInfoHeapSegment, deleting the segment and its OS function table
// if it becomes empty), then releases the backing memory when it was
// allocated with new[] rather than on a loader heap.
1944 VOID Stub::DeleteStub()
1953 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
1954 if (HasUnwindInfo())
1956 StubUnwindInfoHeader *pHeader = GetUnwindInfoHeader();
1959 // Check if the stub has been linked into a StubUnwindInfoHeapSegment.
1961 if (pHeader->IsRegistered())
1963 CrstHolder crst(&g_StubUnwindInfoHeapSegmentsCrst);
1966 // Find the segment containing the stub.
1968 StubUnwindInfoHeapSegment **ppPrevSegment = &g_StubHeapSegments;
1969 StubUnwindInfoHeapSegment *pSegment = *ppPrevSegment;
1973 PBYTE pbCode = (PBYTE)GetEntryPointInternal();
// Withdraw this stub's entry from the ETW unwind-info table first.
1975 UnwindInfoTable::RemoveFromUnwindInfoTable(&pSegment->pUnwindInfoTable,
1976 (TADDR) pSegment->pbBaseAddress, (TADDR) pbCode);
1978 for (StubUnwindInfoHeapSegment *pNextSegment = pSegment->pNext;
1980 ppPrevSegment = &pSegment->pNext, pSegment = pNextSegment, pNextSegment = pSegment->pNext)
1982 // The segments are sorted by pbBaseAddress.
1983 if (pbCode < pNextSegment->pbBaseAddress)
1988 // The stub was marked as registered, so a segment should exist.
1995 // Find this stub's location in the segment's list.
1997 StubUnwindInfoHeader *pCurHeader;
1998 StubUnwindInfoHeader **ppPrevHeaderList;
1999 for (ppPrevHeaderList = &pSegment->pUnwindHeaderList;
2000 (pCurHeader = *ppPrevHeaderList);
2001 (ppPrevHeaderList = &pCurHeader->pNext))
2003 if (pHeader == pCurHeader)
2007 // The stub was marked as registered, so we should find it in the segment's list.
2008 _ASSERTE(pCurHeader);
2013 // Remove the stub from the segment's list.
2015 *ppPrevHeaderList = pHeader->pNext;
2018 // If the segment's list is now empty, delete the segment.
2020 if (!pSegment->pUnwindHeaderList)
2022 DeleteEEFunctionTable(pSegment);
2024 if (pSegment->pUnwindInfoTable != 0)
2025 delete pSegment->pUnwindInfoTable;
2027 *ppPrevSegment = pSegment->pNext;
// Only non-loader-heap stubs own their memory; loader-heap memory is
// reclaimed with the heap.
2036 if ((m_numCodeBytesAndFlags & LOADER_HEAP_BIT) == 0)
2039 m_signature = kFreedStub;
// Poison the code bytes (0xcc = int3) to catch use-after-free.
2040 FillMemory(this+1, GetNumCodeBytes(), 0xcc);
2043 delete [] (BYTE*)GetAllocationBase();
// Computes the start of the allocation containing this Stub by walking
// backwards over the optional unwind-info header and the alignment padding
// inserted before the Stub struct (mirrors the layout built in NewStub).
2047 TADDR Stub::GetAllocationBase()
2057 TADDR info = dac_cast<TADDR>(this);
2058 SIZE_T cbPrefix = 0;
2060 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
2061 if (HasUnwindInfo())
// The suffix stores the unwind-info size, letting us compute how much
// header precedes the Stub.
2063 StubUnwindInfoHeaderSuffix *pSuffix =
2064 PTR_StubUnwindInfoHeaderSuffix(info - cbPrefix -
2067 cbPrefix += StubUnwindInfoHeader::ComputeAlignedSize(pSuffix->nUnwindInfoSize);
2069 #endif // STUBLINKER_GENERATES_UNWIND_INFO
2071 if (!HasExternalEntryPoint())
// Account for padding added so the code following the Stub is
// CODE_SIZE_ALIGN-aligned.
2073 cbPrefix = ALIGN_UP(cbPrefix + sizeof(Stub), CODE_SIZE_ALIGN) - sizeof(Stub);
2076 return info - cbPrefix;
// Creates a stub that points at externally provided code: allocates a
// zero-code-byte stub with NEWSTUB_FL_EXTERNAL and stores pCode in the extra
// pointer slot appended immediately after the Stub struct.
2079 Stub* Stub::NewStub(PTR_VOID pCode, DWORD flags)
2088 Stub* pStub = NewStub(NULL, 0, flags | NEWSTUB_FL_EXTERNAL);
2090 // Passing NEWSTUB_FL_EXTERNAL requests the stub struct be
2091 // expanded in size by a single pointer. Insert the code point at this
2093 *(PTR_VOID *)(pStub + 1) = pCode;
2098 //-------------------------------------------------------------------
2099 // Stub allocation done here.
2100 //-------------------------------------------------------------------
// Allocates and initializes a Stub. Layout (low to high address):
// optional unwind info, the Stub struct itself, then either an external code
// pointer or padding followed by the stub's code bytes. Memory comes from
// pHeap when supplied, otherwise from new[]; all size arithmetic is
// overflow-checked via S_SIZE_T.
2101 /*static*/ Stub* Stub::NewStub(
2105 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
2106 , UINT nUnwindInfoSize
2117 // The memory layout of the allocated memory for the Stub instance is as follows:
2120 // optional: unwind info - see nUnwindInfoSize usage.
2121 // - stubPayloadOffset
2123 // optional: external pointer | padding + code
2124 size_t stubPayloadOffset = 0;
2125 S_SIZE_T size = S_SIZE_T(sizeof(Stub));
2127 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
2128 _ASSERTE(!nUnwindInfoSize || !pHeap || pHeap->m_fPermitStubsWithUnwindInfo);
2130 if (nUnwindInfoSize != 0)
2132 // The Unwind info precedes the Stub itself.
2133 stubPayloadOffset = StubUnwindInfoHeader::ComputeAlignedSize(nUnwindInfoSize);
2134 size += stubPayloadOffset;
2136 #endif // STUBLINKER_GENERATES_UNWIND_INFO
2138 if (flags & NEWSTUB_FL_EXTERNAL)
// External stubs carry a code pointer instead of inline code bytes.
2140 _ASSERTE(pHeap == NULL);
2141 _ASSERTE(numCodeBytes == 0);
2142 size += sizeof(PTR_PCODE);
2146 size.AlignUp(CODE_SIZE_ALIGN);
2147 size += numCodeBytes;
2150 if (size.IsOverflow())
2151 COMPlusThrowArithmetic();
2153 size_t totalSize = size.Value();
2158 pBlock = new BYTE[totalSize];
2162 TaggedMemAllocPtr ptr = pHeap->AllocAlignedMem(totalSize, CODE_SIZE_ALIGN);
2163 pBlock = (BYTE*)(void*)ptr;
2164 flags |= NEWSTUB_FL_LOADERHEAP;
2167 _ASSERTE((stubPayloadOffset % CODE_SIZE_ALIGN) == 0);
2168 Stub* pStubRX = (Stub*)(pBlock + stubPayloadOffset);
// The stub lives in executable memory; obtain a writable alias to
// initialize it.
2170 ExecutableWriterHolderNoLog<Stub> stubWriterHolder;
2178 stubWriterHolder.AssignExecutableWriterHolder(pStubRX, sizeof(Stub));
2179 pStubRW = stubWriterHolder.GetRW();
2184 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
2189 _ASSERTE((BYTE *)pStubRX->GetAllocationBase() == pBlock);
// Initializes a freshly allocated Stub: stamps the live signature, validates
// the code size, and folds the NEWSTUB_FL_* flags into the flag bits of
// m_numCodeBytesAndFlags. When unwind info is present, also records its size
// in the trailing suffix and marks the UNWIND_INFO_BIT.
2194 void Stub::SetupStub(int numCodeBytes, DWORD flags
2195 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
2196 , UINT nUnwindInfoSize
2208 m_signature = kUsedStub;
2210 m_pad_code_bytes1 = 0;
2211 m_pad_code_bytes2 = 0;
2212 m_pad_code_bytes3 = 0;
// Code size must leave room for the flag bits packed above it.
2216 if (((DWORD)numCodeBytes) >= MAX_CODEBYTES)
2217 COMPlusThrowHR(COR_E_OVERFLOW);
2219 m_numCodeBytesAndFlags = numCodeBytes;
2224 if (flags != NEWSTUB_FL_NONE)
2226 if((flags & NEWSTUB_FL_LOADERHEAP) != 0)
2227 m_numCodeBytesAndFlags |= LOADER_HEAP_BIT;
2228 if((flags & NEWSTUB_FL_MULTICAST) != 0)
2229 m_numCodeBytesAndFlags |= MULTICAST_DELEGATE_BIT;
2230 if ((flags & NEWSTUB_FL_EXTERNAL) != 0)
2231 m_numCodeBytesAndFlags |= EXTERNAL_ENTRY_BIT;
2232 if ((flags & NEWSTUB_FL_INSTANTIATING_METHOD) != 0)
2233 m_numCodeBytesAndFlags |= INSTANTIATING_STUB_BIT;
2234 if ((flags & NEWSTUB_FL_THUNK) != 0)
2235 m_numCodeBytesAndFlags |= THUNK_BIT;
2238 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
2239 if (nUnwindInfoSize)
2241 m_numCodeBytesAndFlags |= UNWIND_INFO_BIT;
// The suffix records the unwind-info size so GetAllocationBase can
// later walk back over the header.
2243 StubUnwindInfoHeaderSuffix * pSuffix = GetUnwindInfoHeaderSuffix();
2244 pSuffix->nUnwindInfoSize = (BYTE)nUnwindInfoSize;
2246 StubUnwindInfoHeader * pHeader = GetUnwindInfoHeader();
2252 //-------------------------------------------------------------------
2254 //-------------------------------------------------------------------
// One-time process init for the Stub machinery: sets up the lock guarding
// the global list of stub unwind-info heap segments.
2255 /*static*/ void Stub::Init()
2264 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
2265 g_StubUnwindInfoHeapSegmentsCrst.Init(CrstStubUnwindInfoHeapSegments);
2269 //-------------------------------------------------------------------
2271 //-------------------------------------------------------------------
// Constructs a stub cache with 'fixedSlots' directly-indexed slots; keys
// beyond that range go in the m_pSlotEntries linked list. The fixed-slot
// array is allocated eagerly and the overflow list starts empty.
2272 ArgBasedStubCache::ArgBasedStubCache(UINT fixedSlots)
2273 : m_numFixedSlots(fixedSlots),
2274 m_crst(CrstArgBasedStubCache)
2276 WRAPPER_NO_CONTRACT;
2278 m_aStub = new Stub * [m_numFixedSlots];
2279 _ASSERTE(m_aStub != NULL);
2281 for (unsigned __int32 i = 0; i < m_numFixedSlots; i++) {
2284 m_pSlotEntries = NULL;
2288 //-------------------------------------------------------------------
2290 //-------------------------------------------------------------------
// Destructor: releases the cache's reference on every stub held in the
// fixed-slot array and in the overflow SlotEntry list, freeing the list
// nodes as it goes.
2291 ArgBasedStubCache::~ArgBasedStubCache()
2300 for (unsigned __int32 i = 0; i < m_numFixedSlots; i++) {
2301 Stub *pStub = m_aStub[i];
2306 // a size of 0 is a signal to Nirvana to flush the entire cache
2307 // not sure if this is needed, but should have no CLR perf impact since size is 0.
2308 FlushInstructionCache(GetCurrentProcess(),0,0);
2310 SlotEntry **ppSlotEntry = &m_pSlotEntries;
2312 while (NULL != (pCur = *ppSlotEntry)) {
2313 Stub *pStub = pCur->m_pStub;
// Unlink before freeing so the list stays consistent at every step.
2315 *ppSlotEntry = pCur->m_pNext;
2323 //-------------------------------------------------------------------
2324 // Queries/retrieves a previously cached stub.
2326 // If there is no stub corresponding to the given index,
2327 // this function returns NULL.
2329 // Otherwise, this function returns the stub after
2330 // incrementing its refcount.
2331 //-------------------------------------------------------------------
// Looks up a previously cached stub by key under the cache lock: fixed-slot
// array for small keys, overflow list otherwise. On a hit the stub's
// refcount is incremented (via a writable mapping) before returning it;
// returns NULL when no stub is cached for the key.
2332 Stub *ArgBasedStubCache::GetStub(UINT_PTR key)
2344 CrstHolder ch(&m_crst);
2346 if (key < m_numFixedSlots) {
2347 pStub = m_aStub[key];
2350 for (SlotEntry *pSlotEntry = m_pSlotEntries;
2352 pSlotEntry = pSlotEntry->m_pNext) {
2354 if (pSlotEntry->m_key == key) {
2355 pStub = pSlotEntry->m_pStub;
// Stub headers live in executable pages; take a writable alias to IncRef.
2361 ExecutableWriterHolder<Stub> stubWriterHolder(pStub, sizeof(Stub));
2362 stubWriterHolder.GetRW()->IncRef();
2368 //-------------------------------------------------------------------
2369 // Tries to associate a stub with a given index. This association
2370 // may fail because some other thread may have beaten you to it
2371 // just before you make the call.
2373 // If the association succeeds, "pStub" is installed, and it is
2374 // returned back to the caller. The stub's refcount is incremented
2375 // twice (one to reflect the cache's ownership, and one to reflect
2376 // the caller's ownership.)
2378 // If the association fails because another stub is already installed,
2379 // then the incumbent stub is returned to the caller and its refcount
2380 // is incremented once (to reflect the caller's ownership.)
2382 // If the association fails due to lack of memory, NULL is returned
2383 // and no one's refcount changes.
2385 // This routine is intended to be called like this:
2387 // Stub *pCandidate = MakeStub(); // after this, pCandidate's rc is 1
2388 // Stub *pWinner = cache->SetStub(idx, pCandidate);
2389 // pCandidate->DecRef();
2390 // pCandidate = 0xcccccccc; // must not use pCandidate again.
2392 // OutOfMemoryError;
2394 // // If the association succeeded, pWinner's refcount is 2 and so
2395 // // is pCandidate's (because it *is* pWinner);.
2396 // // If the association failed, pWinner's refcount is still 2
2397 // // and pCandidate got destroyed by the last DecRef().
2398 // // Either way, pWinner is now the official index holder. It
2399 // // has a refcount of 2 (one for the cache's ownership, and
2400 // // one belonging to this code.)
2401 //-------------------------------------------------------------------
// Attempts to install pStub for 'key' under the cache lock. If another stub
// is already cached for the key, that incumbent wins and is returned
// instead. The winner gets one IncRef for the cache (only when newly
// installed) and one IncRef for the caller. See the contract comment above
// this function for the full refcounting protocol.
2402 Stub* ArgBasedStubCache::AttemptToSetStub(UINT_PTR key, Stub *pStub)
2412 CrstHolder ch(&m_crst);
2414 bool incRefForCache = false;
2416 if (key < m_numFixedSlots) {
// Occupied fixed slot: the incumbent replaces the candidate.
2418 pStub = m_aStub[key];
2420 m_aStub[key] = pStub;
2421 incRefForCache = true;
2424 SlotEntry *pSlotEntry;
2425 for (pSlotEntry = m_pSlotEntries;
2427 pSlotEntry = pSlotEntry->m_pNext) {
2429 if (pSlotEntry->m_key == key) {
2430 pStub = pSlotEntry->m_pStub;
// Not found in the overflow list: prepend a new entry for the candidate.
2435 pSlotEntry = new SlotEntry;
2436 pSlotEntry->m_pStub = pStub;
2437 incRefForCache = true;
2438 pSlotEntry->m_key = key;
2439 pSlotEntry->m_pNext = m_pSlotEntries;
2440 m_pSlotEntries = pSlotEntry;
2444 ExecutableWriterHolder<Stub> stubWriterHolder(pStub, sizeof(Stub));
2447 stubWriterHolder.GetRW()->IncRef(); // IncRef on cache's behalf
2449 stubWriterHolder.GetRW()->IncRef(); // IncRef because we're returning it to caller
// Debug aid: prints every cached stub (fixed slots, then dynamic overflow
// entries) with its entry point and the refcount read from the word just
// before the entry point.
2458 VOID ArgBasedStubCache::Dump()
2468 printf("--------------------------------------------------------------\n");
2469 printf("ArgBasedStubCache dump (%u fixed entries):\n", m_numFixedSlots);
2470 for (UINT32 i = 0; i < m_numFixedSlots; i++) {
2472 printf("  Fixed slot %u: ", (ULONG)i);
2473 Stub *pStub = m_aStub[i];
2477 printf("%zxh   - refcount is %u\n",
2478 (size_t)(pStub->GetEntryPoint()),
2479 (ULONG)( *( ( ((ULONG*)(pStub->GetEntryPoint())) - 1))));
2483 for (SlotEntry *pSlotEntry = m_pSlotEntries;
2485 pSlotEntry = pSlotEntry->m_pNext) {
2487 printf("  Dyna. slot %u: ", (ULONG)(pSlotEntry->m_key));
2488 Stub *pStub = pSlotEntry->m_pStub;
2489 printf("%zxh   - refcount is %u\n",
2490 (size_t)(pStub->GetEntryPoint()),
2491 (ULONG)( *( ( ((ULONG*)(pStub->GetEntryPoint())) - 1))));
2496 printf("--------------------------------------------------------------\n");
2500 #endif // #ifndef DACCESS_COMPILE