1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
9 // A StubLinker object provides a way to link several location-independent
10 // code sources into one executable stub, resolving references,
11 // and choosing the shortest possible instruction size. The StubLinker
12 // abstracts out the notion of a "reference" so it is completely CPU
13 // independent. This StubLinker is intended not only to create method
14 // stubs but to create the PCode-marshaling stubs for Native/Direct.
16 // A StubLinker's typical life-cycle is:
18 // 1. Create a new StubLinker (it accumulates state for the stub being
20 // 2. Emit code bytes and references (requiring fixups) into the StubLinker.
21 // 3. Call the Link() method to produce the final stub.
22 // 4. Destroy the StubLinker.
24 // StubLinkers are not multithread-aware: they're intended to be
25 // used entirely on a single thread. Also, StubLinkers report errors
26 // using COMPlusThrow. StubLinkers do have a destructor: to prevent
27 // C++ object unwinding from clashing with COMPlusThrow,
28 // you must use COMPLUSCATCH to ensure the StubLinker's cleanup in the
29 // event of an exception: the following code would do it:
31 // StubLinker stublink;
35 // // Have to separate into inner function because VC++ forbids
36 // // mixing __try & local objects in the same function.
47 // This file should only be included via the platform-specific cgencpu.h.
51 #ifndef __stublink_h__
52 #define __stublink_h__
56 #include "eecontract.h"
58 //-------------------------------------------------------------------------
60 //-------------------------------------------------------------------------
61 class InstructionFormat;
64 class CheckDuplicatedStructLayouts;
65 class CodeBasedStubCache;
71 struct IntermediateUnwindInfo;
73 #if !defined(_TARGET_X86_) && !defined(FEATURE_PAL)
74 #define STUBLINKER_GENERATES_UNWIND_INFO
75 #endif // !_TARGET_X86_ && !FEATURE_PAL
78 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
80 typedef DPTR(struct StubUnwindInfoHeaderSuffix) PTR_StubUnwindInfoHeaderSuffix;
81 struct StubUnwindInfoHeaderSuffix
// Trailing marker written after the unwind info; records the blob's size so
// the variable-sized StubUnwindInfoHeader can be located by walking
// backwards from the Stub object (see Stub::GetUnwindInfoHeader).
83 UCHAR nUnwindInfoSize; // Size of unwind info in bytes
86 // Variable-sized struct that precedes a Stub when the stub requires unwind
87 // information. Followed by a StubUnwindInfoHeaderSuffix.
88 typedef DPTR(struct StubUnwindInfoHeader) PTR_StubUnwindInfoHeader;
89 struct StubUnwindInfoHeader
// Next header in the per-heap-segment list (see
// StubUnwindInfoHeapSegment::pUnwindHeaderList).
91 PTR_StubUnwindInfoHeader pNext;
// OS function-table entry describing the stub's code range.
92 T_RUNTIME_FUNCTION FunctionEntry;
93 UNWIND_INFO UnwindInfo; // variable length
95 // Computes the size needed for this variable-sized struct.
96 static SIZE_T ComputeSize(UINT nUnwindInfoSize);
// Returns true if this header has already been registered.
// NOTE(review): the registration mechanism is defined in the .cpp; confirm
// there (presumably linked into a StubUnwindInfoHeapSegment list).
100 bool IsRegistered ();
103 // List of stub address ranges, in increasing address order.
104 struct StubUnwindInfoHeapSegment
108 StubUnwindInfoHeader *pUnwindHeaderList;
109 StubUnwindInfoHeapSegment *pNext;
112 class UnwindInfoTable* pUnwindInfoTable; // Used to publish unwind info to ETW stack crawler
116 VOID UnregisterUnwindInfoInLoaderHeap (UnlockedLoaderHeap *pHeap);
118 #endif // STUBLINKER_GENERATES_UNWIND_INFO
121 //-------------------------------------------------------------------------
122 // A non-multithreaded object that fixes up and emits one executable stub.
123 //-------------------------------------------------------------------------
127 //---------------------------------------------------------------
129 //---------------------------------------------------------------
133 //---------------------------------------------------------------
134 // Create a new undefined label. Label must be assigned to a code
135 // location using EmitLabel() prior to final linking.
136 // Throws exception on failure.
137 //---------------------------------------------------------------
138 CodeLabel* NewCodeLabel();
140 //---------------------------------------------------------------
141 // Create a new undefined label for which we want the absolute
142 // address, not offset. Label must be assigned to a code
143 // location using EmitLabel() prior to final linking.
144 // Throws exception on failure.
145 //---------------------------------------------------------------
146 CodeLabel* NewAbsoluteCodeLabel();
148 //---------------------------------------------------------------
149 // Combines NewCodeLabel() and EmitLabel() for convenience.
150 // Throws exception on failure.
151 //---------------------------------------------------------------
152 CodeLabel* EmitNewCodeLabel();
155 //---------------------------------------------------------------
156 // Returns final location of label as an offset from the start
157 // of the stub. Can only be called after linkage.
158 //---------------------------------------------------------------
159 UINT32 GetLabelOffset(CodeLabel *pLabel);
161 //---------------------------------------------------------------
162 // Append code bytes.
163 //---------------------------------------------------------------
164 VOID EmitBytes(const BYTE *pBytes, UINT numBytes);
165 VOID Emit8 (unsigned __int8 u8);
166 VOID Emit16(unsigned __int16 u16);
167 VOID Emit32(unsigned __int32 u32);
168 VOID Emit64(unsigned __int64 u64);
169 VOID EmitPtr(const VOID *pval);
171 //---------------------------------------------------------------
172 // Emit a UTF8 string
173 //---------------------------------------------------------------
174 VOID EmitUtf8(LPCUTF8 pUTF8)
// Emits the raw bytes of the UTF-8 string. The length (p-pUTF8-1) excludes
// the null terminator -- presumably p has been advanced one past the
// terminating zero byte in the scan above; confirm against the full body.
182 EmitBytes((const BYTE *)pUTF8, (unsigned int)(p-pUTF8-1));
185 //---------------------------------------------------------------
186 // Append an instruction containing a reference to a label.
188 // target - the label being referenced.
189 // instructionFormat - a platform-specific InstructionFormat object
190 // that gives properties about the reference.
191 // variationCode - uninterpreted data passed to the pInstructionFormat methods.
192 //---------------------------------------------------------------
193 VOID EmitLabelRef(CodeLabel* target, const InstructionFormat & instructionFormat, UINT variationCode);
196 //---------------------------------------------------------------
197 // Sets the label to point to the current "instruction pointer"
198 // It is invalid to call EmitLabel() twice on
200 //---------------------------------------------------------------
201 VOID EmitLabel(CodeLabel* pCodeLabel);
203 //---------------------------------------------------------------
204 // Emits the patch label for the stub.
205 // Throws exception on failure.
206 //---------------------------------------------------------------
207 void EmitPatchLabel();
209 //---------------------------------------------------------------
210 // Create a new label to an external address.
211 // Throws exception on failure.
212 //---------------------------------------------------------------
213 CodeLabel* NewExternalCodeLabel(LPVOID pExternalAddress);
214 CodeLabel* NewExternalCodeLabel(PCODE pExternalAddress)
216 return NewExternalCodeLabel((LPVOID)pExternalAddress);
219 //---------------------------------------------------------------
220 // Push and Pop can be used to keep track of stack growth.
221 // These should be adjusted by opcodes written to the stream.
223 // Note that popping & pushing stack size as opcodes are emitted
224 // is naive & may not be accurate in many cases,
225 // so complex stubs may have to manually adjust the stack size.
226 // However it should work for the vast majority of cases we care
228 //---------------------------------------------------------------
229 void Push(UINT size);
232 INT GetStackSize() { LIMITED_METHOD_CONTRACT; return m_stackSize; }
233 void SetStackSize(SHORT size) { LIMITED_METHOD_CONTRACT; m_stackSize = size; }
235 void SetDataOnly(BOOL fDataOnly = TRUE) { LIMITED_METHOD_CONTRACT; m_fDataOnly = fDataOnly; }
238 void DescribeProlog(UINT cCalleeSavedRegs, UINT cbStackFrame, BOOL fPushArgRegs);
239 #elif defined(_TARGET_ARM64_)
240 void DescribeProlog(UINT cIntRegArgs, UINT cVecRegArgs, UINT cCalleeSavedRegs, UINT cbStackFrame);
241 UINT GetSavedRegArgsOffset();
242 UINT GetStackFrameSize();
245 //===========================================================================
246 // Unwind information
248 // Records location of preserved or parameter register
249 VOID UnwindSavedReg (UCHAR reg, ULONG SPRelativeOffset);
250 VOID UnwindPushedReg (UCHAR reg);
252 // Records "sub rsp, xxx"
253 VOID UnwindAllocStack (SHORT FrameSizeIncrement);
255 // Records frame pointer register
256 VOID UnwindSetFramePointer (UCHAR reg);
258 // In DEBUG, emits a call to m_pUnwindInfoCheckLabel (via
259 // EmitUnwindInfoCheckWorker). Code at that label will call to a
260 // helper that will attempt to RtlVirtualUnwind through the stub. The
261 // helper will preserve ALL registers.
262 VOID EmitUnwindInfoCheck();
264 #if defined(_DEBUG) && defined(STUBLINKER_GENERATES_UNWIND_INFO) && !defined(CROSSGEN_COMPILE)
267 // Injects a call to the given label.
268 virtual VOID EmitUnwindInfoCheckWorker (CodeLabel *pCheckLabel) { _ASSERTE(!"override me"); }
270 // Emits a call to a helper that will attempt to RtlVirtualUnwind
271 // through the stub. The helper will preserve ALL registers.
272 virtual VOID EmitUnwindInfoCheckSubfunction() { _ASSERTE(!"override me"); }
277 //---------------------------------------------------------------
278 // Generate the actual stub. The returned stub has a refcount of 1.
279 // No other methods (other than the destructor) should be called
280 // after calling Link().
282 // Throws exception on failure.
283 //---------------------------------------------------------------
284 Stub *Link(DWORD flags = 0) { WRAPPER_NO_CONTRACT; return Link(NULL, flags); }
285 Stub *Link(LoaderHeap *heap, DWORD flags = 0);
287 //---------------------------------------------------------------
288 // Generate the actual stub. The returned stub has a refcount of 1.
289 // No other methods (other than the destructor) should be called
290 // after calling Link(). The linked stub must have its increment
291 // increased by one prior to calling this method. This method
292 // does not increment the reference count of the interceptee.
294 // Throws exception on failure.
295 //---------------------------------------------------------------
296 Stub *LinkInterceptor(Stub* interceptee, void *pRealAddr)
297 { WRAPPER_NO_CONTRACT; return LinkInterceptor(NULL,interceptee, pRealAddr); }
298 Stub *LinkInterceptor(LoaderHeap *heap, Stub* interceptee, void *pRealAddr);
301 CodeElement *m_pCodeElements; // stored in *reverse* order
302 CodeLabel *m_pFirstCodeLabel; // linked list of CodeLabels
303 LabelRef *m_pFirstLabelRef; // linked list of references
304 CodeLabel *m_pPatchLabel; // label of stub patch offset
305 // currently just for multicast
307 SHORT m_stackSize; // count of pushes/pops
308 CQuickHeap m_quickHeap; // throwaway heap for
311 BOOL m_fDataOnly; // the stub contains only data - does not need FlushInstructionCache
315 BOOL m_fProlog; // True if DescribeProlog has been called
316 UINT m_cCalleeSavedRegs; // Count of callee saved registers (0 == none, 1 == r4, 2 ==
317 // r4-r5 etc. up to 8 == r4-r11)
318 UINT m_cbStackFrame; // Count of bytes in the stack frame (excl of saved regs)
319 BOOL m_fPushArgRegs; // If true, r0-r3 are saved before callee saved regs
320 #endif // _TARGET_ARM_
322 #ifdef _TARGET_ARM64_
324 BOOL m_fProlog; // True if DescribeProlog has been called
325 UINT m_cIntRegArgs; // Count of int register arguments (x0 - x7)
326 UINT m_cVecRegArgs; // Count of FP register arguments (v0 - v7)
327 UINT m_cCalleeSavedRegs; // Count of callee saved registers (x19 - x28)
328 UINT m_cbStackSpace; // Additional stack space for return buffer and stack alignment
329 #endif // _TARGET_ARM64_
331 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
334 CodeLabel *m_pUnwindInfoCheckLabel; // subfunction to call to unwind info check helper.
335 // On AMD64, the prologue is restricted to 256
336 // bytes, so this reduces the size of the injected
337 // code from 14 to 5 bytes.
340 #ifdef _TARGET_AMD64_
341 IntermediateUnwindInfo *m_pUnwindInfoList;
342 UINT m_nUnwindSlots; // number of slots to allocate at end, == UNWIND_INFO::CountOfCodes
343 BOOL m_fHaveFramePointer; // indicates stack operations no longer need to be recorded
346 // Returns total UnwindInfoSize, including RUNTIME_FUNCTION entry
348 UINT UnwindInfoSize(UINT codeSize)
// No unwind codes were recorded => no unwind info storage is needed.
350 if (m_nUnwindSlots == 0) return 0;
// Total = RUNTIME_FUNCTION entry + fixed UNWIND_INFO prefix (up to the
// flexible UnwindCode array) + one UNWIND_CODE per recorded slot.
352 return sizeof(T_RUNTIME_FUNCTION) + offsetof(UNWIND_INFO, UnwindCode) + m_nUnwindSlots * sizeof(UNWIND_CODE);
354 #endif // _TARGET_AMD64_
357 #define MAX_UNWIND_CODE_WORDS 5 /* maximum number of 32-bit words to store unwind codes */
358 // Cache information about the stack frame set up in the prolog and use it in the generation of the
361 // Reserve fixed size block that's big enough to fit any unwind info we can have
362 static const int c_nUnwindInfoSize = sizeof(T_RUNTIME_FUNCTION) + sizeof(DWORD) + MAX_UNWIND_CODE_WORDS *4;
365 // Returns total UnwindInfoSize, including RUNTIME_FUNCTION entry
367 UINT UnwindInfoSize(UINT codeSize)
// Unwind info is only emitted when DescribeProlog has been called.
369 if (!m_fProlog) return 0;
// ARM uses a fixed-size reservation big enough for any unwind info we
// generate (see c_nUnwindInfoSize above).
371 return c_nUnwindInfoSize;
373 #endif // _TARGET_ARM_
375 #ifdef _TARGET_ARM64_
376 #define MAX_UNWIND_CODE_WORDS 5 /* maximum number of 32-bit words to store unwind codes */
379 // Reserve fixed size block that's big enough to fit any unwind info we can have
380 static const int c_nUnwindInfoSize = sizeof(T_RUNTIME_FUNCTION) + sizeof(DWORD) + MAX_UNWIND_CODE_WORDS *4;
381 UINT UnwindInfoSize(UINT codeSize)
// Unwind info is only emitted when DescribeProlog has been called.
383 if (!m_fProlog) return 0;
// ARM64 uses a fixed-size reservation big enough for any unwind info we
// generate (see c_nUnwindInfoSize above).
385 return c_nUnwindInfoSize;
388 #endif // _TARGET_ARM64_
390 #endif // STUBLINKER_GENERATES_UNWIND_INFO
392 CodeRun *AppendNewEmptyCodeRun();
395 // Returns pointer to last CodeElement or NULL.
396 CodeElement *GetLastCodeElement()
398 LIMITED_METHOD_CONTRACT;
// m_pCodeElements is stored in *reverse* order (see its declaration), so
// the head of the list is the most recently appended element.
399 return m_pCodeElements;
402 // Appends a new CodeElement.
403 VOID AppendCodeElement(CodeElement *pCodeElement);
406 // Calculates the size of the stub code that is allocate
407 // immediately after the stub object. Returns the
408 // total size. GlobalSize contains the size without
410 virtual int CalculateSize(int* globalsize);
412 // Writes out the code element into memory following the
414 bool EmitStub(Stub* pStub, int globalsize);
416 CodeRun *GetLastCodeRunIfAny();
418 bool EmitUnwindInfo(Stub* pStub, int globalsize);
420 #if defined(_TARGET_AMD64_) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
421 UNWIND_CODE *AllocUnwindInfo (UCHAR Op, UCHAR nExtraSlots = 0);
422 #endif // defined(_TARGET_AMD64_) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
425 //************************************************************************
427 //************************************************************************
430 // Link pointer for StubLink's list of labels
433 // if FALSE, label refers to some code within the same stub
434 // if TRUE, label refers to some externally supplied address.
437 // if TRUE, means we want the actual address of the label and
438 // not an offset to it
445 // Indicates the position of the label, expressed
446 // as an offset into a CodeRun.
455 LPVOID m_pExternalAddress;
462 NEWSTUB_FL_INTERCEPT = 0x00000001,
463 NEWSTUB_FL_MULTICAST = 0x00000002,
464 NEWSTUB_FL_EXTERNAL = 0x00000004,
465 NEWSTUB_FL_LOADERHEAP = 0x00000008
469 //-------------------------------------------------------------------------
470 // An executable stub. These can only be created by the StubLinker().
471 // Each stub has a reference count (which is maintained in a thread-safe
472 // manner.) When the ref-count goes to zero, the stub automatically
474 //-------------------------------------------------------------------------
475 typedef DPTR(class Stub) PTR_Stub;
476 typedef DPTR(PTR_Stub) PTR_PTR_Stub;
479 friend class CheckDuplicatedStructLayouts;
480 friend class CheckAsmOffsets;
485 MULTICAST_DELEGATE_BIT = 0x80000000,
486 EXTERNAL_ENTRY_BIT = 0x40000000,
487 LOADER_HEAP_BIT = 0x20000000,
488 INTERCEPT_BIT = 0x10000000,
489 UNWIND_INFO_BIT = 0x08000000,
491 PATCH_OFFSET_MASK = UNWIND_INFO_BIT - 1,
492 MAX_PATCH_OFFSET = PATCH_OFFSET_MASK + 1,
495 static_assert_no_msg(PATCH_OFFSET_MASK < UNWIND_INFO_BIT);
498 //-------------------------------------------------------------------
500 //-------------------------------------------------------------------
504 //-------------------------------------------------------------------
506 // Returns true if the count went to zero and the stub was deleted
507 //-------------------------------------------------------------------
512 //-------------------------------------------------------------------
513 // Used for throwing out unused stubs from stub caches. This
514 // method cannot be 100% accurate due to race conditions. This
515 // is ok because stub cache management is robust in the face
516 // of missed or premature cleanups.
517 //-------------------------------------------------------------------
518 BOOL HeuristicLooksOrphaned()
520 LIMITED_METHOD_CONTRACT;
521 _ASSERTE(m_signature == kUsedStub);
// A refcount of exactly 1 suggests only the stub cache itself still holds
// a reference. Racy by design -- see the comment above; callers must
// tolerate both missed and premature cleanups.
522 return (m_refcount == 1);
525 //-------------------------------------------------------------------
526 // Used by the debugger to help step through stubs
527 //-------------------------------------------------------------------
530 LIMITED_METHOD_CONTRACT;
531 return (m_patchOffset & INTERCEPT_BIT) != 0;
534 BOOL IsMulticastDelegate()
536 LIMITED_METHOD_CONTRACT;
537 return (m_patchOffset & MULTICAST_DELEGATE_BIT) != 0;
540 //-------------------------------------------------------------------
541 // For stubs which execute user code, a patch offset needs to be set
542 // to tell the debugger how far into the stub code the debugger has
543 // to step until the frame is set up.
544 //-------------------------------------------------------------------
545 USHORT GetPatchOffset()
547 LIMITED_METHOD_CONTRACT;
// The patch offset shares m_patchOffset with several flag bits
// (MULTICAST_DELEGATE_BIT etc.); PATCH_OFFSET_MASK strips the flags.
549 return (USHORT)(m_patchOffset & PATCH_OFFSET_MASK);
552 void SetPatchOffset(USHORT offset)
554 LIMITED_METHOD_CONTRACT;
// The offset may only be set once (the offset field must still be zero)...
555 _ASSERTE(GetPatchOffset() == 0);
556 m_patchOffset |= offset;
// ...and must round-trip exactly, i.e. not spill into the flag bits.
557 _ASSERTE(GetPatchOffset() == offset);
560 TADDR GetPatchAddress()
564 return dac_cast<TADDR>(GetEntryPointInternal()) + GetPatchOffset();
567 //-------------------------------------------------------------------
568 // Unwind information.
569 //-------------------------------------------------------------------
571 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
575 LIMITED_METHOD_CONTRACT;
576 return (m_patchOffset & UNWIND_INFO_BIT) != 0;
579 StubUnwindInfoHeaderSuffix *GetUnwindInfoHeaderSuffix()
590 _ASSERTE(HasUnwindInfo());
592 TADDR info = dac_cast<TADDR>(this);
// Skip two pointer-sized slots stored immediately before the Stub.
// NOTE(review): which fields occupy those slots is established in the
// elided portion of this file -- confirm the layout there.
596 info -= 2 * sizeof(TADDR);
// The fixed-size suffix sits just before those slots.
599 return PTR_StubUnwindInfoHeaderSuffix
600 (info - sizeof(StubUnwindInfoHeaderSuffix));
603 StubUnwindInfoHeader *GetUnwindInfoHeader()
613 StubUnwindInfoHeaderSuffix *pSuffix = GetUnwindInfoHeaderSuffix();
615 TADDR suffixEnd = dac_cast<TADDR>(pSuffix) + sizeof(*pSuffix);
// The variable-sized header ends where the suffix ends; back up by its
// computed total size (which depends on nUnwindInfoSize) to find its start.
617 return PTR_StubUnwindInfoHeader(suffixEnd -
618 StubUnwindInfoHeader::ComputeSize(pSuffix->nUnwindInfoSize));
621 #endif // STUBLINKER_GENERATES_UNWIND_INFO
623 //-------------------------------------------------------------------
624 // Returns pointer to the start of the allocation containing this Stub.
625 //-------------------------------------------------------------------
626 TADDR GetAllocationBase();
628 //-------------------------------------------------------------------
629 // Return executable entrypoint after checking the ref count.
630 //-------------------------------------------------------------------
631 PCODE GetEntryPoint()
636 _ASSERTE(m_signature == kUsedStub);
637 _ASSERTE(m_refcount > 0);
639 TADDR pEntryPoint = dac_cast<TADDR>(GetEntryPointInternal());
647 pEntryPoint |= THUMB_CODE;
653 UINT GetNumCodeBytes()
658 return m_numCodeBytes;
661 //-------------------------------------------------------------------
662 // Return start of the stub blob
663 //-------------------------------------------------------------------
669 _ASSERTE(m_signature == kUsedStub);
670 _ASSERTE(m_refcount > 0);
672 return GetEntryPointInternal();
675 //-------------------------------------------------------------------
676 // Return the Stub as in GetEntryPoint and size of the stub+code in bytes
677 // WARNING: Depending on the stub kind this may be just Stub size as
678 // not all stubs have the info about the code size.
679 // It's the caller responsibility to determine that
680 //-------------------------------------------------------------------
681 static Stub* RecoverStubAndSize(PCODE pEntryPoint, DWORD *pSize)
689 PRECONDITION(pEntryPoint && pSize);
693 Stub *pStub = Stub::RecoverStub(pEntryPoint);
// Reported size covers the Stub header plus the code bytes that follow it.
// Per the comment above, m_numCodeBytes may be 0 for stub kinds that do
// not track code size -- the caller must account for that.
694 *pSize = sizeof(Stub) + pStub->m_numCodeBytes;
698 HRESULT CloneStub(BYTE *pBuffer, DWORD dwBufferSize)
700 LIMITED_METHOD_CONTRACT;
// Fail if the caller's buffer cannot hold the Stub header + code bytes.
701 if ((pBuffer == NULL) ||
702 (dwBufferSize < (sizeof(*this) + m_numCodeBytes)))
// Raw byte copy of header + code; safe because a Stub holds no GC refs.
707 memcpyNoGCRefs(pBuffer, this, sizeof(*this) + m_numCodeBytes);
// The clone starts life with a single reference, independent of the
// source stub's refcount.
708 reinterpret_cast<Stub *>(pBuffer)->m_refcount = 1;
713 //-------------------------------------------------------------------
714 // Reverse GetEntryPoint.
715 //-------------------------------------------------------------------
716 static Stub* RecoverStub(PCODE pEntryPoint)
718 STATIC_CONTRACT_NOTHROW;
719 STATIC_CONTRACT_GC_NOTRIGGER;
// Strip any instruction-set marker from the PCODE (e.g. the Thumb bit
// that GetEntryPoint ORs in on ARM) to get the raw code address.
721 TADDR pStubData = PCODEToPINSTR(pEntryPoint);
// The Stub header immediately precedes its code, so back up by its size.
723 Stub *pStub = PTR_Stub(pStubData - sizeof(*pStub));
725 #if !defined(DACCESS_COMPILE)
726 _ASSERTE(pStub->m_signature == kUsedStub);
727 _ASSERTE(pStub->GetEntryPoint() == pEntryPoint);
728 #elif defined(_DEBUG)
729 // If this isn't really a stub we don't want
730 // to continue with it.
731 // TODO: This should be removed once IsStub
732 // can advertise whether it's safe to call
733 // further StubManager methods.
734 if (pStub->m_signature != kUsedStub ||
735 pStub->GetEntryPoint() != pEntryPoint)
737 DacError(E_INVALIDARG);
743 //-------------------------------------------------------------------
744 // Returns TRUE if entry point is not inside the Stub allocation.
745 //-------------------------------------------------------------------
746 BOOL HasExternalEntryPoint() const
748 LIMITED_METHOD_CONTRACT;
// EXTERNAL_ENTRY_BIT is one of the flag bits packed into m_patchOffset;
// when set, the code address lives outside this Stub allocation.
750 return (m_patchOffset & EXTERNAL_ENTRY_BIT) != 0;
753 //-------------------------------------------------------------------
754 // This is the guy that creates stubs.
755 //-------------------------------------------------------------------
756 static Stub* NewStub(LoaderHeap *pLoaderHeap, UINT numCodeBytes,
758 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
759 , UINT nUnwindInfoSize = 0
763 static Stub* NewStub(PTR_VOID pCode, DWORD flags = 0);
764 static Stub* NewStub(PCODE pCode, DWORD flags = 0)
766 return NewStub((PTR_VOID)pCode, flags);
769 //-------------------------------------------------------------------
771 //-------------------------------------------------------------------
775 // fMC: Set to true if the stub is a multicast delegate, false otherwise
776 void SetupStub(int numCodeBytes, DWORD flags
777 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
778 , UINT nUnwindInfoSlots
783 //-------------------------------------------------------------------
784 // Return executable entrypoint without checking the ref count.
785 //-------------------------------------------------------------------
786 inline PTR_CBYTE GetEntryPointInternal()
788 LIMITED_METHOD_CONTRACT;
791 _ASSERTE(m_signature == kUsedStub);
794 if (HasExternalEntryPoint())
// External entry point: the pointer-sized slot immediately after the
// Stub header holds the actual code address; dereference it.
796 return dac_cast<PTR_BYTE>(*dac_cast<PTR_PCODE>(dac_cast<TADDR>(this) + sizeof(*this)));
800 // StubLink always puts the entrypoint first.
801 return dac_cast<PTR_CBYTE>(this) + sizeof(*this);
812 kUsedStub = 0x42555453, // 'STUB'
813 kFreedStub = 0x46555453, // 'STUF'
819 //README ALIGNMENT: in retail mode UINT m_numCodeBytes does not align to 16 bytes for the code
820 // after the Stub struct. This is to pad properly
821 UINT m_pad_code_bytes;
826 Stub() // Stubs are created by NewStub(), not "new". Hide the
827 { LIMITED_METHOD_CONTRACT; } // constructor to enforce this.
834 * The InterceptStub hides a reference to the real stub at a negative offset.
835 * When this stub is deleted it decrements the real stub cleaning it up as
836 * well. The InterceptStub is created by the Stublinker.
838 * <TODO>@TODO: Intercepted stubs need have a routine that will find the
839 * last real stub in the chain.</TODO>
840 * The stubs are linked - GetInterceptedStub will return either
841 * a pointer to the next intercept stub (if there is one), or NULL,
842 * indicating end-of-chain. GetRealAddr will return the address of
843 * the "real" code, which may, in fact, be another thunk (for example),
844 * and thus should be traced as well.
847 typedef DPTR(class InterceptStub) PTR_InterceptStub;
848 class InterceptStub : public Stub
852 //-------------------------------------------------------------------
853 // This is the guy that creates stubs.
854 //-------------------------------------------------------------------
855 static Stub* NewInterceptedStub(LoaderHeap *pHeap,
859 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
860 , UINT nUnwindInfoSize = 0
864 //---------------------------------------------------------------
865 // Expose key offsets and values for stub generation.
866 //---------------------------------------------------------------
// Total negative offset to the intercepted-stub slot: it sits one
// pointer-sized slot before the real-address slot.
867 int GetNegativeOffset()
869 LIMITED_METHOD_CONTRACT;
870 return sizeof(TADDR) + GetNegativeOffsetRealAddr();
// Pointer to the slot holding the next stub in the intercept chain
// (NULL-terminated; see the class comment above).
873 PTR_PTR_Stub GetInterceptedStub()
875 LIMITED_METHOD_CONTRACT;
876 return dac_cast<PTR_PTR_Stub>(
877 dac_cast<TADDR>(this) - GetNegativeOffset());
// The "real" code address is stored in the pointer-sized slot
// immediately preceding the InterceptStub object.
880 int GetNegativeOffsetRealAddr()
882 LIMITED_METHOD_CONTRACT;
883 return sizeof(TADDR);
// Pointer to the slot holding the real code address (which may itself
// be another thunk; see the class comment above).
886 PTR_TADDR GetRealAddr()
888 LIMITED_METHOD_CONTRACT;
889 return dac_cast<PTR_TADDR>(
890 dac_cast<TADDR>(this) - GetNegativeOffsetRealAddr());
893 static Stub* NewInterceptedStub(void* pCode,
898 void ReleaseInterceptedStub();
901 InterceptStub() // Intercept stubs are only created by NewInterceptedStub.
902 { LIMITED_METHOD_CONTRACT; }
906 //-------------------------------------------------------------------------
907 // Each platform encodes the "branch" instruction in a different
908 // way. We use objects derived from InstructionFormat to abstract this
909 // information away. InstructionFormats don't contain any variable data
910 // so they should be allocated statically.
912 // Note that StubLinker does not create or define any InstructionFormats.
915 // The following example shows how to define a InstructionFormat for the
916 // X86 jump near instruction which takes on two forms:
918 // EB xx jmp rel8 ;; SHORT JMP (signed 8-bit offset)
919 // E9 xxxxxxxx jmp rel32 ;; NEAR JMP (signed 32-bit offset)
921 // InstructionFormat's provide StubLinker the following information:
923 // RRT.m_allowedSizes
925 // What are the possible sizes that the reference can
926 // take? The X86 jump can take either an 8-bit or 32-bit offset
927 // so this value is set to (k8|k32). StubLinker will try to
928 // use the smallest size possible.
931 // RRT.m_fTreatSizesAsSigned
932 // Sign-extend or zero-extend small-sized offsets to the platform
933 // code pointer size? For x86, this field is set to TRUE (rel8
934 // is considered signed.)
937 // UINT RRT.GetSizeOfInstruction(refsize, variationCode)
938 // Returns the total size of the instruction in bytes for a given
939 // refsize. For this example:
941 // if (refsize==k8) return 2;
942 // if (refsize==k32) return 5;
945 // UINT RRT.GetSizeOfData(refsize, variationCode)
946 // Returns the total size of the separate data area (if any) that the
947 // instruction needs in bytes for a given refsize. For this example
949 // if (refsize==k32) return 4; else return 0;
951 // The default implementation of this returns 0, so CPUs that have no need
952 // for a separate constant area don't have to worry about it.
955 // BOOL CanReach(refsize, variationcode, fExternal, offset)
956 // Returns whether the instruction with the given variationcode &
957 // refsize can reach the given offset. In the case of External
958 // calls, fExternal is set and offset is the target address. In this case an
959 // implementation should return TRUE only if refsize is big enough to fit a
960 // full machine-sized pointer to anywhere in the address space.
963 // VOID RRT.EmitInstruction(UINT refsize,
964 // __int64 fixedUpReference,
966 // UINT variationCode,
967 // BYTE *pDataBuffer)
969 // Given a chosen size (refsize) and the final offset value
970 // computed by StubLink (fixedUpReference), write out the
971 // instruction into the provided buffer (guaranteed to be
972 // big enough provided you told the truth with GetSizeOfInstruction()).
973 // If needed (e.g. on SH3) a data buffer is also passed in for
974 // storage of constants.
978 // if (refsize==k8) {
979 // pOutBuffer[0] = 0xeb;
980 // pOutBuffer[1] = (__int8)fixedUpReference;
981 // } else if (refsize == k32) {
982 // pOutBuffer[0] = 0xe9;
983 // *((__int32*)(1+pOutBuffer)) = (__int32)fixedUpReference;
985 // CRASH("Bad input.");
988 // VOID RRT.GetHotSpotOffset(UINT refsize, UINT variationCode)
990 // The reference offset is always relative to some IP: this
991 // method tells StubLinker where that IP is relative to the
992 // start of the instruction. For X86, the offset is always
993 // relative to the start of the *following* instruction so
994 // the correct implementation is:
996 // return GetSizeOfInstruction(refsize, variationCode);
998 // Actually, InstructionFormat() provides a default implementation of this
999 // method that does exactly this so X86 need not override this at all.
1002 // The extra "variationCode" argument is an __int32 that StubLinker receives
1003 // from EmitLabelRef() and passes uninterpreted to each RRT method.
1004 // This allows one RRT to handle a family of related instructions,
1005 // for example, the family of conditional jumps on the X86.
1007 //-------------------------------------------------------------------------
1008 class InstructionFormat
1013 // if you want to add a size, insert it in-order (e.g. an 18-bit size would
1014 // go between k16 and k32) and shift all the higher values up. All values
1015 // must be a power of 2 since they get ORed together.
1032 #ifdef INSTRFMT_K64SMALL
1060 #ifdef INSTRFMT_K64SMALL
1061 k64Small = (1 << _k64Small),
1066 kAllowAlways= (1 << _kAllowAlways),
1067 kMax = kAllowAlways,
1070 const UINT m_allowedSizes; // OR mask using above "k" values
1071 InstructionFormat(UINT allowedSizes) : m_allowedSizes(allowedSizes)
1073 LIMITED_METHOD_CONTRACT;
// Total instruction size in bytes for the given refsize/variation.
1076 virtual UINT GetSizeOfInstruction(UINT refsize, UINT variationCode) = 0;
// Writes the instruction (and any constant data) for the chosen refsize and
// final fixed-up reference into the supplied buffers.
1077 virtual VOID EmitInstruction(UINT refsize, __int64 fixedUpReference, BYTE *pCodeBuffer, UINT variationCode, BYTE *pDataBuffer) = 0;
// Offset of the IP the reference is relative to, measured from the start
// of the instruction.
1078 virtual UINT GetHotSpotOffset(UINT refsize, UINT variationCode)
1080 WRAPPER_NO_CONTRACT;
1081 // Default implementation: the offset is added to the
1082 // start of the following instruction.
1083 return GetSizeOfInstruction(refsize, variationCode);
// Size of the separate constant-data area the instruction needs, if any.
1086 virtual UINT GetSizeOfData(UINT refsize, UINT variationCode)
1088 LIMITED_METHOD_CONTRACT;
1089 // Default implementation: 0 extra bytes needed (most CPUs)
// Whether an instruction of the given refsize/variation can reach the
// target. For external references, offset is the absolute target address.
1093 virtual BOOL CanReach(UINT refsize, UINT variationCode, BOOL fExternal, INT_PTR offset)
1095 LIMITED_METHOD_CONTRACT;
1098 // For external, we don't have enough info to predict
1099 // the offset yet so we only accept if the offset size
1100 // is at least as large as the native pointer size.
1102 case InstructionFormat::k8: // intentional fallthru
1103 case InstructionFormat::k16: // intentional fallthru
1105 case InstructionFormat::k24: // intentional fallthru
1108 case InstructionFormat::k26: // intentional fallthru
1110 return FALSE; // no 8 or 16-bit platforms
1112 case InstructionFormat::k32:
1113 return sizeof(LPVOID) <= 4;
1115 case InstructionFormat::k64:
1116 return sizeof(LPVOID) <= 8;
1118 case InstructionFormat::kAllowAlways:
// Internal reference: the offset is known, so just check it fits in the
// reference size. The k24/k26 checks shift first -- presumably the low
// 8/10 bits are encoded elsewhere or implied by alignment; confirm per
// target.
1128 case InstructionFormat::k8:
1129 return FitsInI1(offset);
1131 case InstructionFormat::k16:
1132 return FitsInI2(offset);
1135 case InstructionFormat::k24:
1136 return FitsInI2(offset>>8);
1140 case InstructionFormat::k26:
1141 return FitsInI2(offset>>10);
1143 case InstructionFormat::k32:
1144 return FitsInI4(offset);
1146 case InstructionFormat::k64:
1147 // intentional fallthru
1149 case InstructionFormat::kAllowAlways:
1164 //-------------------------------------------------------------------------
1165 // This stub cache associates stubs with an integer key. For some clients,
1166 // this might represent the size of the argument stack in some cpu-specific
1167 // units (for the x86, the size is expressed in DWORDS.) For other clients,
1168 // this might take into account the style of stub (e.g. whether it returns
1169 // an object reference or not).
1170 //-------------------------------------------------------------------------
1171 class ArgBasedStubCache
1174 ArgBasedStubCache(UINT fixedSize = NUMFIXEDSLOTS);
1175 ~ArgBasedStubCache();
1177 //-----------------------------------------------------------------
1178 // Retrieves the stub associated with the given key.
1179 //-----------------------------------------------------------------
1180 Stub *GetStub(UINT_PTR key);
1182 //-----------------------------------------------------------------
1183 // Tries to associate the stub with the given key.
1184 // It may fail because another thread might swoop in and
1185 // do the association before you do. Thus, you must use the
1186 // return value stub rather than the pStub.
1187 //-----------------------------------------------------------------
1188 Stub* AttemptToSetStub(UINT_PTR key, Stub *pStub);
1191 // Suggestions for number of slots
1201 VOID Dump(); //Diagnostic dump
1206 // How many low-numbered keys have direct access?
1207 UINT m_numFixedSlots;
1209 // For 'm_numFixedSlots' low-numbered keys, we store them in an array.
1220 // High-numbered keys are stored in a sparse linked list.
1221 SlotEntry *m_pSlotEntries;
1228 #define CPUSTUBLINKER StubLinkerCPU
1230 class NDirectStubLinker;
1231 class CPUSTUBLINKER;
1233 #endif // __stublink_h__