1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4 // ===========================================================================
8 // ===========================================================================
9 // This file contains the implementation for creating and using prestubs
10 // ===========================================================================
18 #include "dllimport.h"
19 #include "comdelegate.h"
20 #include "dbginterface.h"
21 #include "listlock.inl"
23 #include "eventtrace.h"
27 #include "virtualcallstub.h"
33 #ifdef FEATURE_INTERPRETER
34 #include "interpreter.h"
37 #ifdef FEATURE_COMINTEROP
38 #include "clrtocomcall.h"
41 #include "mdaassistants.h"
43 #ifdef FEATURE_STACK_SAMPLING
44 #include "stacksampler.h"
47 #ifdef FEATURE_PERFMAP
51 #ifdef FEATURE_TIERED_COMPILATION
52 #include "callcounter.h"
55 #ifndef DACCESS_COMPILE
57 #if defined(FEATURE_JIT_PITCHING)
58 EXTERN_C void CheckStacksAndPitch();
59 EXTERN_C void SavePitchingCandidate(MethodDesc* pMD, ULONG sizeOfCode);
60 EXTERN_C void DeleteFromPitchingCandidate(MethodDesc* pMD);
61 EXTERN_C void MarkMethodNotPitchingCandidate(MethodDesc* pMD);
64 EXTERN_C void STDCALL ThePreStub();
66 #if defined(HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
68 EXTERN_C void STDCALL ThePreStubCompactARM();
70 #endif // defined(HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
72 EXTERN_C void STDCALL ThePreStubPatch();
74 //==========================================================================
// DoBackpatch: once this method has a target entry point (pTarget), propagate it
// into every location that may still hold the temporary (prestub) entry point:
// a FuncPtrStubs precode, the fake-entrypoint precode, the dispatching
// MethodTable's slot, the restored-slot MethodTable used by virtual stub
// dispatch, MethodImpl slots, and (optionally, with fFullBackPatch) any
// duplicate vtable slots. Returns pTarget so the caller can tail-jump to it.
76 PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BOOL fFullBackPatch)
81 PRECONDITION(!ContainsGenericVariables());
82 #ifndef FEATURE_INTERPRETER
83 PRECONDITION(HasStableEntryPoint());
84 #endif // FEATURE_INTERPRETER
85 PRECONDITION(pMT == GetMethodTable());
// With the interpreter, the current entry point may be an interpreter stub and
// is not necessarily "stable", hence the different accessor below.
88 #ifdef FEATURE_INTERPRETER
89 PCODE pTarget = GetMethodEntryPoint();
91 PCODE pTarget = GetStableEntryPoint();
94 if (!HasTemporaryEntryPoint())
97 PCODE pExpected = GetTemporaryEntryPoint();
99 if (pExpected == pTarget)
102 // True interface methods are never backpatched
103 if (pMT->IsInterface() && !IsStatic())
// If a function-pointer stub was created for this method, retarget it first.
// SetTargetInterlocked succeeding means someone may still race; we stop here
// for this round and let a later prestub invocation finish the remaining patching.
108 FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate();
109 if (pFuncPtrStubs != NULL)
111 Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this);
112 if (pFuncPtrPrecode != NULL)
114 // If there is a funcptr precode to patch, we are done for this round.
115 if (pFuncPtrPrecode->SetTargetInterlocked(pTarget))
120 #ifndef HAS_COMPACT_ENTRYPOINTS
121 // Patch the fake entrypoint if necessary
122 Precode::GetPrecodeFromEntryPoint(pExpected)->SetTargetInterlocked(pTarget);
123 #endif // HAS_COMPACT_ENTRYPOINTS
126 if (HasNonVtableSlot())
129 BOOL fBackpatched = FALSE;
// BACKPATCH only writes the slot when it still holds the old (temporary)
// entry point, so concurrent patchers and already-patched slots are benign.
131 #define BACKPATCH(pPatchedMT) \
134 if (pPatchedMT->GetSlot(dwSlot) == pExpected) \
136 pPatchedMT->SetSlot(dwSlot, pTarget); \
137 fBackpatched = TRUE; \
142 // The owning slot has been updated already, so there is no need to backpatch it
143 _ASSERTE(pMT->GetSlot(GetSlot()) == pTarget);
145 if (pDispatchingMT != NULL && pDispatchingMT != pMT)
147 DWORD dwSlot = GetSlot();
149 BACKPATCH(pDispatchingMT);
154 // Backpatch the MethodTable that code:MethodTable::GetRestoredSlot() reads the value from.
155 // VSD reads the slot value using code:MethodTable::GetRestoredSlot(), and so we need to make sure
156 // that it returns the stable entrypoint eventually to avoid going through the slow path all the time.
158 MethodTable * pRestoredSlotMT = pDispatchingMT->GetRestoredSlotMT(dwSlot);
160 BACKPATCH(pRestoredSlotMT);
// Patch any slots this method occupies via MethodImpl (explicit overrides).
166 MethodImpl::Iterator it(this);
169 DWORD dwSlot = it.GetSlot();
173 if (pDispatchingMT != NULL)
175 BACKPATCH(pDispatchingMT);
182 if (fFullBackPatch && !fBackpatched && IsDuplicate())
184 // If this is a duplicate, let's scan the rest of the VTable hunting for other hits.
185 unsigned numSlots = pMT->GetNumVirtuals();
186 for (DWORD dwSlot=0; dwSlot<numSlots; dwSlot++)
190 if (pDispatchingMT != NULL)
192 BACKPATCH(pDispatchingMT);
202 // <TODO> FIX IN BETA 2
204 // g_pNotificationTable is only modified by the DAC and therefore the
205 // optimizer can assume that it will always be its default value and has
206 // been seen to (on IA64 free builds) eliminate the code in DACNotifyCompilationFinished
207 // such that DAC notifications are no longer sent.
209 // TODO: fix this in Beta 2
210 // the RIGHT fix is to make g_pNotificationTable volatile, but currently
211 // we don't have DAC macros to do that. Additionally, there are a number
212 // of other places we should look at DAC definitions to determine if they
213 // should be also declared volatile.
215 // for now we just turn off optimization for these guys
// Optimization is disabled around this function (see the comment block above):
// g_pNotificationTable is written by the DAC, and the optimizer has been seen
// to fold it to its default value, silently dropping DAC notifications.
217 #pragma optimize("", off)
// DACNotifyCompilationFinished: raise a DAC (debugger access component)
// JIT-completion notification for methodDesc, but only if a debugger has
// registered interest in this particular module/token pair.
220 void DACNotifyCompilationFinished(MethodDesc *methodDesc)
231 // Is the list active?
232 JITNotifications jn(g_pNotificationTable);
235 // Get Module and mdToken
236 mdToken t = methodDesc->GetMemberDef();
237 Module *modulePtr = methodDesc->GetModule();
241 #ifndef FEATURE_GDBJIT
// Only notify when this (module, token) was explicitly requested;
// with FEATURE_GDBJIT the notification is raised unconditionally.
243 USHORT jnt = jn.Requested((TADDR) modulePtr, t);
244 if (jnt & CLRDATA_METHNOTIFY_GENERATED)
246 // If so, throw an exception!
248 DACNotify::DoJITNotification(methodDesc);
249 #ifndef FEATURE_GDBJIT
256 #pragma optimize("", on)
261 // ********************************************************************
263 // ********************************************************************
265 // MakeJitWorker is the thread safe way to invoke the JIT compiler
266 // If multiple threads get in here for the same pMD, ALL of them
267 // MUST return the SAME value for pstub.
269 // This function creates a DeadlockAware list of methods being jitted
270 // which prevents us from trying to JIT the same method more than once.
// MakeJitWorker: thread-safe driver for JIT-compiling this method.
// Serializes competing threads through a deadlock-aware per-method lock so that
// every caller observes the SAME native code address, consults the multicore
// JIT manager for pre-compiled code, invokes the JIT (or interpreter), publishes
// the result via SetNativeCodeInterlocked, and raises the profiler/ETW/DAC
// notifications. Returns the code address to execute.
272 PCODE MethodDesc::MakeJitWorker(COR_ILMETHOD_DECODER* ILHeader, CORJIT_FLAGS flags)
274 STANDARD_VM_CONTRACT;
276 BOOL fIsILStub = IsILStub(); // @TODO: understand the need for this special case
278 LOG((LF_JIT, LL_INFO1000000,
279 "MakeJitWorker(" FMT_ADDR ", %s) for %s:%s\n",
281 fIsILStub ? " TRUE" : "FALSE",
282 GetMethodTable()->GetDebugClassName(),
283 m_pszDebugMethodName));
285 #if defined(FEATURE_JIT_PITCHING)
286 CheckStacksAndPitch();
290 ULONG sizeOfCode = 0;
291 #if defined(FEATURE_INTERPRETER) || defined(FEATURE_TIERED_COMPILATION)
292 BOOL fStable = TRUE; // True iff the new code address (to be stored in pCode), is a stable entry point.
294 #ifdef FEATURE_INTERPRETER
295 PCODE pPreviousInterpStub = NULL;
296 BOOL fInterpreted = FALSE;
299 #ifdef FEATURE_MULTICOREJIT
300 MulticoreJitManager & mcJitManager = GetAppDomain()->GetMulticoreJitManager();
302 bool fBackgroundThread = flags.IsSet(CORJIT_FLAGS::CORJIT_FLAG_MCJIT_BACKGROUND);
305 // If this is the first stage of a tiered compilation progression, use tier0, otherwise
306 // use default compilation options
307 #ifdef FEATURE_TIERED_COMPILATION
308 if (!IsEligibleForTieredCompilation())
315 flags.Add(CORJIT_FLAGS(CORJIT_FLAGS::CORJIT_FLAG_TIER0));
320 // Enter the global lock which protects the list of all functions being JITd
321 ListLockHolder pJitLock (GetDomain()->GetJitLock());
323 // It is possible that another thread stepped in before we entered the global lock for the first time.
324 pCode = GetNativeCode();
// An interpreter stub found here is remembered so we can distinguish
// "re-entry to JIT over an interpreter stub" from genuinely new code below.
327 #ifdef FEATURE_INTERPRETER
328 if (Interpreter::InterpretationStubToMethodInfo(pCode) == this)
330 pPreviousInterpStub = pCode;
333 #endif // FEATURE_INTERPRETER
337 const char *description = "jit lock";
338 INDEBUG(description = m_pszDebugMethodName;)
339 ListLockEntryHolder pEntry(ListLockEntry::Find(pJitLock, this, description));
341 // We have an entry now, we can release the global lock
344 // Take the entry lock
346 ListLockEntryLockHolder pEntryLock(pEntry, FALSE);
348 if (pEntryLock.DeadlockAwareAcquire())
350 if (pEntry->m_hrResultCode == S_FALSE)
352 // Nobody has jitted the method yet
356 // We came in to jit but someone beat us so return the
359 // We can just fall through because we will notice below that
360 // the method has code.
362 // @todo: Note that we may have a failed HRESULT here -
363 // we might want to return an early error rather than
364 // repeatedly failing the jit.
369 // Taking this lock would cause a deadlock (presumably because we
370 // are involved in a class constructor circular dependency.) For
371 // instance, another thread may be waiting to run the class constructor
372 // that we are jitting, but is currently jitting this function.
374 // To remedy this, we want to go ahead and do the jitting anyway.
375 // The other threads contending for the lock will then notice that
376 // the jit finished while they were running class constructors, and abort their
377 // current jit effort.
379 // We don't have to do anything special right here since we
380 // can check HasNativeCode() to detect this case later.
382 // Note that at this point we don't have the lock, but that's OK because the
383 // thread which does have the lock is blocked waiting for us.
386 // It is possible that another thread stepped in before we entered the lock.
387 pCode = GetNativeCode();
388 #ifdef FEATURE_INTERPRETER
389 if (pCode != NULL && (pCode != pPreviousInterpStub))
392 #endif // FEATURE_INTERPRETER
397 SString namespaceOrClassName, methodName, methodSignature;
399 PCODE pOtherCode = NULL; // Need to move here due to 'goto GotNewCode'
401 #ifdef FEATURE_MULTICOREJIT
403 bool fCompiledInBackground = false;
405 // If not called from multi-core JIT thread,
406 if (! fBackgroundThread)
408 // Quick check before calling expensive out of line function on this method's domain has code JITted by background thread
409 if (mcJitManager.GetMulticoreJitCodeStorage().GetRemainingMethodCount() > 0)
411 if (MulticoreJitManager::IsMethodSupported(this))
413 pCode = mcJitManager.RequestMethodCode(this); // Query multi-core JIT manager for compiled code
415 // Multicore JIT manager starts background thread to pre-compile methods, but it does not back-patch it/notify profiler/notify DAC,
416 // Jump to GotNewCode to do so
419 fCompiledInBackground = true;
421 #ifdef DEBUGGING_SUPPORTED
422 // Notify the debugger of the jitted function
423 if (g_pDebugInterface != NULL)
425 g_pDebugInterface->JITComplete(this, pCode);
438 // we race with other threads to JIT the code for an IL stub and the
439 // IL header is released once one of the threads completes. As a result
440 // we must be inside the lock to reliably get the IL header for the
443 ILStubResolver* pResolver = AsDynamicMethodDesc()->GetILStubResolver();
444 ILHeader = pResolver->GetILHeader();
448 MdaJitCompilationStart* pProbe = MDA_GET_ASSISTANT(JitCompilationStart);
450 pProbe->NowCompiling(this);
451 #endif // MDA_SUPPORTED
453 #ifdef PROFILING_SUPPORTED
454 // If profiling, need to give a chance for a tool to examine and modify
455 // the IL before it gets to the JIT. This allows one to add probe calls for
456 // things like code coverage, performance, or whatever.
458 BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo());
460 #ifdef FEATURE_MULTICOREJIT
461 // Multicore JIT should be disabled when CORProfilerTrackJITInfo is on
462 // But there could be corner case in which profiler is attached when multicore background thread is calling MakeJitWorker
463 // Disable this block when calling from multicore JIT background thread
464 if (!fBackgroundThread)
469 g_profControlBlock.pProfInterface->JITCompilationStarted((FunctionID) this, TRUE);
470 // The profiler may have changed the code on the callback. Need to
471 // pick up the new code. Note that you have to be fully trusted in
472 // this mode and the code will not be verified.
473 COR_ILMETHOD *pilHeader = GetILHeader(TRUE);
474 new (ILHeader) COR_ILMETHOD_DECODER(pilHeader, GetMDImport(), NULL);
478 unsigned int ilSize, unused;
479 CorInfoOptions corOptions;
480 LPCBYTE ilHeaderPointer = this->AsDynamicMethodDesc()->GetResolver()->GetCodeInfo(&ilSize, &unused, &corOptions, &unused);
482 g_profControlBlock.pProfInterface->DynamicMethodJITCompilationStarted((FunctionID) this, TRUE, ilHeaderPointer, ilSize);
487 #endif // PROFILING_SUPPORTED
488 #ifdef FEATURE_INTERPRETER
489 // We move the ETW event for start of JITting inward, after we make the decision
490 // to JIT rather than interpret.
491 #else // FEATURE_INTERPRETER
492 // Fire an ETW event to mark the beginning of JIT'ing
493 ETW::MethodLog::MethodJitting(this, &namespaceOrClassName, &methodName, &methodSignature);
494 #endif // FEATURE_INTERPRETER
496 #ifdef FEATURE_STACK_SAMPLING
497 #ifdef FEATURE_MULTICOREJIT
498 if (!fBackgroundThread)
499 #endif // FEATURE_MULTICOREJIT
501 StackSampler::RecordJittingInfo(this, flags);
503 #endif // FEATURE_STACK_SAMPLING
// The actual compilation. sizeOfCode is filled in for perf-map/pitching use.
507 pCode = UnsafeJitFunction(this, ILHeader, flags, &sizeOfCode);
511 // If the current thread threw an exception, but a competing thread
512 // somehow succeeded at JITting the same function (e.g., out of memory
513 // encountered on current thread but not competing thread), then go ahead
514 // and swallow this current thread's exception, since we somehow managed
515 // to successfully JIT the code on the other thread.
517 // Note that if a deadlock cycle is broken, that does not result in an
518 // exception--the thread would just pass through the lock and JIT the
519 // function in competition with the other thread (with the winner of the
520 // race decided later on when we do SetNativeCodeInterlocked). This
521 // try/catch is purely to deal with the (unusual) case where a competing
522 // thread succeeded where we aborted.
524 pOtherCode = GetNativeCode();
526 if (pOtherCode == NULL)
528 pEntry->m_hrResultCode = E_FAIL;
532 EX_END_CATCH(RethrowTerminalExceptions)
534 if (pOtherCode != NULL)
536 // Somebody finished jitting recursively while we were jitting the method.
537 // Just use their method & leak the one we finished. (Normally we hope
538 // not to finish our JIT in this case, as we will abort early if we notice
539 // a reentrant jit has occurred. But we may not catch every place so we
540 // do a definitive final check here.
545 _ASSERTE(pCode != NULL);
548 if (GCStress<cfg_instr_jit>::IsEnabled())
550 SetupGcCoverage(this, (BYTE*) pCode);
552 #endif // HAVE_GCCOVER
554 #ifdef FEATURE_INTERPRETER
555 // Determine whether the new code address is "stable"...= is not an interpreter stub.
556 fInterpreted = (Interpreter::InterpretationStubToMethodInfo(pCode) == this);
557 fStable = !fInterpreted;
558 #endif // FEATURE_INTERPRETER
560 #ifdef FEATURE_MULTICOREJIT
562 // If called from multi-core JIT background thread, store code under lock, delay patching until code is queried from application threads
563 if (fBackgroundThread)
565 // Fire an ETW event to mark the end of JIT'ing
566 ETW::MethodLog::MethodJitted(this, &namespaceOrClassName, &methodName, &methodSignature, pCode, 0 /* ReJITID */);
568 #ifdef FEATURE_PERFMAP
569 // Save the JIT'd method information so that perf can resolve JIT'd call frames.
570 PerfMap::LogJITCompiledMethod(this, pCode, sizeOfCode);
573 mcJitManager.GetMulticoreJitCodeStorage().StoreMethodCode(this, pCode);
580 // If this function had already been requested for rejit (before its original
581 // code was jitted), then give the rejit manager a chance to jump-stamp the
582 // code we just compiled so the first thread entering the function will jump
583 // to the prestub and trigger the rejit. Note that the PublishMethodHolder takes
584 // a lock to avoid a particular kind of rejit race. See
585 // code:ReJitManager::PublishMethodHolder::PublishMethodHolder#PublishCode for
586 // details on the rejit race.
588 // Aside from rejit, performing a SetNativeCodeInterlocked at this point
589 // generally ensures that there is only one winning version of the native
590 // code. This also avoids races with profiler overriding ngened code (see
591 // matching SetNativeCodeInterlocked done after
592 // JITCachedFunctionSearchStarted)
593 #ifdef FEATURE_INTERPRETER
594 PCODE pExpected = pPreviousInterpStub;
595 if (pExpected == NULL) pExpected = GetTemporaryEntryPoint();
598 ReJitPublishMethodHolder publishWorker(this, pCode);
599 if (!SetNativeCodeInterlocked(pCode
600 #ifdef FEATURE_INTERPRETER
605 // Another thread beat us to publishing its copy of the JITted code.
606 pCode = GetNativeCode();
609 #if defined(FEATURE_JIT_PITCHING)
612 SavePitchingCandidate(this, sizeOfCode);
617 #ifdef FEATURE_INTERPRETER
618 // State for dynamic methods cannot be freed if the method was ever interpreted,
619 // since there is no way to ensure that it is not in use at the moment.
620 if (IsDynamicMethod() && !fInterpreted && (pPreviousInterpStub == NULL))
622 AsDynamicMethodDesc()->GetResolver()->FreeCompileTimeState();
624 #endif // FEATURE_INTERPRETER
626 // We succeeded in jitting the code, and our jitted code is the one that's going to run now.
627 pEntry->m_hrResultCode = S_OK;
629 #ifdef PROFILING_SUPPORTED
630 // Notify the profiler that JIT completed.
631 // Must do this after the address has been set.
632 // @ToDo: Why must we set the address before notifying the profiler ??
633 // Note that if IsInterceptedForDeclSecurity is set no one should access the jitted code address anyway.
635 BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo());
638 g_profControlBlock.pProfInterface->
639 JITCompilationFinished((FunctionID) this,
640 pEntry->m_hrResultCode,
645 g_profControlBlock.pProfInterface->DynamicMethodJITCompilationFinished((FunctionID) this, pEntry->m_hrResultCode, TRUE);
649 #endif // PROFILING_SUPPORTED
651 #ifdef FEATURE_MULTICOREJIT
652 if (! fCompiledInBackground)
654 #ifdef FEATURE_INTERPRETER
655 // If we didn't JIT, but rather, created an interpreter stub (i.e., fStable is false), don't tell ETW that we did.
657 #endif // FEATURE_INTERPRETER
659 // Fire an ETW event to mark the end of JIT'ing
660 ETW::MethodLog::MethodJitted(this, &namespaceOrClassName, &methodName, &methodSignature, pCode, 0 /* ReJITID */);
662 #ifdef FEATURE_PERFMAP
663 // Save the JIT'd method information so that perf can resolve JIT'd call frames.
664 PerfMap::LogJITCompiledMethod(this, pCode, sizeOfCode);
669 #ifdef FEATURE_MULTICOREJIT
671 // If not called from multi-core JIT thread, and not got code from storage, quick check before calling out of line function
672 if (! fBackgroundThread && ! fCompiledInBackground && mcJitManager.IsRecorderActive())
674 if (MulticoreJitManager::IsMethodSupported(this))
676 mcJitManager.RecordMethodJit(this); // Tell multi-core JIT manager to record method on successful JITting
683 // The notification will only occur if someone has registered for this method.
684 DACNotifyCompilationFinished(this);
691 // We must have a code by now.
692 _ASSERTE(pCode != NULL);
694 LOG((LF_CORDB, LL_EVERYTHING, "MethodDesc::MakeJitWorker finished. Stub is" FMT_ADDR "\n",
700 #ifdef FEATURE_STUBS_AS_IL
702 // CreateInstantiatingILStubTargetSig:
703 // This method is used to create the signature of the target of the ILStub
704 // for instantiating and unboxing stubs, when/where we need to introduce a generic context.
705 // And since the generic context is a hidden parameter, we're creating a signature that
706 // looks like non-generic but has one additional parameter right after the thisptr
// Builds (into stubSigBuilder) the signature of the stub's call target:
// pBaseMD's own signature with one extra native-int parameter for the hidden
// generic context. On x86 the context goes last (after the fixed args);
// elsewhere it goes first, right after the (optional) this pointer.
707 void CreateInstantiatingILStubTargetSig(MethodDesc *pBaseMD,
708 SigTypeContext &typeContext,
709 SigBuilder *stubSigBuilder)
711 STANDARD_VM_CONTRACT;
713 MetaSig msig(pBaseMD);
714 BYTE callingConvention = IMAGE_CEE_CS_CALLCONV_DEFAULT;
716 callingConvention |= IMAGE_CEE_CS_CALLCONV_HASTHIS;
718 stubSigBuilder->AppendByte(callingConvention);
// Parameter count
721 stubSigBuilder->AppendData(msig.NumFixedArgs() + 1); // +1 is for context param
// Return type, normalized against the instantiation in typeContext
724 SigPointer pReturn = msig.GetReturnProps();
725 pReturn.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder, FALSE);
728 // The hidden context parameter
729 stubSigBuilder->AppendElementType(ELEMENT_TYPE_I);
730 #endif // !_TARGET_X86_
732 // Copy rest of the arguments
734 SigPointer pArgs = msig.GetArgProps();
735 for (unsigned i = 0; i < msig.NumFixedArgs(); i++)
737 pArgs.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder);
741 // The hidden context parameter
742 stubSigBuilder->AppendElementType(ELEMENT_TYPE_I);
743 #endif // _TARGET_X86_
// Creates an IL stub that unboxes 'this' (by skipping over the boxed object's
// MethodTable*) and calls pTargetMD, passing the hidden generic-context
// argument that shared generic value-type code requires. The context is
// recovered from the boxed object's own MethodTable.
746 Stub * CreateUnboxingILStubForSharedGenericValueTypeMethods(MethodDesc* pTargetMD)
753 POSTCONDITION(CheckPointer(RETVAL));
757 SigTypeContext typeContext(pTargetMD);
759 MetaSig msig(pTargetMD);
761 _ASSERTE(msig.HasThis());
763 ILStubLinker sl(pTargetMD->GetModule(),
764 pTargetMD->GetSignature(),
767 TRUE, // fTargetHasThis
768 TRUE, // fStubHasThis
769 FALSE // fIsNDirectStub
772 ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch);
774 // 1. Build the new signature
775 SigBuilder stubSigBuilder;
776 CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder);
778 // 2. Emit the method body
779 mdToken tokPinningHelper = pCode->GetToken(MscorlibBinder::GetField(FIELD__PINNING_HELPER__M_DATA));
781 // 2.1 Push the thisptr
782 // We need to skip over the MethodTable*
783 // The trick below will do that.
784 pCode->EmitLoadThis();
785 pCode->EmitLDFLDA(tokPinningHelper);
787 #if defined(_TARGET_X86_)
788 // 2.2 Push the rest of the arguments for x86
789 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
795 // 2.3 Push the hidden context param
796 // The context is going to be captured from the thisptr
797 pCode->EmitLoadThis();
798 pCode->EmitLDFLDA(tokPinningHelper);
799 pCode->EmitLDC(Object::GetOffsetOfFirstField());
801 pCode->EmitLDIND_I();
803 #if !defined(_TARGET_X86_)
804 // 2.4 Push the rest of the arguments for not x86
805 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
811 // 2.5 Push the target address
812 pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY));
// 2.6 Do the calli (hidden context counts as one extra argument)
815 pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1);
// 3. Create the stub MethodDesc in the target's loader module and wire up
// the resolver with the target's signature and MethodDesc.
818 PCCOR_SIGNATURE pSig;
820 pTargetMD->GetSig(&pSig,&cbSig);
821 PTR_Module pLoaderModule = pTargetMD->GetLoaderModule();
822 MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(),
823 pLoaderModule->GetILStubCache()->GetOrCreateStubMethodTable(pLoaderModule),
824 ILSTUB_UNBOXINGILSTUB,
825 pTargetMD->GetModule(),
830 ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver();
832 DWORD cbTargetSig = 0;
833 PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig);
834 pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig);
835 pResolver->SetStubTargetMethodDesc(pTargetMD);
// 4. JIT the IL stub and wrap it in a Stub object for the caller.
837 RETURN Stub::NewStub(JitILStub(pStubMD));
// Creates an IL stub that calls pTargetMD with pHiddenArg passed as the hidden
// generic-context argument. pHiddenArg is a MethodDesc* when the target has a
// method instantiation, otherwise a MethodTable* (per-instantiation statics).
841 Stub * CreateInstantiatingILStub(MethodDesc* pTargetMD, void* pHiddenArg)
848 PRECONDITION(CheckPointer(pHiddenArg));
849 POSTCONDITION(CheckPointer(RETVAL));
853 SigTypeContext typeContext;
854 MethodTable* pStubMT;
855 if (pTargetMD->HasMethodInstantiation())
857 // The pHiddenArg shall be a MethodDesc*
858 MethodDesc* pMD = static_cast<MethodDesc *>(pHiddenArg);
859 SigTypeContext::InitTypeContext(pMD, &typeContext);
860 pStubMT = pMD->GetMethodTable();
864 // The pHiddenArg shall be a MethodTable*
865 SigTypeContext::InitTypeContext(TypeHandle::FromPtr(pHiddenArg), &typeContext);
866 pStubMT = static_cast<MethodTable *>(pHiddenArg);
869 MetaSig msig(pTargetMD);
871 ILStubLinker sl(pTargetMD->GetModule(),
872 pTargetMD->GetSignature(),
875 msig.HasThis(), // fTargetHasThis
876 msig.HasThis(), // fStubHasThis
877 FALSE // fIsNDirectStub
880 ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch);
882 // 1. Build the new signature
883 SigBuilder stubSigBuilder;
884 CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder);
886 // 2. Emit the method body
889 // 2.1 Push the thisptr
890 pCode->EmitLoadThis();
893 #if defined(_TARGET_X86_)
894 // 2.2 Push the rest of the arguments for x86
895 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
899 #endif // _TARGET_X86_
901 // 2.3 Push the hidden context param
// Here the context is the caller-supplied pHiddenArg, baked in as a constant.
903 pCode->EmitLDC((TADDR)pHiddenArg);
905 #if !defined(_TARGET_X86_)
906 // 2.4 Push the rest of the arguments for not x86
907 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
911 #endif // !_TARGET_X86_
913 // 2.5 Push the target address
914 pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY));
// 2.6 Do the calli (hidden context counts as one extra argument)
917 pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1);
// 3. Create the stub MethodDesc and point its resolver at the target.
920 PCCOR_SIGNATURE pSig;
922 pTargetMD->GetSig(&pSig,&cbSig);
923 PTR_Module pLoaderModule = pTargetMD->GetLoaderModule();
924 MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(),
926 ILSTUB_INSTANTIATINGSTUB,
927 pTargetMD->GetModule(),
932 ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver();
934 DWORD cbTargetSig = 0;
935 PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig);
936 pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig);
937 pResolver->SetStubTargetMethodDesc(pTargetMD);
// 4. JIT the IL stub and wrap it in a Stub object for the caller.
939 RETURN Stub::NewStub(JitILStub(pStubMD));
943 /* Make a stub for a value class method that expects a BOXed this pointer */
// MakeUnboxingStubWorker: builds the unboxing thunk for pMD (an unboxing stub
// MethodDesc on a value type). The wrapped ("unboxed") MethodDesc does the real
// work; the stub just adjusts 'this' past the box header. When the target is
// shared generic code needing a method-table arg, an IL stub is used instead.
944 Stub * MakeUnboxingStubWorker(MethodDesc *pMD)
950 POSTCONDITION(CheckPointer(RETVAL));
956 _ASSERTE (pMD->GetMethodTable()->IsValueType());
957 _ASSERTE(!pMD->ContainsGenericVariables());
958 MethodDesc *pUnboxedMD = pMD->GetWrappedMethodDesc();
960 _ASSERTE(pUnboxedMD != NULL && pUnboxedMD != pMD);
962 #ifdef FEATURE_STUBS_AS_IL
963 if (pUnboxedMD->RequiresInstMethodTableArg())
965 pstub = CreateUnboxingILStubForSharedGenericValueTypeMethods(pUnboxedMD);
// Otherwise emit a small assembly unbox thunk via the stub linker.
971 sl.EmitUnboxMethodStub(pUnboxedMD);
972 pstub = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap());
977 #if defined(FEATURE_SHARE_GENERIC_CODE)
// MakeInstantiatingStubWorker: builds the thunk for an instantiating-stub
// MethodDesc, which forwards to the shared generic code with the appropriate
// hidden instantiation argument (the exact MethodDesc for generic methods, or
// the exact MethodTable for per-instantiation statics).
978 Stub * MakeInstantiatingStubWorker(MethodDesc *pMD)
984 PRECONDITION(pMD->IsInstantiatingStub());
985 PRECONDITION(!pMD->RequiresInstArg());
986 PRECONDITION(!pMD->IsSharedByGenericMethodInstantiations());
987 POSTCONDITION(CheckPointer(RETVAL));
991 // Note: this should be kept idempotent ... in the sense that
992 // if multiple threads get in here for the same pMD
993 // it should not matter whose stuff finally gets used.
995 MethodDesc *pSharedMD = NULL;
996 void* extraArg = NULL;
998 // It's an instantiated generic method
999 // Fetch the shared code associated with this instantiation
1000 pSharedMD = pMD->GetWrappedMethodDesc();
1001 _ASSERTE(pSharedMD != NULL && pSharedMD != pMD);
1003 if (pMD->HasMethodInstantiation())
1009 // It's a per-instantiation static method
1010 extraArg = pMD->GetMethodTable();
1014 #ifdef FEATURE_STUBS_AS_IL
1015 pstub = CreateInstantiatingILStub(pSharedMD, extraArg)
1018 _ASSERTE(pSharedMD != NULL && pSharedMD != pMD);
1019 sl.EmitInstantiatingMethodStub(pSharedMD, extraArg);
1021 pstub = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap());
1026 #endif // defined(FEATURE_SHARE_GENERIC_CODE)
1028 #if defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
// Recovers the MethodDesc for a compact (Thumb) entry point. The incoming
// pCode is PC-relative-adjusted by the calling stub, so undo that offset and
// re-set the Thumb bit before decoding the compact entry point.
1030 extern "C" MethodDesc * STDCALL PreStubGetMethodDescForCompactEntryPoint (PCODE pCode)
1032 _ASSERTE (pCode >= PC_REG_RELATIVE_OFFSET);
1034 pCode = (PCODE) (pCode - PC_REG_RELATIVE_OFFSET + THUMB_CODE);
1036 _ASSERTE (MethodDescChunk::IsCompactEntryPointAtAddress (pCode));
1038 return MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, FALSE);
1041 #endif // defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
1043 //=============================================================================
1044 // This function generates the real code for a method and installs it into
1045 // the methoddesc. Usually ***BUT NOT ALWAYS***, this function runs only once
1046 // per methoddesc. In addition to installing the new code, this function
1047 // returns a pointer to the new code for the prestub's convenience.
1048 //=============================================================================
// PreStubWorker: the C entry point reached from ThePreStub. Sets up a
// PrestubMethodFrame over the transition block, resolves the dispatching
// MethodTable for virtual calls (including the ICastable corner case), and
// hands off to MethodDesc::DoPrestub, returning the code address to run.
// Preserves the OS last-error across the whole operation.
1049 extern "C" PCODE STDCALL PreStubWorker(TransitionBlock * pTransitionBlock, MethodDesc * pMD)
1051 PCODE pbRetVal = NULL;
1053 BEGIN_PRESERVE_LAST_ERROR;
1055 STATIC_CONTRACT_THROWS;
1056 STATIC_CONTRACT_GC_TRIGGERS;
1057 STATIC_CONTRACT_MODE_COOPERATIVE;
1058 STATIC_CONTRACT_ENTRY_POINT;
1060 MAKE_CURRENT_THREAD_AVAILABLE();
1063 Thread::ObjectRefFlush(CURRENT_THREAD);
1066 FrameWithCookie<PrestubMethodFrame> frame(pTransitionBlock, pMD);
1067 PrestubMethodFrame * pPFrame = &frame;
1069 pPFrame->Push(CURRENT_THREAD);
1071 INSTALL_MANAGED_EXCEPTION_DISPATCHER;
1072 INSTALL_UNWIND_AND_CONTINUE_HANDLER;
1074 ETWOnStartup (PrestubWorker_V1,PrestubWorkerEnd_V1);
1076 _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");
1078 // Running the PreStubWorker on a method causes us to access its MethodTable
1079 g_IBCLogger.LogMethodDescAccess(pMD);
1081 // Make sure the method table is restored, and method instantiation if present
1082 pMD->CheckRestore();
1084 CONSISTENCY_CHECK(GetAppDomain()->CheckCanExecuteManagedCode(pMD));
1086 // Note this is redundant with the above check but we do it anyway for safety
1088 // This has been disabled so we have a better chance of catching these. Note that this check is
1089 // NOT sufficient for domain neutral and ngen cases.
1091 // pMD->EnsureActive();
1093 MethodTable *pDispatchingMT = NULL;
1095 if (pMD->IsVtableMethod())
1097 OBJECTREF curobj = pPFrame->GetThis();
1099 if (curobj != NULL) // Check for virtual function called non-virtually on a NULL object
1101 pDispatchingMT = curobj->GetTrueMethodTable();
1103 #ifdef FEATURE_ICASTABLE
1104 if (pDispatchingMT->IsICastable())
1106 MethodTable *pMDMT = pMD->GetMethodTable();
1107 TypeHandle objectType(pDispatchingMT);
1108 TypeHandle methodType(pMDMT);
1110 GCStress<cfg_any>::MaybeTrigger();
1111 INDEBUG(curobj = NULL); // curobj is unprotected and CanCastTo() can trigger GC
1112 if (!objectType.CanCastTo(methodType))
1114 // Apparently ICastable magic was involved when we chose this method to be called
1115 // that's why we better stick to the MethodTable it belongs to, otherwise
1116 // DoPrestub() will fail not being able to find implementation for pMD in pDispatchingMT.
1118 pDispatchingMT = pMDMT;
1121 #endif // FEATURE_ICASTABLE
1123 // For value types, the only virtual methods are interface implementations.
1124 // Thus pDispatching == pMT because there
1125 // is no inheritance in value types. Note the BoxedEntryPointStubs are shared
1126 // between all sharable generic instantiations, so the == test is on
1127 // canonical method tables.
1129 MethodTable *pMDMT = pMD->GetMethodTable(); // put this here to see what the MT is in debug mode
1130 _ASSERTE(!pMD->GetMethodTable()->IsValueType() ||
1131 (pMD->IsUnboxingStub() && (pDispatchingMT->GetCanonicalMethodTable() == pMDMT->GetCanonicalMethodTable())));
// DoPrestub can trigger GC and take locks; switch to preemptive mode for it.
1136 GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
1137 pbRetVal = pMD->DoPrestub(pDispatchingMT);
1139 UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
1140 UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;
1143 HardwareExceptionHolder
1145 // Give debugger opportunity to stop here
1149 pPFrame->Pop(CURRENT_THREAD);
1151 POSTCONDITION(pbRetVal != NULL);
1153 END_PRESERVE_LAST_ERROR;
1160 // These are two functions for testing purposes only, in debug builds only. They can be used by setting
1161 // InjectFatalError to 3. They ensure that we really can restore the guard page for SEH try/catch clauses.
1163 // @todo: Do we use this for anything anymore?
// Deliberately overflows the stack so the handler below can verify that the
// guard page is restored after the first overflow is caught.
1165 static void TestSEHGuardPageRestoreOverflow()
// Triggers two successive stack overflows inside PAL SEH try/except blocks;
// reaching the second assert proves the guard page was successfully restored
// after the first overflow was handled. Debug-only diagnostic (see comment above).
1169 static void TestSEHGuardPageRestore()
1171 PAL_TRY(void *, unused, NULL)
1173 TestSEHGuardPageRestoreOverflow();
1175 PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER)
1177 _ASSERTE(!"Got first overflow.");
1181 PAL_TRY(void *, unused, NULL)
1183 TestSEHGuardPageRestoreOverflow();
1185 PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER)
1187 // If you get two asserts, then it works!
1188 _ASSERTE(!"Got second overflow.");
1194 // Separated out the body of PreStubWorker for the case where we don't have a frame.
1196 // Note that pDispatchingMT may not actually be the MT that is indirected through.
1197 // If a virtual method is called non-virtually, pMT will be used to indirect through
1199 // This returns a pointer to the stable entrypoint for the jitted method. Typically, this
1200 // is the same as the pointer to the top of the JITted code of the method. However, in
1201 // the case of methods that require stubs to be executed first (e.g., remoted methods
1202 // that require remoting stubs to be executed first), this stable entrypoint would be a
1203 // pointer to the stub, and not a pointer directly to the JITted code.
1204 PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT)
1209 POSTCONDITION(RETVAL != NULL);
1216 Thread *pThread = GetThread();
1218 MethodTable *pMT = GetMethodTable();
1220 // Running a prestub on a method causes us to access its MethodTable
1221 g_IBCLogger.LogMethodDescAccess(this);
1223 // A secondary layer of defense against executing code in inspection-only assembly.
1224 // This should already have been taken care of by not allowing inspection assemblies
1225 // to be activated. However, this is a very inexpensive piece of insurance in the name
1227 if (IsIntrospectionOnly())
1229 _ASSERTE(!"A ReflectionOnly assembly reached the prestub. This should not have happened.");
1230 COMPlusThrow(kInvalidOperationException, IDS_EE_CODEEXECUTION_IN_INTROSPECTIVE_ASSEMBLY);
1233 if (ContainsGenericVariables())
1235 COMPlusThrow(kInvalidOperationException, IDS_EE_CODEEXECUTION_CONTAINSGENERICVAR);
1238 /************************** DEBUG CHECKS *************************/
1239 /*-----------------------------------------------------------------
1240 // Halt if needed, GC stress, check the sharing count etc.
1244 static unsigned ctr = 0;
1247 if (g_pConfig->ShouldPrestubHalt(this))
1249 _ASSERTE(!"PreStubHalt");
1252 LOG((LF_CLASSLOADER, LL_INFO10000, "In PreStubWorker for %s::%s\n",
1253 m_pszDebugClassName, m_pszDebugMethodName));
1255 // This is a nice place to test out having some fatal EE errors. We do this only in a checked build, and only
1256 // under the InjectFatalError key.
1257 if (g_pConfig->InjectFatalError() == 1)
1259 EEPOLICY_HANDLE_FATAL_ERROR(COR_E_EXECUTIONENGINE);
1261 else if (g_pConfig->InjectFatalError() == 2)
1263 EEPOLICY_HANDLE_FATAL_ERROR(COR_E_STACKOVERFLOW);
1265 else if (g_pConfig->InjectFatalError() == 3)
1267 TestSEHGuardPageRestore();
1270 // Useful to test GC with the prestub on the call stack
1271 if (g_pConfig->ShouldPrestubGC(this))
1274 GCHeapUtilities::GetGCHeap()->GarbageCollect(-1);
1278 STRESS_LOG1(LF_CLASSLOADER, LL_INFO10000, "Prestubworker: method %pM\n", this);
1281 GCStress<cfg_any, EeconfigFastGcSPolicy, CoopGcModePolicy>::MaybeTrigger();
1283 // Are we in the prestub because of a rejit request? If so, let the ReJitManager
1284 // take it from here.
1285 pCode = ReJitManager::DoReJitIfNecessary(this);
1288 // A ReJIT was performed, so nothing left for DoPrestub() to do. Return now.
1290 // The stable entrypoint will either be a pointer to the original JITted code
1291 // (with a jmp at the top to jump to the newly-rejitted code) OR a pointer to any
1292 // stub code that must be executed first (e.g., a remoting stub), which in turn
1293 // will call the original JITted code (which then jmps to the newly-rejitted
1295 RETURN GetStableEntryPoint();
1299 #ifdef FEATURE_COMINTEROP
1300 /************************** INTEROP *************************/
1301 /*-----------------------------------------------------------------
1302 // Some method descriptors are COMPLUS-to-COM call descriptors
1303 // they are not your every day method descriptors, for example
1304 // they don't have an IL or code.
1306 if (IsComPlusCall() || IsGenericComPlusCall())
1308 pCode = GetStubForInteropMethod(this);
1310 GetPrecode()->SetTargetInterlocked(pCode);
1312 RETURN GetStableEntryPoint();
1314 #endif // FEATURE_COMINTEROP
1316 // workaround: This is to handle a punted work item dealing with a skipped module constructor
1317 // due to appdomain unload. Basically shared code was JITted in domain A, and then
1318 // this caused a link to another shared module with a module CCTOR, which was skipped
1319 // or aborted in another appdomain we were trying to propagate the activation to.
1321 // Note that this is not a fix, but that it just minimizes the window in which the
1323 if (pThread->IsAbortRequested())
1325 pThread->HandleThreadAbort();
1328 /************************** CLASS CONSTRUCTOR ********************/
1329 // Make sure .cctor has been run
1331 if (IsClassConstructorTriggeredViaPrestub())
1333 pMT->CheckRunClassInitThrowing();
1336 /************************** BACKPATCHING *************************/
1337 // See if the addr of code has changed from the pre-stub
1338 #ifdef FEATURE_INTERPRETER
1339 if (!IsReallyPointingToPrestub())
1341 if (!IsPointingToPrestub())
1344 // If we are counting calls for tiered compilation, leave the prestub
1345 // in place so that we can continue intercepting method invocations.
1346 // When the TieredCompilationManager has received enough call notifications
1347 // for this method only then do we back-patch it.
1348 #ifdef FEATURE_TIERED_COMPILATION
1349 PCODE pNativeCode = GetNativeCode();
1350 if (pNativeCode && IsEligibleForTieredCompilation())
1352 CallCounter * pCallCounter = GetAppDomain()->GetCallCounter();
1353 BOOL doBackPatch = pCallCounter->OnMethodCalled(this);
1360 LOG((LF_CLASSLOADER, LL_INFO10000,
1361 " In PreStubWorker, method already jitted, backpatching call point\n"));
1362 #if defined(FEATURE_JIT_PITCHING)
1363 MarkMethodNotPitchingCandidate(this);
1365 RETURN DoBackpatch(pMT, pDispatchingMT, TRUE);
1368 // record if remoting needs to intercept this call
1369 BOOL fRemotingIntercepted = IsRemotingInterceptedViaPrestub();
1371 BOOL fReportCompilationFinished = FALSE;
1373 /************************** CODE CREATION *************************/
1374 if (IsUnboxingStub())
1376 pStub = MakeUnboxingStubWorker(this);
1378 #if defined(FEATURE_SHARE_GENERIC_CODE)
1379 else if (IsInstantiatingStub())
1381 pStub = MakeInstantiatingStubWorker(this);
1383 #endif // defined(FEATURE_SHARE_GENERIC_CODE)
1384 else if (IsIL() || IsNoMetadata())
1386 // remember if we need to backpatch the MethodTable slot
1387 BOOL fBackpatch = !fRemotingIntercepted
1388 && IsNativeCodeStableAfterInit();
1390 #ifdef FEATURE_PREJIT
1392 // See if we have any prejitted code to use.
1395 pCode = GetPreImplementedCode();
1397 #ifdef PROFILING_SUPPORTED
1400 BOOL fShouldSearchCache = TRUE;
1403 BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches());
1404 g_profControlBlock.pProfInterface->
1405 JITCachedFunctionSearchStarted((FunctionID) this,
1406 &fShouldSearchCache);
1410 if (!fShouldSearchCache)
1412 #ifdef FEATURE_INTERPRETER
1413 SetNativeCodeInterlocked(NULL, pCode, FALSE);
1415 SetNativeCodeInterlocked(NULL, pCode);
1417 _ASSERTE(!IsPreImplemented());
1421 #endif // PROFILING_SUPPORTED
1425 LOG((LF_ZAP, LL_INFO10000,
1426 "ZAP: Using code" FMT_ADDR "for %s.%s sig=\"%s\" (token %x).\n",
1428 m_pszDebugClassName,
1429 m_pszDebugMethodName,
1430 m_pszDebugMethodSignature,
1433 TADDR pFixupList = GetFixupList();
1434 if (pFixupList != NULL)
1436 Module *pZapModule = GetZapModule();
1437 _ASSERTE(pZapModule != NULL);
1438 if (!pZapModule->FixupDelayList(pFixupList))
1440 _ASSERTE(!"FixupDelayList failed");
1441 ThrowHR(COR_E_BADIMAGEFORMAT);
1446 if (GCStress<cfg_instr_ngen>::IsEnabled())
1447 SetupGcCoverage(this, (BYTE*) pCode);
1448 #endif // HAVE_GCCOVER
1450 #ifdef PROFILING_SUPPORTED
1452 * This notifies the profiler that a search to find a
1453 * cached jitted function has been made.
1456 BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches());
1457 g_profControlBlock.pProfInterface->
1458 JITCachedFunctionSearchFinished((FunctionID) this, COR_PRF_CACHED_FUNCTION_FOUND);
1461 #endif // PROFILING_SUPPORTED
1465 // If not, try to jit it
1468 #endif // FEATURE_PREJIT
1470 #ifdef FEATURE_READYTORUN
1473 Module * pModule = GetModule();
1474 if (pModule->IsReadyToRun())
1476 pCode = pModule->GetReadyToRunInfo()->GetEntryPoint(this);
1478 fReportCompilationFinished = TRUE;
1481 #endif // FEATURE_READYTORUN
1485 NewHolder<COR_ILMETHOD_DECODER> pHeader(NULL);
1486 // Get the information on the method
1487 if (!IsNoMetadata())
1489 COR_ILMETHOD* ilHeader = GetILHeader(TRUE);
1490 if(ilHeader == NULL)
1492 #ifdef FEATURE_COMINTEROP
1493 // Abstract methods can be called through WinRT derivation if the deriving type
1494 // is not implemented in managed code, and calls through the CCW to the abstract
1495 // method. Throw a sensible exception in that case.
1496 if (pMT->IsExportedToWinRT() && IsAbstract())
1498 COMPlusThrowHR(E_NOTIMPL);
1500 #endif // FEATURE_COMINTEROP
1502 COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL);
1505 COR_ILMETHOD_DECODER::DecoderStatus status = COR_ILMETHOD_DECODER::FORMAT_ERROR;
1508 // Decoder ctor can AV on a malformed method header
1509 AVInRuntimeImplOkayHolder AVOkay;
1510 pHeader = new COR_ILMETHOD_DECODER(ilHeader, GetMDImport(), &status);
1512 status = COR_ILMETHOD_DECODER::FORMAT_ERROR;
1515 if (status == COR_ILMETHOD_DECODER::VERIFICATION_ERROR &&
1516 Security::CanSkipVerification(GetModule()->GetDomainAssembly()))
1518 status = COR_ILMETHOD_DECODER::SUCCESS;
1521 if (status != COR_ILMETHOD_DECODER::SUCCESS)
1523 if (status == COR_ILMETHOD_DECODER::VERIFICATION_ERROR)
1525 // Throw a verification HR
1526 COMPlusThrowHR(COR_E_VERIFICATION);
1530 COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL);
1534 #ifdef _VER_EE_VERIFICATION_ENABLED
1535 static ConfigDWORD peVerify;
1537 if (peVerify.val(CLRConfig::EXTERNAL_PEVerify))
1538 Verify(pHeader, TRUE, FALSE); // Throws a VerifierException if verification fails
1539 #endif // _VER_EE_VERIFICATION_ENABLED
1540 } // end if (!IsNoMetadata())
1543 LOG((LF_CLASSLOADER, LL_INFO1000000,
1544 " In PreStubWorker, calling MakeJitWorker\n"));
1546 // Create the precode eagerly if it is going to be needed later.
1549 GetOrCreatePrecode();
1552 // Mark the code as hot in case the method ends up in the native image
1553 g_IBCLogger.LogMethodCodeAccess(this);
1555 pCode = MakeJitWorker(pHeader, CORJIT_FLAGS());
1557 #ifdef FEATURE_INTERPRETER
1558 if ((pCode != NULL) && !HasStableEntryPoint())
1560 // We don't yet have a stable entry point, so don't do backpatching yet.
1561 // But we do have to handle some extra cases that occur in backpatching.
1562 // (Perhaps I *should* get to the backpatching code, but in a mode where we know
1563 // we're not dealing with the stable entry point...)
1564 if (HasNativeCodeSlot())
1566 // We called "SetNativeCodeInterlocked" in MakeJitWorker, which updated the native
1567 // code slot, but I think we also want to update the regular slot...
1568 PCODE tmpEntry = GetTemporaryEntryPoint();
1569 PCODE pFound = FastInterlockCompareExchangePointer(GetAddrOfSlot(), pCode, tmpEntry);
1570 // Doesn't matter if we failed -- if we did, it's because somebody else made progress.
1571 if (pFound != tmpEntry) pCode = pFound;
1574 // Now we handle the case of a FuncPtrPrecode.
1575 FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate();
1576 if (pFuncPtrStubs != NULL)
1578 Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this);
1579 if (pFuncPtrPrecode != NULL)
1581 // If there is a funcptr precode to patch, attempt to patch it. If we lose, that's OK,
1582 // somebody else made progress.
1583 pFuncPtrPrecode->SetTargetInterlocked(pCode);
1587 #endif // FEATURE_INTERPRETER
1588 } // end if (pCode == NULL)
1589 } // end else if (IsIL() || IsNoMetadata())
1590 else if (IsNDirect())
1592 pCode = GetStubForInteropMethod(this);
1593 GetOrCreatePrecode();
1597 // Get the fcall implementation
1598 BOOL fSharedOrDynamicFCallImpl;
1599 pCode = ECall::GetFCallImpl(this, &fSharedOrDynamicFCallImpl);
1601 if (fSharedOrDynamicFCallImpl)
1603 // Fake ctors share one implementation that has to be wrapped by prestub
1604 GetOrCreatePrecode();
1609 pStub = GenerateArrayOpStub((ArrayMethodDesc*)this);
1611 else if (IsEEImpl())
1613 _ASSERTE(GetMethodTable()->IsDelegate());
1614 pCode = COMDelegate::GetInvokeMethodStub((EEImplMethodDesc*)this);
1615 GetOrCreatePrecode();
1619 // This is a method type we don't handle yet
1620 _ASSERTE(!"Unknown Method Type");
1623 /************************** POSTJIT *************************/
1624 #ifndef FEATURE_INTERPRETER
1625 _ASSERTE(pCode == NULL || GetNativeCode() == NULL || pCode == GetNativeCode());
1626 #else // FEATURE_INTERPRETER
1627 // Interpreter adds a new possiblity == someone else beat us to installing an intepreter stub.
1628 _ASSERTE(pCode == NULL || GetNativeCode() == NULL || pCode == GetNativeCode()
1629 || Interpreter::InterpretationStubToMethodInfo(pCode) == this);
1630 #endif // FEATURE_INTERPRETER
1632 // At this point we must have either a pointer to managed code or to a stub. All of the above code
1633 // should have thrown an exception if it couldn't make a stub.
1634 _ASSERTE((pStub != NULL) ^ (pCode != NULL));
1636 /************************** SECURITY *************************/
1638 // Lets check to see if we need declarative security on this stub, If we have
1639 // security checks on this method or class then we need to add an intermediate
1640 // stub that performs declarative checks prior to calling the real stub.
1641 // record if security needs to intercept this call (also depends on whether we plan to use stubs for declarative security)
1644 _ASSERTE((pStub != NULL) ^ (pCode != NULL));
1646 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
1648 // We are seeing memory reordering race around fixups (see DDB 193514 and related bugs). We get into
1649 // situation where the patched precode is visible by other threads, but the resolved fixups
1650 // are not. IT SHOULD NEVER HAPPEN according to our current understanding of x86/x64 memory model.
1651 // (see email thread attached to the bug for details).
1653 // We suspect that there may be bug in the hardware or that hardware may have shortcuts that may be
1654 // causing grief. We will try to avoid the race by executing an extra memory barrier.
1659 // If we are counting calls for tiered compilation, leave the prestub
1660 // in place so that we can continue intercepting method invocations.
1661 // When the TieredCompilationManager has received enough call notifications
1662 // for this method only then do we back-patch it.
1663 #ifdef FEATURE_TIERED_COMPILATION
1664 if (pCode && IsEligibleForTieredCompilation())
1666 CallCounter * pCallCounter = GetAppDomain()->GetCallCounter();
1667 BOOL doBackPatch = pCallCounter->OnMethodCalled(this);
1678 GetPrecode()->SetTargetInterlocked(pCode);
1680 if (!HasStableEntryPoint())
1682 // Is the result an interpreter stub?
1683 #ifdef FEATURE_INTERPRETER
1684 if (Interpreter::InterpretationStubToMethodInfo(pCode) == this)
1686 SetEntryPointInterlocked(pCode);
1689 #endif // FEATURE_INTERPRETER
1691 ReJitPublishMethodHolder publishWorker(this, pCode);
1692 SetStableEntryPointInterlocked(pCode);
1698 if (!GetOrCreatePrecode()->SetTargetInterlocked(pStub->GetEntryPoint()))
1703 if (pStub->HasExternalEntryPoint())
1705 // If the Stub wraps code that is outside of the Stub allocation, then we
1706 // need to free the Stub allocation now.
1711 #ifdef FEATURE_INTERPRETER
1712 _ASSERTE(!IsReallyPointingToPrestub());
1713 #else // FEATURE_INTERPRETER
1714 _ASSERTE(!IsPointingToPrestub());
1715 _ASSERTE(HasStableEntryPoint());
1716 #endif // FEATURE_INTERPRETER
1718 if (fReportCompilationFinished)
1719 DACNotifyCompilationFinished(this);
1721 RETURN DoBackpatch(pMT, pDispatchingMT, FALSE);
1724 #endif // !DACCESS_COMPILE
1726 //==========================================================================
1727 // The following code manages the PreStub. All method stubs initially
1729 //==========================================================================
1731 #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
1732 static PCODE g_UMThunkPreStub;
1733 #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1735 #ifndef DACCESS_COMPILE
1737 void ThePreStubManager::Init(void)
1739 STANDARD_VM_CONTRACT;
1742 // Add the prestub manager
1745 StubManager::AddStubManager(new ThePreStubManager());
1748 //-----------------------------------------------------------
1749 // Initialize the prestub.
1750 //-----------------------------------------------------------
1751 void InitPreStubManager(void)
1753 STANDARD_VM_CONTRACT;
1755 if (NingenEnabled())
1760 #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
1761 g_UMThunkPreStub = GenerateUMThunkPrestub()->GetEntryPoint();
1762 #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1764 ThePreStubManager::Init();
1767 PCODE TheUMThunkPreStub()
1769 LIMITED_METHOD_CONTRACT;
1771 #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
1772 return g_UMThunkPreStub;
1773 #else // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1774 return GetEEFuncEntryPoint(TheUMEntryPrestub);
1775 #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1778 PCODE TheVarargNDirectStub(BOOL hasRetBuffArg)
1780 LIMITED_METHOD_CONTRACT;
1782 #if !defined(_TARGET_X86_)
1785 return GetEEFuncEntryPoint(VarargPInvokeStub_RetBuffArg);
1790 return GetEEFuncEntryPoint(VarargPInvokeStub);
1794 static PCODE PatchNonVirtualExternalMethod(MethodDesc * pMD, PCODE pCode, PTR_CORCOMPILE_IMPORT_SECTION pImportSection, TADDR pIndirection)
1796 STANDARD_VM_CONTRACT;
1799 // Skip fixup precode jump for better perf. Since we have MethodDesc available, we can use cheaper method
1800 // than code:Precode::TryToSkipFixupPrecode.
1802 #ifdef HAS_FIXUP_PRECODE
1803 if (pMD->HasPrecode() && pMD->GetPrecode()->GetType() == PRECODE_FIXUP
1804 && pMD->IsNativeCodeStableAfterInit()
1805 #ifndef HAS_REMOTING_PRECODE
1806 && !pMD->IsRemotingInterceptedViaPrestub()
1810 PCODE pDirectTarget = pMD->IsFCall() ? ECall::GetFCallImpl(pMD) : pMD->GetNativeCode();
1811 if (pDirectTarget != NULL)
1812 pCode = pDirectTarget;
1814 #endif //HAS_FIXUP_PRECODE
1816 if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE)
1818 CORCOMPILE_EXTERNAL_METHOD_THUNK * pThunk = (CORCOMPILE_EXTERNAL_METHOD_THUNK *)pIndirection;
1820 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
1821 INT64 oldValue = *(INT64*)pThunk;
1822 BYTE* pOldValue = (BYTE*)&oldValue;
1824 if (pOldValue[0] == X86_INSTR_CALL_REL32)
1826 INT64 newValue = oldValue;
1827 BYTE* pNewValue = (BYTE*)&newValue;
1828 pNewValue[0] = X86_INSTR_JMP_REL32;
1830 *(INT32 *)(pNewValue+1) = rel32UsingJumpStub((INT32*)(&pThunk->callJmp[1]), pCode, pMD, NULL);
1832 _ASSERTE(IS_ALIGNED((size_t)pThunk, sizeof(INT64)));
1833 EnsureWritableExecutablePages(pThunk, sizeof(INT64));
1834 FastInterlockCompareExchangeLong((INT64*)pThunk, newValue, oldValue);
1836 FlushInstructionCache(GetCurrentProcess(), pThunk, 8);
1838 #elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
1839 // Patchup the thunk to point to the actual implementation of the cross module external method
1840 EnsureWritableExecutablePages(&pThunk->m_pTarget);
1841 pThunk->m_pTarget = pCode;
1843 #if defined(_TARGET_ARM_)
1844 // ThumbBit must be set on the target address
1845 _ASSERTE(pCode & THUMB_CODE);
1848 PORTABILITY_ASSERT("ExternalMethodFixupWorker");
1853 *EnsureWritableExecutablePages((TADDR *)pIndirection) = pCode;
1859 //==========================================================================================
1860 // In NGen images calls to external methods start out pointing to jump thunks.
1861 // These jump thunks initially point to the assembly code _ExternalMethodFixupStub
1862 // It transfers control to ExternalMethodFixupWorker which will patch the jump
1863 // thunk to point to the actual cross module address for the method body
1864 // Some methods also have one-time prestubs; for those we defer the patching until
1865 // we have the final stable method entry point.
1867 EXTERN_C PCODE STDCALL ExternalMethodFixupWorker(TransitionBlock * pTransitionBlock, TADDR pIndirection, DWORD sectionIndex, Module * pModule)
1869 STATIC_CONTRACT_THROWS;
1870 STATIC_CONTRACT_GC_TRIGGERS;
1871 STATIC_CONTRACT_MODE_COOPERATIVE;
1872 STATIC_CONTRACT_ENTRY_POINT;
1874 // We must save (and restore) the Last Error code before we call anything
1875 // that could overwrite it. Any callsite that leads to TlsGetValue will
1876 // potentially overwrite the Last Error code.
1879 // In Dev10 bug 837293 we were overwriting the Last Error code on the first
1880 // call to a PInvoke method. This occurred when we were running a
1881 // (precompiled) PInvoke IL stub implemented in the ngen image.
1883 // In this IL stub implementation we call the native method kernel32!GetFileAttributes,
1884 // and then we immediately try to save the Last Error code by calling the
1885 // mscorlib method System.StubHelpers.StubHelpers.SetLastError().
1887 // However when we are coming from a precompiled IL Stub in an ngen image
1888 // we must use an ExternalMethodFixup to find the target address of
1889 // System.StubHelpers.StubHelpers.SetLastError() and this was overwriting
1890 // the value of the Last Error before it could be retrieved and saved.
1895 BEGIN_PRESERVE_LAST_ERROR;
1897 MAKE_CURRENT_THREAD_AVAILABLE();
1900 Thread::ObjectRefFlush(CURRENT_THREAD);
1903 FrameWithCookie<ExternalMethodFrame> frame(pTransitionBlock);
1904 ExternalMethodFrame * pEMFrame = &frame;
1906 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
1907 // Decode indirection cell from callsite if it is not present
1908 if (pIndirection == NULL)
1910 // Asssume that the callsite is call [xxxxxxxx]
1911 PCODE retAddr = pEMFrame->GetReturnAddress();
1913 pIndirection = *(((TADDR *)retAddr) - 1);
1915 pIndirection = *(((INT32 *)retAddr) - 1) + retAddr;
1920 // FUTURE: Consider always passing in module and section index to avoid the lookups
1921 if (pModule == NULL)
1923 pModule = ExecutionManager::FindZapModule(pIndirection);
1924 sectionIndex = (DWORD)-1;
1926 _ASSERTE(pModule != NULL);
1928 pEMFrame->SetCallSite(pModule, pIndirection);
1930 pEMFrame->Push(CURRENT_THREAD); // Push the new ExternalMethodFrame onto the frame stack
1932 INSTALL_MANAGED_EXCEPTION_DISPATCHER;
1933 INSTALL_UNWIND_AND_CONTINUE_HANDLER;
1935 bool fVirtual = false;
1936 MethodDesc * pMD = NULL;
1937 MethodTable * pMT = NULL;
1941 GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
1943 PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage();
1945 RVA rva = pNativeImage->GetDataRva(pIndirection);
1947 PTR_CORCOMPILE_IMPORT_SECTION pImportSection;
1948 if (sectionIndex != (DWORD)-1)
1950 pImportSection = pModule->GetImportSectionFromIndex(sectionIndex);
1951 _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva));
1955 pImportSection = pModule->GetImportSectionForRVA(rva);
1957 _ASSERTE(pImportSection != NULL);
1960 if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE)
1962 _ASSERTE(pImportSection->EntrySize == sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK));
1963 index = (rva - pImportSection->Section.VirtualAddress) / sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK);
1967 _ASSERTE(pImportSection->EntrySize == sizeof(TADDR));
1968 index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR);
1971 PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures));
1973 PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]);
1975 BYTE kind = *pBlob++;
1977 Module * pInfoModule = pModule;
1978 if (kind & ENCODE_MODULE_OVERRIDE)
1980 DWORD moduleIndex = CorSigUncompressData(pBlob);
1981 pInfoModule = pModule->GetModuleFromIndex(moduleIndex);
1982 kind &= ~ENCODE_MODULE_OVERRIDE;
1988 case ENCODE_METHOD_ENTRY:
1990 pMD = ZapSig::DecodeMethod(pModule,
1994 if (pModule->IsReadyToRun())
1996 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
1997 pMD->EnsureActive();
2003 case ENCODE_METHOD_ENTRY_DEF_TOKEN:
2005 mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef);
2006 pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE);
2008 pMD->PrepareForUseAsADependencyOfANativeImage();
2010 if (pModule->IsReadyToRun())
2012 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2013 pMD->EnsureActive();
2019 case ENCODE_METHOD_ENTRY_REF_TOKEN:
2021 SigTypeContext typeContext;
2022 mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef);
2023 FieldDesc * pFD = NULL;
2025 MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th);
2026 _ASSERTE(pMD != NULL);
2028 pMD->PrepareForUseAsADependencyOfANativeImage();
2030 if (pModule->IsReadyToRun())
2032 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2033 pMD->EnsureActive();
2037 #ifdef FEATURE_WINMD_RESILIENT
2038 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2039 pMD->EnsureActive();
2046 case ENCODE_VIRTUAL_ENTRY:
2048 pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th);
2051 pMD->PrepareForUseAsADependencyOfANativeImage();
2053 if (pMD->IsVtableMethod())
2055 slot = pMD->GetSlot();
2056 pMT = th.IsNull() ? pMD->GetMethodTable() : th.GetMethodTable();
2061 if (pModule->IsReadyToRun())
2063 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2064 pMD->EnsureActive();
2069 case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2071 mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef);
2072 pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE);
2077 case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2079 mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef);
2081 FieldDesc * pFD = NULL;
2083 SigTypeContext typeContext;
2084 MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th, TRUE /* actual type required */);
2085 _ASSERTE(pMD != NULL);
2090 case ENCODE_VIRTUAL_ENTRY_SLOT:
2092 slot = CorSigUncompressData(pBlob);
2093 pMT = ZapSig::DecodeType(pModule, pInfoModule, pBlob).GetMethodTable();
2100 _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND");
2101 ThrowHR(COR_E_BADIMAGEFORMAT);
2106 GCX_COOP_THREAD_EXISTS(CURRENT_THREAD);
2108 // Get the stub manager for this module
2109 VirtualCallStubManager *pMgr = pModule->GetLoaderAllocator()->GetVirtualCallStubManager();
2111 DispatchToken token;
2112 if (pMT->IsInterface())
2113 token = pMT->GetLoaderAllocator()->GetDispatchToken(pMT->GetTypeID(), slot);
2115 token = DispatchToken::CreateDispatchToken(slot);
2117 OBJECTREF *protectedObj = pEMFrame->GetThisPtr();
2118 _ASSERTE(protectedObj != NULL);
2119 if (*protectedObj == NULL) {
2120 COMPlusThrow(kNullReferenceException);
2123 StubCallSite callSite(pIndirection, pEMFrame->GetReturnAddress());
2124 pCode = pMgr->ResolveWorker(&callSite, protectedObj, token, VirtualCallStubManager::SK_LOOKUP);
2125 _ASSERTE(pCode != NULL);
2129 _ASSERTE(pMD != NULL);
2132 // Switch to cooperative mode to avoid racing with GC stackwalk
2133 GCX_COOP_THREAD_EXISTS(CURRENT_THREAD);
2134 pEMFrame->SetFunction(pMD);
2137 pCode = pMD->GetMethodEntryPoint();
2140 // Note that we do not want to call code:MethodDesc::IsPointingToPrestub() here. It does not take remoting interception
2141 // into account and so it would cause otherwise intercepted methods to be JITed. It is a compat issue if the JITing fails.
2143 if (!DoesSlotCallPrestub(pCode))
2145 pCode = PatchNonVirtualExternalMethod(pMD, pCode, pImportSection, pIndirection);
2149 #if defined (FEATURE_JIT_PITCHING)
2150 DeleteFromPitchingCandidate(pMD);
2154 // Force a GC on every jit if the stress level is high enough
2155 GCStress<cfg_any>::MaybeTrigger();
2159 UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
2160 UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;
2162 pEMFrame->Pop(CURRENT_THREAD); // Pop the ExternalMethodFrame from the frame stack
2164 END_PRESERVE_LAST_ERROR;
2170 #if !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_)
2172 //==========================================================================================
2173 // In NGen image, virtual slots inherited from cross-module dependencies point to jump thunks.
2174 // These jump thunk initially point to VirtualMethodFixupStub which transfers control here.
2175 // This method 'VirtualMethodFixupWorker' will patch the jump thunk to point to the actual
2176 // inherited method body after we have executed the precode and have a stable entry point.
2178 EXTERN_C PCODE VirtualMethodFixupWorker(Object * pThisPtr, CORCOMPILE_VIRTUAL_IMPORT_THUNK *pThunk)
2189 _ASSERTE(pThisPtr != NULL);
2190 VALIDATEOBJECT(pThisPtr);
2192 MethodTable * pMT = pThisPtr->GetTrueMethodTable();
2194 WORD slotNumber = pThunk->slotNum;
2195 _ASSERTE(slotNumber != (WORD)-1);
2197 PCODE pCode = pMT->GetRestoredSlot(slotNumber);
2199 if (!DoesSlotCallPrestub(pCode))
2201 // Skip fixup precode jump for better perf
2202 PCODE pDirectTarget = Precode::TryToSkipFixupPrecode(pCode);
2203 if (pDirectTarget != NULL)
2204 pCode = pDirectTarget;
2206 // Patch the thunk to the actual method body
2207 if (EnsureWritableExecutablePagesNoThrow(&pThunk->m_pTarget, sizeof(pThunk->m_pTarget)))
2208 pThunk->m_pTarget = pCode;
2210 #if defined(_TARGET_ARM_)
2211 // The target address should have the thumb bit set
2212 _ASSERTE(pCode & THUMB_CODE);
2216 #endif // !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_)
2218 #ifdef FEATURE_READYTORUN
2220 static PCODE getHelperForInitializedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD)
2222 STANDARD_VM_CONTRACT;
2224 PCODE pHelper = NULL;
2228 case ENCODE_STATIC_BASE_NONGC_HELPER:
2233 baseNonGC = pMT->GetNonGCStaticsBasePointer();
2235 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseNonGC);
2238 case ENCODE_STATIC_BASE_GC_HELPER:
2243 baseGC = pMT->GetGCStaticsBasePointer();
2245 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseGC);
2248 case ENCODE_CCTOR_TRIGGER:
2249 pHelper = DynamicHelpers::CreateReturn(pModule->GetLoaderAllocator());
2251 case ENCODE_FIELD_ADDRESS:
2253 _ASSERTE(pFD->IsStatic());
2261 if (!pFD->IsRVA()) // for RVA the base is ignored
2262 base = pFD->GetBase();
2263 pAddress = pFD->GetStaticAddressHandle((void *)dac_cast<TADDR>(base));
2266 // The following code assumes that the statics are pinned that is not the case for collectible types
2267 _ASSERTE(!pFD->GetEnclosingMethodTable()->Collectible());
2269 // Unbox valuetype fields
2270 if (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE && !pFD->IsRVA())
2271 pHelper = DynamicHelpers::CreateReturnIndirConst(pModule->GetLoaderAllocator(), (TADDR)pAddress, (INT8)Object::GetOffsetOfFirstField());
2273 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)pAddress);
2277 _ASSERTE(!"Unexpected statics CORCOMPILE_FIXUP_BLOB_KIND");
2278 ThrowHR(COR_E_BADIMAGEFORMAT);
// Creates a dynamic helper stub that computes the address of a shared static
// field (fixup kind ENCODE_FIELD_ADDRESS, asserted below). The stub captures a
// StaticFieldAddressArgs record (base-helper fn ptr, moduleID, classID, field
// offset) and tail-dispatches through JIT_StaticFieldAddress_Dynamic, or the
// *_Unbox variant for boxed valuetype statics.
2284 static PCODE getHelperForSharedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD)
2286 STANDARD_VM_CONTRACT;
// Only the field-address fixup kind is routed here.
2288 _ASSERTE(kind == ENCODE_FIELD_ADDRESS);
2290 CorInfoHelpFunc helpFunc = CEEInfo::getSharedStaticsHelper(pFD, pMT);
2292 TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID();
// The NOCTOR helper variants take no classID argument; for all other variants
// pick the classID: dynamic-statics types use the module dynamic entry ID,
// otherwise the plain class index.
2295 if (helpFunc != CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR && helpFunc != CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR)
2297 if (pMT->IsDynamicStatics())
2299 classID = pMT->GetModuleDynamicEntryID();
2303 classID = pMT->GetClassIndex();
// Boxed valuetype statics need an extra unbox indirection in the stub.
2307 bool fUnbox = (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE);
// Track the args allocation so it is released if stub creation throws;
// SuppressRelease below commits it once the helper has been created.
2309 AllocMemTracker amTracker;
2311 StaticFieldAddressArgs * pArgs = (StaticFieldAddressArgs *)amTracker.Track(
2312 pModule->GetLoaderAllocator()->GetHighFrequencyHeap()->
2313 AllocMem(S_SIZE_T(sizeof(StaticFieldAddressArgs))));
2315 pArgs->staticBaseHelper = (FnStaticBaseHelper)CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc);
2316 pArgs->arg0 = moduleID;
2317 pArgs->arg1 = classID;
2318 pArgs->offset = pFD->GetOffset();
// The generated stub passes pArgs to the chosen framed worker.
2320 PCODE pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), (TADDR)pArgs,
2321 fUnbox ? GetEEFuncEntryPoint(JIT_StaticFieldAddressUnbox_Dynamic) : GetEEFuncEntryPoint(JIT_StaticFieldAddress_Dynamic));
// Helper creation succeeded - keep the args memory alive for the stub's lifetime.
2323 amTracker.SuppressRelease();
// Creates a dynamic helper stub that returns the (GC / non-GC, regular /
// thread-local) statics base for pMT. Starts from the non-GC shared-statics
// helper and adjusts the helper enum by fixed deltas; this relies on the
// CorInfoHelpFunc enum laying out the GC/non-GC and NOCTOR/DYNAMICCLASS
// variants at matching relative positions.
2328 static PCODE getHelperForStaticBase(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT)
2330 STANDARD_VM_CONTRACT;
2332 int helpFunc = CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE;
// GC-statics fixup kinds shift to the GC-base helper family.
2334 if (kind == ENCODE_STATIC_BASE_GC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER)
2336 helpFunc = CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
// Dynamic statics need the DYNAMICCLASS helper variant.
2339 if (pMT->IsDynamicStatics())
2341 const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
// No cctor and no boxed statics: the cheaper NOCTOR variant suffices.
2345 if (!pMT->HasClassConstructor() && !pMT->HasBoxedRegularStatics())
2347 const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
// Thread-static fixup kinds shift to the thread-statics helper family.
2351 if (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER)
2353 const int delta = CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
// NOCTOR helpers take only the moduleID; the others also take a classID.
2358 if (helpFunc == CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR || helpFunc == CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR)
2360 pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), pMT->GetModule()->GetModuleID(), CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc));
2364 TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID();
// classID selection mirrors getHelperForSharedStatic: dynamic entry ID for
// dynamic statics, plain class index otherwise.
2367 if (pMT->IsDynamicStatics())
2369 classID = pMT->GetModuleDynamicEntryID();
2373 classID = pMT->GetClassIndex();
2376 pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), moduleID, classID, CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc));
// Returns the address within the transition block where the first argument
// register was spilled (i.e. a pointer to the callee's first argument value).
2382 TADDR GetFirstArgumentRegisterValuePtr(TransitionBlock * pTransitionBlock)
2384 TADDR pArgument = (TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters();
2386 // x86 is special as always
// On x86 the first argument is in ECX, which is not at offset 0 of the
// ArgumentRegisters struct - presumably guarded by an x86-only #ifdef; verify
// against the full source.
2387 pArgument += offsetof(ArgumentRegisters, ECX);
// Decodes a ReadyToRun dynamic generic dictionary lookup fixup and fills in
// pResult (CORINFO_RUNTIME_LOOKUP) with either direct dictionary-walk offsets
// (fast path for plain MVAR/VAR type handles) or a helper-based lookup with a
// dictionary slot found/allocated via DictionaryLayout::FindToken.
//
//   pTransitionBlock        - spilled registers at the call site; the first
//                             argument register holds the generic context
//                             (MethodDesc* or MethodTable*, depending on kind)
//   pModule / pInfoModule   - module containing the fixup / module the blob
//                             signature tokens resolve against
//   pBlob / pBlobStart      - cursor into the signature blob / start of blob
//   pResult                 - out: runtime lookup descriptor for the stub
//   pDictionaryIndexAndSlot - out: dictionary index (hi 16 bits) and slot
//                             number (lo 16 bits)
2393 void ProcessDynamicDictionaryLookup(TransitionBlock * pTransitionBlock,
2395 Module * pInfoModule,
2397 PCCOR_SIGNATURE pBlob,
2398 PCCOR_SIGNATURE pBlobStart,
2399 CORINFO_RUNTIME_LOOKUP * pResult,
2400 DWORD * pDictionaryIndexAndSlot)
2402 STANDARD_VM_CONTRACT;
// The generic context is passed in the first argument register.
2404 TADDR genericContextPtr = *(TADDR*)GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2406 pResult->testForFixup = pResult->testForNull = false;
2407 pResult->signature = NULL;
2409 pResult->indirectFirstOffset = 0;
// Default: fall back to the runtime helper unless a fast path applies below.
2411 pResult->indirections = CORINFO_USEHELPER;
2413 DWORD numGenericArgs = 0;
2414 MethodTable* pContextMT = NULL;
2415 MethodDesc* pContextMD = NULL;
// Method lookups: context is an (instantiated) MethodDesc.
2417 if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD)
2419 pContextMD = (MethodDesc*)genericContextPtr;
2420 numGenericArgs = pContextMD->GetNumGenericMethodArgs();
2421 pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_METHOD;
// Class/this-object lookups: context is a MethodTable.
2425 pContextMT = (MethodTable*)genericContextPtr;
// For THISOBJ the blob first encodes the declaring type; walk from the
// object's actual MethodTable up to the matching parent class.
2427 if (kind == ENCODE_DICTIONARY_LOOKUP_THISOBJ)
2429 TypeHandle contextTypeHandle = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2431 SigPointer p(pBlob);
2435 pContextMT = pContextMT->GetMethodTableMatchingParentClass(contextTypeHandle.AsMethodTable());
2438 numGenericArgs = pContextMT->GetNumGenericArgs();
2439 pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_CLASS;
2442 _ASSERTE(numGenericArgs > 0);
2444 CORCOMPILE_FIXUP_BLOB_KIND signatureKind = (CORCOMPILE_FIXUP_BLOB_KIND)CorSigUncompressData(pBlob);
2447 // Optimization cases
// Fast path: a plain generic variable (MVAR/VAR) can be fetched by direct
// indirection through the per-inst info instead of calling the helper.
2449 if (signatureKind == ENCODE_TYPE_HANDLE)
2451 SigPointer sigptr(pBlob, -1);
2453 CorElementType type;
2454 IfFailThrow(sigptr.GetElemType(&type));
// Method type variable: 2 indirections - MD's m_pPerInstInfo, then the slot.
2456 if ((type == ELEMENT_TYPE_MVAR) && (kind == ENCODE_DICTIONARY_LOOKUP_METHOD))
2458 pResult->indirections = 2;
2459 pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo)
// m_pPerInstInfo may be a relative pointer on some targets; tell the stub
// generator to apply the first offset as a relative fixup.
2461 if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative)
2463 pResult->indirectFirstOffset = 1;
2467 IfFailThrow(sigptr.GetData(&data));
2468 pResult->offsets[1] = sizeof(TypeHandle) * data;
// Class type variable: 3 indirections - MT per-inst info, the type's own
// (last) dictionary, then the slot.
2472 else if ((type == ELEMENT_TYPE_VAR) && (kind != ENCODE_DICTIONARY_LOOKUP_METHOD))
2474 pResult->indirections = 3;
2475 pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo();
2476 pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1);
2479 IfFailThrow(sigptr.GetData(&data));
2480 pResult->offsets[2] = sizeof(TypeHandle) * data;
// The dictionary index must fit in 16 bits of the encoded DWORD below.
2486 if (pContextMT != NULL && pContextMT->GetNumDicts() > 0xFFFF)
2487 ThrowHR(COR_E_BADIMAGEFORMAT);
2489 // Dictionary index and slot number are encoded in a 32-bit DWORD. The higher 16 bits
2490 // are used for the dictionary index, and the lower 16 bits for the slot number.
2491 *pDictionaryIndexAndSlot = (pContextMT == NULL ? 0 : pContextMT->GetNumDicts() - 1);
2492 *pDictionaryIndexAndSlot <<= 16;
2494 WORD dictionarySlot;
2496 if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD)
// Find (or allocate) a slot in the method's dictionary layout for this token.
2498 if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMD->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 1, FromReadyToRunImage, &dictionarySlot))
// Slot may not be populated yet - the stub must null-check and fall back.
2500 pResult->testForNull = 1;
2502 // Indirect through dictionary table pointer in InstantiatedMethodDesc
2503 pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo);
2505 if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative)
2507 pResult->indirectFirstOffset = 1;
2510 *pDictionaryIndexAndSlot |= dictionarySlot;
2514 // It's a class dictionary lookup (CORINFO_LOOKUP_CLASSPARAM or CORINFO_LOOKUP_THISOBJ)
2517 if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMT->GetClass()->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 2, FromReadyToRunImage, &dictionarySlot))
2519 pResult->testForNull = 1;
2521 // Indirect through dictionary table pointer in vtable
2522 pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo();
2524 // Next indirect through the dictionary appropriate to this instantiated type
2525 pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1);
2527 *pDictionaryIndexAndSlot |= dictionarySlot;
// Resolves a ReadyToRun dynamic-helper import cell on first use. Decodes the
// fixup signature blob for the cell, resolves the referenced type/method/field,
// creates an optimized dynamic helper stub where possible, and patches the
// cell to point at it. Returns the helper PCODE (NULL when the caller must
// perform the one-off "reliable fallback" computation itself), and reports the
// decoded kind / type / method / field through the out parameters so
// DynamicHelperWorker can do that fallback.
2532 PCODE DynamicHelperFixup(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND * pKind, TypeHandle * pTH, MethodDesc ** ppMD, FieldDesc ** ppFD)
2534 STANDARD_VM_CONTRACT;
2536 PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage();
2538 RVA rva = pNativeImage->GetDataRva((TADDR)pCell);
// Locate the import section containing the cell and map the cell back to its
// index, which indexes the parallel signature-RVA table.
2540 PTR_CORCOMPILE_IMPORT_SECTION pImportSection = pModule->GetImportSectionFromIndex(sectionIndex);
2541 _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva));
2543 _ASSERTE(pImportSection->EntrySize == sizeof(TADDR));
2545 COUNT_T index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR);
2547 PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures));
2549 PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]);
2550 PCCOR_SIGNATURE pBlobStart = pBlob;
// First byte is the fixup kind, optionally OR'ed with ENCODE_MODULE_OVERRIDE.
2552 BYTE kind = *pBlob++;
2554 Module * pInfoModule = pModule;
2555 if (kind & ENCODE_MODULE_OVERRIDE)
// Tokens in the blob resolve against a different module; read its index.
2557 DWORD moduleIndex = CorSigUncompressData(pBlob);
2558 pInfoModule = pModule->GetModuleFromIndex(moduleIndex);
2559 kind &= ~ENCODE_MODULE_OVERRIDE;
2562 bool fReliable = false;
2564 MethodDesc * pMD = NULL;
2565 FieldDesc * pFD = NULL;
2566 CORINFO_RUNTIME_LOOKUP genericLookup;
2567 DWORD dictionaryIndexAndSlot = -1;
// --- Phase 1: decode the blob and resolve/activate the referenced entity ---
2571 case ENCODE_NEW_HELPER:
2572 th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2573 th.AsMethodTable()->EnsureInstanceActive();
2575 case ENCODE_ISINSTANCEOF_HELPER:
2576 case ENCODE_CHKCAST_HELPER:
2578 case ENCODE_NEW_ARRAY_HELPER:
2579 th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
// Statics fixups additionally run the class constructor up front so the
// generated fast-path helpers never have to trigger it.
2582 case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
2583 case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2584 case ENCODE_STATIC_BASE_NONGC_HELPER:
2585 case ENCODE_STATIC_BASE_GC_HELPER:
2586 case ENCODE_CCTOR_TRIGGER:
2587 th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2589 th.AsMethodTable()->EnsureInstanceActive();
2590 th.AsMethodTable()->CheckRunClassInitThrowing();
2594 case ENCODE_FIELD_ADDRESS:
2595 pFD = ZapSig::DecodeField(pModule, pInfoModule, pBlob, &th);
2596 _ASSERTE(pFD->IsStatic());
2599 case ENCODE_VIRTUAL_ENTRY:
2600 // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2601 // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2602 // case ENCODE_VIRTUAL_ENTRY_SLOT:
2604 case ENCODE_DELEGATE_CTOR:
2606 pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th);
// Generic methods needing an instantiating argument must be wrapped in an
// instantiating stub so they can be called through a plain entry point.
2607 if (pMD->RequiresInstArg())
2609 pMD = MethodDesc::FindOrCreateAssociatedMethodDesc(pMD,
2611 FALSE /* forceBoxedEntryPoint */,
2612 pMD->GetMethodInstantiation(),
2613 FALSE /* allowInstParam */);
2615 pMD->EnsureActive();
2619 case ENCODE_DICTIONARY_LOOKUP_THISOBJ:
2620 case ENCODE_DICTIONARY_LOOKUP_TYPE:
2621 case ENCODE_DICTIONARY_LOOKUP_METHOD:
2622 ProcessDynamicDictionaryLookup(pTransitionBlock, pModule, pInfoModule, kind, pBlob, pBlobStart, &genericLookup, &dictionaryIndexAndSlot);
2626 _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND");
2627 ThrowHR(COR_E_BADIMAGEFORMAT);
// --- Phase 2: build the optimized helper stub for the decoded kind ---
2630 PCODE pHelper = NULL;
2634 // For reliable helpers, exceptions in creating the optimized helper are non-fatal. Swallow them to make CER work well.
2639 case ENCODE_ISINSTANCEOF_HELPER:
2640 case ENCODE_CHKCAST_HELPER:
2642 bool fClassMustBeRestored;
2643 CorInfoHelpFunc helpFunc = CEEInfo::getCastingHelperStatic(th, /* throwing */ (kind == ENCODE_CHKCAST_HELPER), &fClassMustBeRestored);
2644 pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc));
2647 case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
2648 case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2649 case ENCODE_STATIC_BASE_NONGC_HELPER:
2650 case ENCODE_STATIC_BASE_GC_HELPER:
2651 case ENCODE_CCTOR_TRIGGER:
2652 case ENCODE_FIELD_ADDRESS:
2654 MethodTable * pMT = th.AsMethodTable();
// Decide whether the simple "return cached address" helper is usable, or a
// non-trivial helper that recomputes the base each call is required.
2656 bool fNeedsNonTrivialHelper = false;
// Domain-neutral statics have per-domain bases; a constant won't do.
2658 if (pMT->IsDomainNeutral() && !IsSingleAppDomain())
2660 fNeedsNonTrivialHelper = true;
2663 if (pMT->Collectible() && (kind != ENCODE_CCTOR_TRIGGER))
2665 // Collectible statics are not pinned - the fast getters expect statics to be pinned
2666 fNeedsNonTrivialHelper = true;
// Thread/context/RVA statics can't be resolved to a fixed address either.
2672 fNeedsNonTrivialHelper = !!pFD->IsSpecialStatic();
2676 fNeedsNonTrivialHelper = (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER) || (kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER);
2680 if (fNeedsNonTrivialHelper)
2684 if (pFD->IsRVA() || pFD->IsContextStatic())
// No fast getter for these rare kinds - leave pHelper NULL and let the
// caller's fallback path handle every call.
2686 _ASSERTE(!"Fast getter for rare kinds of static fields");
2690 pHelper = getHelperForSharedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD);
2695 pHelper = getHelperForStaticBase(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT);
2700 // Delay the creation of the helper until the type is initialized
2701 if (pMT->IsClassInited())
2702 pHelper = getHelperForInitializedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD);
2707 case ENCODE_VIRTUAL_ENTRY:
2708 // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2709 // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2710 // case ENCODE_VIRTUAL_ENTRY_SLOT:
// Non-virtual target: the entry point is a constant.
2712 if (!pMD->IsVtableMethod())
2714 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode());
// Virtual target: capture (class, method) and dispatch through the framed
// virtual-function-pointer worker at call time.
2718 AllocMemTracker amTracker;
2720 VirtualFunctionPointerArgs * pArgs = (VirtualFunctionPointerArgs *)amTracker.Track(
2721 pModule->GetLoaderAllocator()->GetHighFrequencyHeap()->
2722 AllocMem(S_SIZE_T(sizeof(VirtualFunctionPointerArgs))));
2724 pArgs->classHnd = (CORINFO_CLASS_HANDLE)th.AsPtr();
2725 pArgs->methodHnd = (CORINFO_METHOD_HANDLE)pMD;
2727 pHelper = DynamicHelpers::CreateHelperWithArg(pModule->GetLoaderAllocator(), (TADDR)pArgs,
2728 GetEEFuncEntryPoint(JIT_VirtualFunctionPointer_Dynamic));
2730 amTracker.SuppressRelease();
// Patch the import cell so subsequent calls go straight to the stub.
2739 if (pHelper != NULL)
2741 *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper;
2745 // Always execute the reliable fallback in debug builds
2752 EX_END_CATCH (SwallowAllExceptions);
// Non-reliable kinds: helper creation failures propagate (no swallow).
2758 case ENCODE_NEW_HELPER:
2760 CorInfoHelpFunc helpFunc = CEEInfo::getNewHelperStatic(th.AsMethodTable());
2761 pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc));
2764 case ENCODE_NEW_ARRAY_HELPER:
2766 CorInfoHelpFunc helpFunc = CEEInfo::getNewArrHelperStatic(th);
2767 ArrayTypeDesc *pArrayTypeDesc = th.AsArray();
2768 MethodTable *pArrayMT = pArrayTypeDesc->GetTemplateMethodTable();
2769 pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), dac_cast<TADDR>(pArrayMT), CEEJitInfo::getHelperFtnStatic(helpFunc));
2773 case ENCODE_DELEGATE_CTOR:
2775 MethodTable * pDelegateType = NULL;
// Recover the concrete delegate type from the first argument register (the
// "this" delegate object at the ctor call site), when available.
2780 TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2782 if (pArgument != NULL)
2784 pDelegateType = (*(Object **)pArgument)->GetMethodTable();
2785 _ASSERTE(pDelegateType->IsDelegate());
2789 DelegateCtorArgs ctorData;
2790 ctorData.pMethod = NULL;
2791 ctorData.pArg3 = NULL;
2792 ctorData.pArg4 = NULL;
2793 ctorData.pArg5 = NULL;
2795 MethodDesc * pDelegateCtor = NULL;
2797 if (pDelegateType != NULL)
// Ask the delegate machinery for a specialized ctor for this (type, target).
2799 pDelegateCtor = COMDelegate::GetDelegateCtor(TypeHandle(pDelegateType), pMD, &ctorData);
2801 if (ctorData.pArg4 != NULL || ctorData.pArg5 != NULL)
2803 // This should never happen - we should never get collectible or secure delegates here
2805 pDelegateCtor = NULL;
2809 TADDR target = NULL;
2811 if (pDelegateCtor != NULL)
2813 target = pDelegateCtor->GetMultiCallableAddrOfCode();
// No specialized ctor - fall back to the generic Delegate construction FCall.
2817 target = ECall::GetFCallImpl(MscorlibBinder::GetMethod(METHOD__DELEGATE__CONSTRUCT_DELEGATE));
2818 ctorData.pArg3 = NULL;
// Two-arg vs three-arg stub depending on whether the ctor needs pArg3.
2821 if (ctorData.pArg3 != NULL)
2823 pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), (TADDR)ctorData.pArg3, target);
2827 pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), target);
2832 case ENCODE_DICTIONARY_LOOKUP_THISOBJ:
2833 case ENCODE_DICTIONARY_LOOKUP_TYPE:
2834 case ENCODE_DICTIONARY_LOOKUP_METHOD:
// Emit the dictionary-walk (or helper-call) stub computed in phase 1.
2836 pHelper = DynamicHelpers::CreateDictionaryLookupHelper(pModule->GetLoaderAllocator(), &genericLookup, dictionaryIndexAndSlot, pModule);
2844 if (pHelper != NULL)
2846 *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper;
// Report the decoded kind/entities for the caller's fallback path.
2850 *pKind = (CORCOMPILE_FIXUP_BLOB_KIND)kind;
// Assembly-called worker behind the dynamic-helper import cells. Sets up a
// DynamicHelperFrame over the transition block, asks DynamicHelperFixup to
// create and install the optimized helper stub, and - when no stub was created
// (reliable fallback) - computes the result for THIS call directly and stores
// it over the first argument register so the assembly thunk can return it.
// Returns that fallback result (NULL when a helper stub was installed and the
// thunk should instead re-dispatch through the patched cell).
2858 extern "C" SIZE_T STDCALL DynamicHelperWorker(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, INT frameFlags)
2860 PCODE pHelper = NULL;
2861 SIZE_T result = NULL;
2863 STATIC_CONTRACT_THROWS;
2864 STATIC_CONTRACT_GC_TRIGGERS;
2865 STATIC_CONTRACT_MODE_COOPERATIVE;
2867 MAKE_CURRENT_THREAD_AVAILABLE();
2870 Thread::ObjectRefFlush(CURRENT_THREAD);
// Protect the caller's GC refs spilled in the transition block while we may
// trigger GC below.
2873 FrameWithCookie<DynamicHelperFrame> frame(pTransitionBlock, frameFlags);
2874 DynamicHelperFrame * pFrame = &frame;
2876 pFrame->Push(CURRENT_THREAD);
2878 INSTALL_MANAGED_EXCEPTION_DISPATCHER;
2879 INSTALL_UNWIND_AND_CONTINUE_HANDLER;
2881 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
2882 // Decode indirection cell from callsite if it is not present
2885 // Asssume that the callsite is call [xxxxxxxx]
2886 PCODE retAddr = pFrame->GetReturnAddress();
// x86: absolute [addr] operand precedes the return address;
// AMD64 (presumably the other branch): RIP-relative disp32 - confirm guards
// against the full source.
2888 pCell = *(((TADDR **)retAddr) - 1);
2890 pCell = (TADDR *)(*(((INT32 *)retAddr) - 1) + retAddr);
2894 _ASSERTE(pCell != NULL);
2897 MethodDesc * pMD = NULL;
2898 FieldDesc * pFD = NULL;
2899 CORCOMPILE_FIXUP_BLOB_KIND kind = ENCODE_NONE;
// Fixup work (type loading, stub creation) runs in preemptive mode.
2902 GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
2904 pHelper = DynamicHelperFixup(pTransitionBlock, pCell, sectionIndex, pModule, &kind, &th, &pMD, &pFD);
// No stub was installed: perform the semantics of the fixup once, inline.
2907 if (pHelper == NULL)
// First argument register holds the helper's argument (object/etc.).
2909 TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2913 case ENCODE_ISINSTANCEOF_HELPER:
2914 case ENCODE_CHKCAST_HELPER:
2916 BOOL throwInvalidCast = (kind == ENCODE_CHKCAST_HELPER);
// null passes both isinst and castclass; otherwise run the full check
// (ObjIsInstanceOf throws for a failing castclass).
2917 if (*(Object **)pArgument == NULL || ObjIsInstanceOf(*(Object **)pArgument, th, throwInvalidCast))
2919 result = (SIZE_T)(*(Object **)pArgument);
// isinst failure yields null; castclass failure must have thrown above.
2923 _ASSERTE (!throwInvalidCast);
2928 case ENCODE_STATIC_BASE_NONGC_HELPER:
2929 result = (SIZE_T)th.AsMethodTable()->GetNonGCStaticsBasePointer();
2931 case ENCODE_STATIC_BASE_GC_HELPER:
2932 result = (SIZE_T)th.AsMethodTable()->GetGCStaticsBasePointer();
2934 case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
// Thread statics must be allocated for this thread before taking the base.
2935 ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable());
2936 result = (SIZE_T)th.AsMethodTable()->GetNonGCThreadStaticsBasePointer();
2938 case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2939 ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable());
2940 result = (SIZE_T)th.AsMethodTable()->GetGCThreadStaticsBasePointer();
// Cctor already ran inside DynamicHelperFixup; nothing to compute here.
2942 case ENCODE_CCTOR_TRIGGER:
2944 case ENCODE_FIELD_ADDRESS:
2945 result = (SIZE_T)pFD->GetCurrentStaticAddress();
2947 case ENCODE_VIRTUAL_ENTRY:
2948 // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2949 // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2950 // case ENCODE_VIRTUAL_ENTRY_SLOT:
2952 OBJECTREF objRef = ObjectToOBJECTREF(*(Object **)pArgument);
// GC-protect the receiver across the virtual-code lookup below.
2954 GCPROTECT_BEGIN(objRef);
2957 COMPlusThrow(kNullReferenceException);
2959 // Duplicated logic from JIT_VirtualFunctionPointer_Framed
2960 if (!pMD->IsVtableMethod())
2962 result = pMD->GetMultiCallableAddrOfCode();
2966 result = pMD->GetMultiCallableAddrOfVirtualizedCode(&objRef, th);
2977 UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
2978 UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;
2980 pFrame->Pop(CURRENT_THREAD);
// Hand the fallback result back through the first argument register slot.
2982 if (pHelper == NULL)
2983 *(SIZE_T *)((TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters()) = result;
2987 #endif // FEATURE_READYTORUN
2989 #endif // !DACCESS_COMPILE