1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4 // ===========================================================================
8 // ===========================================================================
9 // This file contains the implementation for creating and using prestubs
10 // ===========================================================================
17 #include "dllimport.h"
18 #include "comdelegate.h"
19 #include "dbginterface.h"
21 #include "eventtrace.h"
25 #include "virtualcallstub.h"
31 #ifdef FEATURE_INTERPRETER
32 #include "interpreter.h"
35 #ifdef FEATURE_COMINTEROP
36 #include "clrtocomcall.h"
39 #include "mdaassistants.h"
41 #ifdef FEATURE_STACK_SAMPLING
42 #include "stacksampler.h"
45 #ifdef FEATURE_PERFMAP
49 #ifdef FEATURE_TIERED_COMPILATION
50 #include "callcounter.h"
53 #if defined(FEATURE_GDBJIT)
55 #endif // FEATURE_GDBJIT
57 #ifndef DACCESS_COMPILE
59 #if defined(FEATURE_JIT_PITCHING)
60 EXTERN_C void CheckStacksAndPitch();
61 EXTERN_C void SavePitchingCandidate(MethodDesc* pMD, ULONG sizeOfCode);
62 EXTERN_C void DeleteFromPitchingCandidate(MethodDesc* pMD);
63 EXTERN_C void MarkMethodNotPitchingCandidate(MethodDesc* pMD);
66 EXTERN_C void STDCALL ThePreStubPatch();
68 #if defined(HAVE_GCCOVER)
69 CrstStatic MethodDesc::m_GCCoverCrst;
71 void MethodDesc::Init()
73 m_GCCoverCrst.Init(CrstGCCover);
78 //==========================================================================
80 PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BOOL fFullBackPatch)
85 PRECONDITION(!ContainsGenericVariables());
86 PRECONDITION(HasStableEntryPoint());
87 PRECONDITION(pMT == GetMethodTable());
90 PCODE pTarget = GetStableEntryPoint();
92 if (!HasTemporaryEntryPoint())
95 PCODE pExpected = GetTemporaryEntryPoint();
97 if (pExpected == pTarget)
100 // True interface methods are never backpatched
101 if (pMT->IsInterface() && !IsStatic())
106 FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate();
107 if (pFuncPtrStubs != NULL)
109 Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this);
110 if (pFuncPtrPrecode != NULL)
112 // If there is a funcptr precode to patch, we are done for this round.
113 if (pFuncPtrPrecode->SetTargetInterlocked(pTarget))
118 #ifndef HAS_COMPACT_ENTRYPOINTS
119 // Patch the fake entrypoint if necessary
120 Precode::GetPrecodeFromEntryPoint(pExpected)->SetTargetInterlocked(pTarget);
121 #endif // HAS_COMPACT_ENTRYPOINTS
124 if (HasNonVtableSlot())
127 BOOL fBackpatched = FALSE;
129 #define BACKPATCH(pPatchedMT) \
132 if (pPatchedMT->GetSlot(dwSlot) == pExpected) \
134 pPatchedMT->SetSlot(dwSlot, pTarget); \
135 fBackpatched = TRUE; \
140 // The owning slot has been updated already, so there is no need to backpatch it
141 _ASSERTE(pMT->GetSlot(GetSlot()) == pTarget);
143 if (pDispatchingMT != NULL && pDispatchingMT != pMT)
145 DWORD dwSlot = GetSlot();
147 BACKPATCH(pDispatchingMT);
152 // Backpatch the MethodTable that code:MethodTable::GetRestoredSlot() reads the value from.
153 // VSD reads the slot value using code:MethodTable::GetRestoredSlot(), and so we need to make sure
154 // that it returns the stable entrypoint eventually to avoid going through the slow path all the time.
156 MethodTable * pRestoredSlotMT = pDispatchingMT->GetRestoredSlotMT(dwSlot);
158 BACKPATCH(pRestoredSlotMT);
164 MethodImpl::Iterator it(this);
167 DWORD dwSlot = it.GetSlot();
171 if (pDispatchingMT != NULL)
173 BACKPATCH(pDispatchingMT);
180 if (fFullBackPatch && !fBackpatched && IsDuplicate())
182 // If this is a duplicate, let's scan the rest of the VTable hunting for other hits.
183 unsigned numSlots = pMT->GetNumVirtuals();
184 for (DWORD dwSlot=0; dwSlot<numSlots; dwSlot++)
188 if (pDispatchingMT != NULL)
190 BACKPATCH(pDispatchingMT);
200 // <TODO> FIX IN BETA 2
202 // g_pNotificationTable is only modified by the DAC and therefore the
203 // optmizer can assume that it will always be its default value and has
204 // been seen to (on IA64 free builds) eliminate the code in DACNotifyCompilationFinished
205 // such that DAC notifications are no longer sent.
207 // TODO: fix this in Beta 2
208 // the RIGHT fix is to make g_pNotificationTable volatile, but currently
209 // we don't have DAC macros to do that. Additionally, there are a number
210 // of other places we should look at DAC definitions to determine if they
211 // should be also declared volatile.
213 // for now we just turn off optimization for these guys
215 #pragma optimize("", off)
218 void DACNotifyCompilationFinished(MethodDesc *methodDesc, PCODE pCode)
229 // Is the list active?
230 JITNotifications jn(g_pNotificationTable);
233 // Get Module and mdToken
234 mdToken t = methodDesc->GetMemberDef();
235 Module *modulePtr = methodDesc->GetModule();
240 USHORT jnt = jn.Requested((TADDR) modulePtr, t);
241 if (jnt & CLRDATA_METHNOTIFY_GENERATED)
243 // If so, throw an exception!
244 DACNotify::DoJITNotification(methodDesc, (TADDR)pCode);
250 #pragma optimize("", on)
254 PCODE MethodDesc::PrepareInitialCode()
256 STANDARD_VM_CONTRACT;
257 PrepareCodeConfig config(NativeCodeVersion(this), TRUE, TRUE);
258 PCODE pCode = PrepareCode(&config);
260 #if defined(FEATURE_GDBJIT) && defined(FEATURE_PAL) && !defined(CROSSGEN_COMPILE)
261 NotifyGdb::MethodPrepared(this);
267 PCODE MethodDesc::PrepareCode(NativeCodeVersion codeVersion)
269 STANDARD_VM_CONTRACT;
271 #ifdef FEATURE_CODE_VERSIONING
272 if (codeVersion.IsDefaultVersion())
276 PrepareCodeConfig config(codeVersion, TRUE, TRUE);
277 return PrepareCode(&config);
278 #ifdef FEATURE_CODE_VERSIONING
282 // a bit slower path (+1 usec?)
283 VersionedPrepareCodeConfig config;
285 CodeVersionManager::TableLockHolder lock(GetCodeVersionManager());
286 config = VersionedPrepareCodeConfig(codeVersion);
288 config.FinishConfiguration();
289 return PrepareCode(&config);
295 PCODE MethodDesc::PrepareCode(PrepareCodeConfig* pConfig)
297 STANDARD_VM_CONTRACT;
299 // If other kinds of code need multi-versioning we could add more cases here,
300 // but for now generation of all other code/stubs occurs in other code paths
301 _ASSERTE(IsIL() || IsNoMetadata());
302 return PrepareILBasedCode(pConfig);
305 PCODE MethodDesc::PrepareILBasedCode(PrepareCodeConfig* pConfig)
307 STANDARD_VM_CONTRACT;
310 if (pConfig->MayUsePrecompiledCode())
312 pCode = GetPrecompiledCode(pConfig);
316 LOG((LF_CLASSLOADER, LL_INFO1000000,
317 " In PrepareILBasedCode, calling JitCompileCode\n"));
318 pCode = JitCompileCode(pConfig);
321 // Mark the code as hot in case the method ends up in the native image
322 g_IBCLogger.LogMethodCodeAccess(this);
327 PCODE MethodDesc::GetPrecompiledCode(PrepareCodeConfig* pConfig)
329 STANDARD_VM_CONTRACT;
332 #ifdef FEATURE_PREJIT
333 pCode = GetPrecompiledNgenCode(pConfig);
336 #ifdef FEATURE_READYTORUN
339 pCode = GetPrecompiledR2RCode(pConfig);
342 pConfig->SetNativeCode(pCode, &pCode);
345 #endif // FEATURE_READYTORUN
350 PCODE MethodDesc::GetPrecompiledNgenCode(PrepareCodeConfig* pConfig)
352 STANDARD_VM_CONTRACT;
355 #ifdef FEATURE_PREJIT
356 pCode = GetPreImplementedCode();
358 #ifdef PROFILING_SUPPORTED
360 // The pre-existing cache search callbacks aren't implemented as you might expect.
361 // Instead of sending a cache search started for all methods, we only send the notification
362 // when we already know a pre-compiled version of the method exists. In the NGEN case we also
363 // don't send callbacks unless the method triggers the prestub which excludes a lot of methods.
364 // From the profiler's perspective this technique is only reliable/predictable when using profiler
365 // instrumented NGEN images (that virtually no profilers use). As-is the callback only
366 // gives an opportunity for the profiler to say whether or not it wants to use the ngen'ed
369 // Despite those oddities I am leaving this behavior as-is during refactoring because trying to
370 // improve it probably offers little value vs. the potential for compat issues and creating more
371 // complexity reasoning how the API behavior changed across runtime releases.
374 BOOL fShouldSearchCache = TRUE;
376 BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches());
377 g_profControlBlock.pProfInterface->JITCachedFunctionSearchStarted((FunctionID)this, &fShouldSearchCache);
381 if (!fShouldSearchCache)
383 SetNativeCodeInterlocked(NULL, pCode);
384 _ASSERTE(!IsPreImplemented());
385 pConfig->SetProfilerRejectedPrecompiledCode();
389 #endif // PROFILING_SUPPORTED
393 LOG((LF_ZAP, LL_INFO10000,
394 "ZAP: Using code" FMT_ADDR "for %s.%s sig=\"%s\" (token %x).\n",
397 m_pszDebugMethodName,
398 m_pszDebugMethodSignature,
401 TADDR pFixupList = GetFixupList();
402 if (pFixupList != NULL)
404 Module *pZapModule = GetZapModule();
405 _ASSERTE(pZapModule != NULL);
406 if (!pZapModule->FixupDelayList(pFixupList))
408 _ASSERTE(!"FixupDelayList failed");
409 ThrowHR(COR_E_BADIMAGEFORMAT);
414 if (GCStress<cfg_instr_ngen>::IsEnabled())
415 SetupGcCoverage(this, (BYTE*)pCode);
416 #endif // HAVE_GCCOVER
418 #ifdef PROFILING_SUPPORTED
420 * This notifies the profiler that a search to find a
421 * cached jitted function has been made.
424 BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches());
425 g_profControlBlock.pProfInterface->
426 JITCachedFunctionSearchFinished((FunctionID)this, COR_PRF_CACHED_FUNCTION_FOUND);
429 #endif // PROFILING_SUPPORTED
432 #endif // FEATURE_PREJIT
438 PCODE MethodDesc::GetPrecompiledR2RCode(PrepareCodeConfig* pConfig)
440 STANDARD_VM_CONTRACT;
443 #ifdef FEATURE_READYTORUN
444 Module * pModule = GetModule();
445 if (pModule->IsReadyToRun())
447 pCode = pModule->GetReadyToRunInfo()->GetEntryPoint(this, pConfig, TRUE /* fFixups */);
453 PCODE MethodDesc::GetMulticoreJitCode()
455 STANDARD_VM_CONTRACT;
458 #ifdef FEATURE_MULTICOREJIT
459 // Quick check before calling expensive out of line function on this method's domain has code JITted by background thread
460 MulticoreJitManager & mcJitManager = GetAppDomain()->GetMulticoreJitManager();
461 if (mcJitManager.GetMulticoreJitCodeStorage().GetRemainingMethodCount() > 0)
463 if (MulticoreJitManager::IsMethodSupported(this))
465 pCode = mcJitManager.RequestMethodCode(this); // Query multi-core JIT manager for compiled code
472 COR_ILMETHOD_DECODER* MethodDesc::GetAndVerifyMetadataILHeader(PrepareCodeConfig* pConfig, COR_ILMETHOD_DECODER* pDecoderMemory)
474 STANDARD_VM_CONTRACT;
476 _ASSERTE(!IsNoMetadata());
478 COR_ILMETHOD_DECODER* pHeader = NULL;
479 COR_ILMETHOD* ilHeader = pConfig->GetILHeader();
480 if (ilHeader == NULL)
482 #ifdef FEATURE_COMINTEROP
483 // Abstract methods can be called through WinRT derivation if the deriving type
484 // is not implemented in managed code, and calls through the CCW to the abstract
485 // method. Throw a sensible exception in that case.
486 if (GetMethodTable()->IsExportedToWinRT() && IsAbstract())
488 COMPlusThrowHR(E_NOTIMPL);
490 #endif // FEATURE_COMINTEROP
492 COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL);
495 COR_ILMETHOD_DECODER::DecoderStatus status = COR_ILMETHOD_DECODER::FORMAT_ERROR;
497 // Decoder ctor can AV on a malformed method header
498 AVInRuntimeImplOkayHolder AVOkay;
499 pHeader = new (pDecoderMemory) COR_ILMETHOD_DECODER(ilHeader, GetMDImport(), &status);
502 if (status == COR_ILMETHOD_DECODER::FORMAT_ERROR)
504 COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL);
510 COR_ILMETHOD_DECODER* MethodDesc::GetAndVerifyNoMetadataILHeader()
512 STANDARD_VM_CONTRACT;
516 ILStubResolver* pResolver = AsDynamicMethodDesc()->GetILStubResolver();
517 return pResolver->GetILHeader();
524 // NoMetadata currently doesn't verify the IL. I'm not sure if that was
525 // a deliberate decision in the past or not, but I've left the behavior
526 // as-is during refactoring.
529 COR_ILMETHOD_DECODER* MethodDesc::GetAndVerifyILHeader(PrepareCodeConfig* pConfig, COR_ILMETHOD_DECODER* pIlDecoderMemory)
531 STANDARD_VM_CONTRACT;
532 _ASSERTE(IsIL() || IsNoMetadata());
536 // The NoMetadata version already has a decoder to use, it doesn't need the stack allocated one
537 return GetAndVerifyNoMetadataILHeader();
541 return GetAndVerifyMetadataILHeader(pConfig, pIlDecoderMemory);
545 // ********************************************************************
547 // ********************************************************************
549 // JitCompileCode is the thread safe way to invoke the JIT compiler
550 // If multiple threads get in here for the same config, ALL of them
551 // MUST return the SAME value for pcode.
553 // This function creates a DeadlockAware list of methods being jitted
554 // which prevents us from trying to JIT the same method more that once.
556 PCODE MethodDesc::JitCompileCode(PrepareCodeConfig* pConfig)
558 STANDARD_VM_CONTRACT;
560 LOG((LF_JIT, LL_INFO1000000,
561 "JitCompileCode(" FMT_ADDR ", %s) for %s:%s\n",
563 IsILStub() ? " TRUE" : "FALSE",
564 GetMethodTable()->GetDebugClassName(),
565 m_pszDebugMethodName));
567 #if defined(FEATURE_JIT_PITCHING)
568 CheckStacksAndPitch();
573 // Enter the global lock which protects the list of all functions being JITd
574 JitListLock::LockHolder pJitLock(GetDomain()->GetJitLock());
576 // It is possible that another thread stepped in before we entered the global lock for the first time.
577 if ((pCode = pConfig->IsJitCancellationRequested()))
582 const char *description = "jit lock";
583 INDEBUG(description = m_pszDebugMethodName;)
584 ReleaseHolder<JitListLockEntry> pEntry(JitListLockEntry::Find(
585 pJitLock, pConfig->GetCodeVersion(), description));
587 // We have an entry now, we can release the global lock
590 // Take the entry lock
592 JitListLockEntry::LockHolder pEntryLock(pEntry, FALSE);
594 if (pEntryLock.DeadlockAwareAcquire())
596 if (pEntry->m_hrResultCode == S_FALSE)
598 // Nobody has jitted the method yet
602 // We came in to jit but someone beat us so return the
605 // We can just fall through because we will notice below that
606 // the method has code.
608 // @todo: Note that we may have a failed HRESULT here -
609 // we might want to return an early error rather than
610 // repeatedly failing the jit.
615 // Taking this lock would cause a deadlock (presumably because we
616 // are involved in a class constructor circular dependency.) For
617 // instance, another thread may be waiting to run the class constructor
618 // that we are jitting, but is currently jitting this function.
620 // To remedy this, we want to go ahead and do the jitting anyway.
621 // The other threads contending for the lock will then notice that
622 // the jit finished while they were running class constructors, and abort their
623 // current jit effort.
625 // We don't have to do anything special right here since we
626 // can check HasNativeCode() to detect this case later.
628 // Note that at this point we don't have the lock, but that's OK because the
629 // thread which does have the lock is blocked waiting for us.
632 // It is possible that another thread stepped in before we entered the lock.
633 if ((pCode = pConfig->IsJitCancellationRequested()))
638 pCode = GetMulticoreJitCode();
641 pConfig->SetNativeCode(pCode, &pCode);
642 pEntry->m_hrResultCode = S_OK;
647 return JitCompileCodeLockedEventWrapper(pConfig, pEntryLock);
653 PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry)
655 STANDARD_VM_CONTRACT;
658 ULONG sizeOfCode = 0;
662 MdaJitCompilationStart* pProbe = MDA_GET_ASSISTANT(JitCompilationStart);
664 pProbe->NowCompiling(this);
665 #endif // MDA_SUPPORTED
667 #ifdef PROFILING_SUPPORTED
669 BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo());
670 // For methods with non-zero rejit id we send ReJITCompilationStarted, otherwise
671 // JITCompilationStarted. It isn't clear if this is the ideal policy for these
672 // notifications yet.
673 ReJITID rejitId = pConfig->GetCodeVersion().GetILCodeVersionId();
676 g_profControlBlock.pProfInterface->ReJITCompilationStarted((FunctionID)this,
681 // If profiling, need to give a chance for a tool to examine and modify
682 // the IL before it gets to the JIT. This allows one to add probe calls for
683 // things like code coverage, performance, or whatever.
687 g_profControlBlock.pProfInterface->JITCompilationStarted((FunctionID)this, TRUE);
692 unsigned int ilSize, unused;
693 CorInfoOptions corOptions;
694 LPCBYTE ilHeaderPointer = this->AsDynamicMethodDesc()->GetResolver()->GetCodeInfo(&ilSize, &unused, &corOptions, &unused);
696 g_profControlBlock.pProfInterface->DynamicMethodJITCompilationStarted((FunctionID)this, TRUE, ilHeaderPointer, ilSize);
701 #endif // PROFILING_SUPPORTED
703 if (!ETW_TRACING_CATEGORY_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER_Context,
707 pCode = JitCompileCodeLocked(pConfig, pEntry, &sizeOfCode, &flags);
711 SString namespaceOrClassName, methodName, methodSignature;
713 // Methods that may be interpreted defer this notification until it is certain
714 // we are jitting and not interpreting in CompileMethodWithEtwWrapper.
715 // Some further refactoring could consolidate the notification to always
716 // occur at the point the interpreter does it, but it might even better
717 // to fix the issues that cause us to avoid generating jit notifications
718 // for interpreted methods in the first place. The interpreter does generate
719 // a small stub of native code but no native-IL mapping.
720 #ifndef FEATURE_INTERPRETER
721 ETW::MethodLog::MethodJitting(this,
722 &namespaceOrClassName,
727 pCode = JitCompileCodeLocked(pConfig, pEntry, &sizeOfCode, &flags);
729 // Interpretted methods skip this notification
730 #ifdef FEATURE_INTERPRETER
731 if (Interpreter::InterpretationStubToMethodInfo(pCode) == NULL)
734 // Fire an ETW event to mark the end of JIT'ing
735 ETW::MethodLog::MethodJitted(this,
736 &namespaceOrClassName,
740 pConfig->GetCodeVersion().GetVersionId(),
741 pConfig->ProfilerRejectedPrecompiledCode(),
742 pConfig->ReadyToRunRejectedPrecompiledCode());
747 #ifdef FEATURE_STACK_SAMPLING
748 StackSampler::RecordJittingInfo(this, flags);
749 #endif // FEATURE_STACK_SAMPLING
751 #ifdef PROFILING_SUPPORTED
753 BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo());
754 // For methods with non-zero rejit id we send ReJITCompilationFinished, otherwise
755 // JITCompilationFinished. It isn't clear if this is the ideal policy for these
756 // notifications yet.
757 ReJITID rejitId = pConfig->GetCodeVersion().GetILCodeVersionId();
761 g_profControlBlock.pProfInterface->ReJITCompilationFinished((FunctionID)this,
767 // Notify the profiler that JIT completed.
768 // Must do this after the address has been set.
769 // @ToDo: Why must we set the address before notifying the profiler ??
773 g_profControlBlock.pProfInterface->
774 JITCompilationFinished((FunctionID)this,
775 pEntry->m_hrResultCode,
780 g_profControlBlock.pProfInterface->DynamicMethodJITCompilationFinished((FunctionID)this, pEntry->m_hrResultCode, TRUE);
785 #endif // PROFILING_SUPPORTED
787 #ifdef FEATURE_INTERPRETER
788 bool isJittedMethod = (Interpreter::InterpretationStubToMethodInfo(pCode) == NULL);
791 // Interpretted methods skip this notification
792 #ifdef FEATURE_INTERPRETER
796 #ifdef FEATURE_PERFMAP
797 // Save the JIT'd method information so that perf can resolve JIT'd call frames.
798 PerfMap::LogJITCompiledMethod(this, pCode, sizeOfCode);
803 #ifdef FEATURE_MULTICOREJIT
804 // Non-initial code versions and multicore jit initial compilation all skip this
805 if (pConfig->NeedsMulticoreJitNotification())
807 MulticoreJitManager & mcJitManager = GetAppDomain()->GetMulticoreJitManager();
808 if (mcJitManager.IsRecorderActive())
810 if (MulticoreJitManager::IsMethodSupported(this))
812 mcJitManager.RecordMethodJit(this); // Tell multi-core JIT manager to record method on successful JITting
818 #ifdef FEATURE_INTERPRETER
822 // The notification will only occur if someone has registered for this method.
823 DACNotifyCompilationFinished(this, pCode);
829 PCODE MethodDesc::JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry, ULONG* pSizeOfCode, CORJIT_FLAGS* pFlags)
831 STANDARD_VM_CONTRACT;
835 // The profiler may have changed the code on the callback. Need to
836 // pick up the new code.
837 COR_ILMETHOD_DECODER ilDecoderTemp;
838 COR_ILMETHOD_DECODER *pilHeader = GetAndVerifyILHeader(pConfig, &ilDecoderTemp);
839 *pFlags = pConfig->GetJitCompilationFlags();
840 PCODE pOtherCode = NULL;
843 pCode = UnsafeJitFunction(this, pilHeader, *pFlags, pSizeOfCode);
847 // If the current thread threw an exception, but a competing thread
848 // somehow succeeded at JITting the same function (e.g., out of memory
849 // encountered on current thread but not competing thread), then go ahead
850 // and swallow this current thread's exception, since we somehow managed
851 // to successfully JIT the code on the other thread.
853 // Note that if a deadlock cycle is broken, that does not result in an
854 // exception--the thread would just pass through the lock and JIT the
855 // function in competition with the other thread (with the winner of the
856 // race decided later on when we do SetNativeCodeInterlocked). This
857 // try/catch is purely to deal with the (unusual) case where a competing
858 // thread succeeded where we aborted.
860 if (!(pOtherCode = pConfig->IsJitCancellationRequested()))
862 pEntry->m_hrResultCode = E_FAIL;
866 EX_END_CATCH(RethrowTerminalExceptions)
868 if (pOtherCode != NULL)
870 // Somebody finished jitting recursively while we were jitting the method.
871 // Just use their method & leak the one we finished. (Normally we hope
872 // not to finish our JIT in this case, as we will abort early if we notice
873 // a reentrant jit has occurred. But we may not catch every place so we
874 // do a definitive final check here.
878 _ASSERTE(pCode != NULL);
881 // Instrument for coverage before trying to publish this version
882 // of the code as the native code, to avoid other threads seeing
883 // partially instrumented methods.
884 if (GCStress<cfg_instr_jit>::IsEnabled())
886 // Do the instrumentation and publish atomically, so that the
887 // instrumentation data always matches the published code.
888 CrstHolder gcCoverLock(&m_GCCoverCrst);
890 // Make sure no other thread has stepped in before us.
891 if ((pOtherCode = pConfig->IsJitCancellationRequested()))
896 SetupGcCoverage(this, (BYTE*)pCode);
898 // This thread should always win the publishing race
899 // since we're under a lock.
900 if (!pConfig->SetNativeCode(pCode, &pOtherCode))
902 _ASSERTE(!"GC Cover native code publish failed");
906 #endif // HAVE_GCCOVER
908 // Aside from rejit, performing a SetNativeCodeInterlocked at this point
909 // generally ensures that there is only one winning version of the native
910 // code. This also avoid races with profiler overriding ngened code (see
911 // matching SetNativeCodeInterlocked done after
912 // JITCachedFunctionSearchStarted)
913 if (!pConfig->SetNativeCode(pCode, &pOtherCode))
915 // Another thread beat us to publishing its copy of the JITted code.
919 #if defined(FEATURE_JIT_PITCHING)
920 SavePitchingCandidate(this, *pSizeOfCode);
923 // We succeeded in jitting the code, and our jitted code is the one that's going to run now.
924 pEntry->m_hrResultCode = S_OK;
931 PrepareCodeConfig::PrepareCodeConfig() {}
933 PrepareCodeConfig::PrepareCodeConfig(NativeCodeVersion codeVersion, BOOL needsMulticoreJitNotification, BOOL mayUsePrecompiledCode) :
934 m_pMethodDesc(codeVersion.GetMethodDesc()),
935 m_nativeCodeVersion(codeVersion),
936 m_needsMulticoreJitNotification(needsMulticoreJitNotification),
937 m_mayUsePrecompiledCode(mayUsePrecompiledCode),
938 m_ProfilerRejectedPrecompiledCode(FALSE),
939 m_ReadyToRunRejectedPrecompiledCode(FALSE)
942 MethodDesc* PrepareCodeConfig::GetMethodDesc()
944 LIMITED_METHOD_CONTRACT;
945 return m_pMethodDesc;
948 PCODE PrepareCodeConfig::IsJitCancellationRequested()
950 LIMITED_METHOD_CONTRACT;
951 return m_pMethodDesc->GetNativeCode();
954 BOOL PrepareCodeConfig::NeedsMulticoreJitNotification()
956 LIMITED_METHOD_CONTRACT;
957 return m_needsMulticoreJitNotification;
960 BOOL PrepareCodeConfig::ProfilerRejectedPrecompiledCode()
962 LIMITED_METHOD_CONTRACT;
963 return m_ProfilerRejectedPrecompiledCode;
966 void PrepareCodeConfig::SetProfilerRejectedPrecompiledCode()
968 LIMITED_METHOD_CONTRACT;
969 m_ProfilerRejectedPrecompiledCode = TRUE;
972 BOOL PrepareCodeConfig::ReadyToRunRejectedPrecompiledCode()
974 LIMITED_METHOD_CONTRACT;
975 return m_ReadyToRunRejectedPrecompiledCode;
978 void PrepareCodeConfig::SetReadyToRunRejectedPrecompiledCode()
980 LIMITED_METHOD_CONTRACT;
981 m_ReadyToRunRejectedPrecompiledCode = TRUE;
984 NativeCodeVersion PrepareCodeConfig::GetCodeVersion()
986 LIMITED_METHOD_CONTRACT;
987 return m_nativeCodeVersion;
990 BOOL PrepareCodeConfig::SetNativeCode(PCODE pCode, PCODE * ppAlternateCodeToUse)
992 LIMITED_METHOD_CONTRACT;
994 // If this function had already been requested for rejit (before its original
995 // code was jitted), then give the CodeVersionManager a chance to jump-stamp the
996 // code we just compiled so the first thread entering the function will jump
997 // to the prestub and trigger the rejit. Note that the PublishMethodHolder takes
998 // a lock to avoid a particular kind of rejit race. See
999 // code:CodeVersionManager::PublishMethodHolder::PublishMethodHolder#PublishCode for
1000 // details on the rejit race.
1002 if (m_pMethodDesc->IsVersionableWithJumpStamp())
1004 PublishMethodHolder publishWorker(GetMethodDesc(), pCode);
1005 if (m_pMethodDesc->SetNativeCodeInterlocked(pCode, NULL))
1012 if (m_pMethodDesc->SetNativeCodeInterlocked(pCode, NULL))
1018 *ppAlternateCodeToUse = m_pMethodDesc->GetNativeCode();
1022 COR_ILMETHOD* PrepareCodeConfig::GetILHeader()
1024 STANDARD_VM_CONTRACT;
1025 return m_pMethodDesc->GetILHeader(TRUE);
1028 CORJIT_FLAGS PrepareCodeConfig::GetJitCompilationFlags()
1030 STANDARD_VM_CONTRACT;
1033 if (m_pMethodDesc->IsILStub())
1035 ILStubResolver* pResolver = m_pMethodDesc->AsDynamicMethodDesc()->GetILStubResolver();
1036 flags = pResolver->GetJitFlags();
1038 #ifdef FEATURE_TIERED_COMPILATION
1039 flags.Add(TieredCompilationManager::GetJitFlags(m_nativeCodeVersion));
1044 BOOL PrepareCodeConfig::MayUsePrecompiledCode()
1046 LIMITED_METHOD_CONTRACT;
1047 return m_mayUsePrecompiledCode;
1050 #ifdef FEATURE_CODE_VERSIONING
1051 VersionedPrepareCodeConfig::VersionedPrepareCodeConfig() {}
1053 VersionedPrepareCodeConfig::VersionedPrepareCodeConfig(NativeCodeVersion codeVersion) :
1054 PrepareCodeConfig(codeVersion, TRUE, FALSE)
1056 LIMITED_METHOD_CONTRACT;
1058 _ASSERTE(!m_nativeCodeVersion.IsDefaultVersion());
1059 _ASSERTE(m_pMethodDesc->GetCodeVersionManager()->LockOwnedByCurrentThread());
1060 m_ilCodeVersion = m_nativeCodeVersion.GetILCodeVersion();
1063 HRESULT VersionedPrepareCodeConfig::FinishConfiguration()
1065 STANDARD_VM_CONTRACT;
1067 _ASSERTE(!GetMethodDesc()->GetCodeVersionManager()->LockOwnedByCurrentThread());
1069 // Any code build stages that do just in time configuration should
1070 // be configured now
1071 #ifdef FEATURE_REJIT
1072 if (m_ilCodeVersion.GetRejitState() != ILCodeVersion::kStateActive)
1074 ReJitManager::ConfigureILCodeVersion(m_ilCodeVersion);
1076 _ASSERTE(m_ilCodeVersion.GetRejitState() == ILCodeVersion::kStateActive);
1082 PCODE VersionedPrepareCodeConfig::IsJitCancellationRequested()
1084 LIMITED_METHOD_CONTRACT;
1085 return m_nativeCodeVersion.GetNativeCode();
1088 BOOL VersionedPrepareCodeConfig::SetNativeCode(PCODE pCode, PCODE * ppAlternateCodeToUse)
1090 LIMITED_METHOD_CONTRACT;
1092 //This isn't the default version so jumpstamp is never needed
1093 _ASSERTE(!m_nativeCodeVersion.IsDefaultVersion());
1094 if (m_nativeCodeVersion.SetNativeCodeInterlocked(pCode, NULL))
1100 *ppAlternateCodeToUse = m_nativeCodeVersion.GetNativeCode();
1105 COR_ILMETHOD* VersionedPrepareCodeConfig::GetILHeader()
1107 STANDARD_VM_CONTRACT;
1108 return m_ilCodeVersion.GetIL();
1111 CORJIT_FLAGS VersionedPrepareCodeConfig::GetJitCompilationFlags()
1113 STANDARD_VM_CONTRACT;
1116 #ifdef FEATURE_REJIT
1117 DWORD profilerFlags = m_ilCodeVersion.GetJitFlags();
1118 flags.Add(ReJitManager::JitFlagsFromProfCodegenFlags(profilerFlags));
1121 #ifdef FEATURE_TIERED_COMPILATION
1122 flags.Add(TieredCompilationManager::GetJitFlags(m_nativeCodeVersion));
1128 #endif //FEATURE_CODE_VERSIONING
1130 #ifdef FEATURE_STUBS_AS_IL
1132 // CreateInstantiatingILStubTargetSig:
1133 // This method is used to create the signature of the target of the ILStub
1134 // for instantiating and unboxing stubs, when/where we need to introduce a generic context.
1135 // And since the generic context is a hidden parameter, we're creating a signature that
1136 // looks like non-generic but has one additional parameter right after the thisptr
1137 void CreateInstantiatingILStubTargetSig(MethodDesc *pBaseMD,
1138 SigTypeContext &typeContext,
1139 SigBuilder *stubSigBuilder)
1141 STANDARD_VM_CONTRACT;
1143 MetaSig msig(pBaseMD);
1144 BYTE callingConvention = IMAGE_CEE_CS_CALLCONV_DEFAULT;
1146 callingConvention |= IMAGE_CEE_CS_CALLCONV_HASTHIS;
1147 // CallingConvention
1148 stubSigBuilder->AppendByte(callingConvention);
1151 stubSigBuilder->AppendData(msig.NumFixedArgs() + 1); // +1 is for context param
1154 SigPointer pReturn = msig.GetReturnProps();
1155 pReturn.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder, FALSE);
1157 #ifndef _TARGET_X86_
1158 // The hidden context parameter
1159 stubSigBuilder->AppendElementType(ELEMENT_TYPE_I);
1160 #endif // !_TARGET_X86_
1162 // Copy rest of the arguments
1164 SigPointer pArgs = msig.GetArgProps();
1165 for (unsigned i = 0; i < msig.NumFixedArgs(); i++)
1167 pArgs.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder);
1171 // The hidden context parameter
1172 stubSigBuilder->AppendElementType(ELEMENT_TYPE_I);
1173 #endif // _TARGET_X86_
1176 Stub * CreateUnboxingILStubForSharedGenericValueTypeMethods(MethodDesc* pTargetMD)
1183 POSTCONDITION(CheckPointer(RETVAL));
1187 SigTypeContext typeContext(pTargetMD);
1189 MetaSig msig(pTargetMD);
1191 _ASSERTE(msig.HasThis());
1193 ILStubLinker sl(pTargetMD->GetModule(),
1194 pTargetMD->GetSignature(),
1197 TRUE, // fTargetHasThis
1198 TRUE, // fStubHasThis
1199 FALSE // fIsNDirectStub
1202 ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch);
1204 // 1. Build the new signature
1205 SigBuilder stubSigBuilder;
1206 CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder);
1208 // 2. Emit the method body
1209 mdToken tokPinningHelper = pCode->GetToken(MscorlibBinder::GetField(FIELD__PINNING_HELPER__M_DATA));
1211 // 2.1 Push the thisptr
1212 // We need to skip over the MethodTable*
1213 // The trick below will do that.
1214 pCode->EmitLoadThis();
1215 pCode->EmitLDFLDA(tokPinningHelper);
1217 #if defined(_TARGET_X86_)
1218 // 2.2 Push the rest of the arguments for x86
1219 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
1221 pCode->EmitLDARG(i);
1225 // 2.3 Push the hidden context param
1226 // The context is going to be captured from the thisptr
1227 pCode->EmitLoadThis();
1228 pCode->EmitLDFLDA(tokPinningHelper);
1229 pCode->EmitLDC(Object::GetOffsetOfFirstField());
1231 pCode->EmitLDIND_I();
1233 #if !defined(_TARGET_X86_)
1234 // 2.4 Push the rest of the arguments for not x86
1235 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
1237 pCode->EmitLDARG(i);
1241 // 2.5 Push the target address
1242 pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY));
1245 pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1);
1248 PCCOR_SIGNATURE pSig;
1250 pTargetMD->GetSig(&pSig,&cbSig);
1251 PTR_Module pLoaderModule = pTargetMD->GetLoaderModule();
1252 MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(),
1253 pLoaderModule->GetILStubCache()->GetOrCreateStubMethodTable(pLoaderModule),
1254 ILSTUB_UNBOXINGILSTUB,
1255 pTargetMD->GetModule(),
1260 ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver();
1262 DWORD cbTargetSig = 0;
1263 PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig);
1264 pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig);
1265 pResolver->SetStubTargetMethodDesc(pTargetMD);
1267 RETURN Stub::NewStub(JitILStub(pStubMD));
1271 Stub * CreateInstantiatingILStub(MethodDesc* pTargetMD, void* pHiddenArg)
1278 PRECONDITION(CheckPointer(pHiddenArg));
1279 POSTCONDITION(CheckPointer(RETVAL));
1283 SigTypeContext typeContext;
1284 MethodTable* pStubMT;
1285 if (pTargetMD->HasMethodInstantiation())
1287 // The pHiddenArg shall be a MethodDesc*
1288 MethodDesc* pMD = static_cast<MethodDesc *>(pHiddenArg);
1289 SigTypeContext::InitTypeContext(pMD, &typeContext);
1290 pStubMT = pMD->GetMethodTable();
1294 // The pHiddenArg shall be a MethodTable*
1295 SigTypeContext::InitTypeContext(TypeHandle::FromPtr(pHiddenArg), &typeContext);
1296 pStubMT = static_cast<MethodTable *>(pHiddenArg);
1299 MetaSig msig(pTargetMD);
1301 ILStubLinker sl(pTargetMD->GetModule(),
1302 pTargetMD->GetSignature(),
1305 msig.HasThis(), // fTargetHasThis
1306 msig.HasThis(), // fStubHasThis
1307 FALSE // fIsNDirectStub
1310 ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch);
1312 // 1. Build the new signature
1313 SigBuilder stubSigBuilder;
1314 CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder);
1316 // 2. Emit the method body
1319 // 2.1 Push the thisptr
1320 pCode->EmitLoadThis();
1323 #if defined(_TARGET_X86_)
1324 // 2.2 Push the rest of the arguments for x86
1325 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
1327 pCode->EmitLDARG(i);
1329 #endif // _TARGET_X86_
1331 // 2.3 Push the hidden context param
1332 // InstantiatingStub
1333 pCode->EmitLDC((TADDR)pHiddenArg);
1335 #if !defined(_TARGET_X86_)
1336 // 2.4 Push the rest of the arguments for not x86
1337 for (unsigned i = 0; i < msig.NumFixedArgs();i++)
1339 pCode->EmitLDARG(i);
1341 #endif // !_TARGET_X86_
1343 // 2.5 Push the target address
1344 pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY));
1347 pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1);
1350 PCCOR_SIGNATURE pSig;
1352 pTargetMD->GetSig(&pSig,&cbSig);
1353 PTR_Module pLoaderModule = pTargetMD->GetLoaderModule();
1354 MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(),
1356 ILSTUB_INSTANTIATINGSTUB,
1357 pTargetMD->GetModule(),
1362 ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver();
1364 DWORD cbTargetSig = 0;
1365 PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig);
1366 pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig);
1367 pResolver->SetStubTargetMethodDesc(pTargetMD);
1369 RETURN Stub::NewStub(JitILStub(pStubMD));
1373 /* Make a stub that for a value class method that expects a BOXed this pointer */
1374 Stub * MakeUnboxingStubWorker(MethodDesc *pMD)
1380 POSTCONDITION(CheckPointer(RETVAL));
1386 _ASSERTE (pMD->GetMethodTable()->IsValueType());
1387 _ASSERTE(!pMD->ContainsGenericVariables());
1388 MethodDesc *pUnboxedMD = pMD->GetWrappedMethodDesc();
1390 _ASSERTE(pUnboxedMD != NULL && pUnboxedMD != pMD);
1392 #ifdef FEATURE_STUBS_AS_IL
1393 if (pUnboxedMD->RequiresInstMethodTableArg())
1395 pstub = CreateUnboxingILStubForSharedGenericValueTypeMethods(pUnboxedMD);
1401 sl.EmitUnboxMethodStub(pUnboxedMD);
1402 pstub = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap());
1407 #if defined(FEATURE_SHARE_GENERIC_CODE)
1408 Stub * MakeInstantiatingStubWorker(MethodDesc *pMD)
1414 PRECONDITION(pMD->IsInstantiatingStub());
1415 PRECONDITION(!pMD->RequiresInstArg());
1416 PRECONDITION(!pMD->IsSharedByGenericMethodInstantiations());
1417 POSTCONDITION(CheckPointer(RETVAL));
1421 // Note: this should be kept idempotent ... in the sense that
1422 // if multiple threads get in here for the same pMD
1423 // it should not matter whose stuff finally gets used.
1425 MethodDesc *pSharedMD = NULL;
1426 void* extraArg = NULL;
1428 // It's an instantiated generic method
1429 // Fetch the shared code associated with this instantiation
1430 pSharedMD = pMD->GetWrappedMethodDesc();
1431 _ASSERTE(pSharedMD != NULL && pSharedMD != pMD);
1433 if (pMD->HasMethodInstantiation())
1439 // It's a per-instantiation static method
1440 extraArg = pMD->GetMethodTable();
1444 #ifdef FEATURE_STUBS_AS_IL
1445 pstub = CreateInstantiatingILStub(pSharedMD, extraArg);
1448 _ASSERTE(pSharedMD != NULL && pSharedMD != pMD);
1449 sl.EmitInstantiatingMethodStub(pSharedMD, extraArg);
1451 pstub = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap());
1456 #endif // defined(FEATURE_SHARE_GENERIC_CODE)
1458 #if defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
1460 extern "C" MethodDesc * STDCALL PreStubGetMethodDescForCompactEntryPoint (PCODE pCode)
1462 _ASSERTE (pCode >= PC_REG_RELATIVE_OFFSET);
1464 pCode = (PCODE) (pCode - PC_REG_RELATIVE_OFFSET + THUMB_CODE);
1466 _ASSERTE (MethodDescChunk::IsCompactEntryPointAtAddress (pCode));
1468 return MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, FALSE);
1471 #endif // defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
1473 //=============================================================================
1474 // This function generates the real code for a method and installs it into
1475 // the methoddesc. Usually ***BUT NOT ALWAYS***, this function runs only once
1476 // per methoddesc. In addition to installing the new code, this function
1477 // returns a pointer to the new code for the prestub's convenience.
1478 //=============================================================================
1479 extern "C" PCODE STDCALL PreStubWorker(TransitionBlock * pTransitionBlock, MethodDesc * pMD)
1481 PCODE pbRetVal = NULL;
1483 BEGIN_PRESERVE_LAST_ERROR;
1485 STATIC_CONTRACT_THROWS;
1486 STATIC_CONTRACT_GC_TRIGGERS;
1487 STATIC_CONTRACT_MODE_COOPERATIVE;
1488 STATIC_CONTRACT_ENTRY_POINT;
1490 MAKE_CURRENT_THREAD_AVAILABLE();
1493 Thread::ObjectRefFlush(CURRENT_THREAD);
1496 FrameWithCookie<PrestubMethodFrame> frame(pTransitionBlock, pMD);
1497 PrestubMethodFrame * pPFrame = &frame;
1499 pPFrame->Push(CURRENT_THREAD);
1501 INSTALL_MANAGED_EXCEPTION_DISPATCHER;
1502 INSTALL_UNWIND_AND_CONTINUE_HANDLER;
1504 ETWOnStartup (PrestubWorker_V1,PrestubWorkerEnd_V1);
1506 _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");
1508 // Running the PreStubWorker on a method causes us to access its MethodTable
1509 g_IBCLogger.LogMethodDescAccess(pMD);
1511 // Make sure the method table is restored, and method instantiation if present
1512 pMD->CheckRestore();
1514 CONSISTENCY_CHECK(GetAppDomain()->CheckCanExecuteManagedCode(pMD));
1516 // Note this is redundant with the above check but we do it anyway for safety
1518 // This has been disabled so we have a better chance of catching these. Note that this check is
1519 // NOT sufficient for domain neutral and ngen cases.
1521 // pMD->EnsureActive();
1523 MethodTable *pDispatchingMT = NULL;
1525 if (pMD->IsVtableMethod())
1527 OBJECTREF curobj = pPFrame->GetThis();
1529 if (curobj != NULL) // Check for virtual function called non-virtually on a NULL object
1531 pDispatchingMT = curobj->GetMethodTable();
1533 #ifdef FEATURE_ICASTABLE
1534 if (pDispatchingMT->IsICastable())
1536 MethodTable *pMDMT = pMD->GetMethodTable();
1537 TypeHandle objectType(pDispatchingMT);
1538 TypeHandle methodType(pMDMT);
1540 GCStress<cfg_any>::MaybeTrigger();
1541 INDEBUG(curobj = NULL); // curobj is unprotected and CanCastTo() can trigger GC
1542 if (!objectType.CanCastTo(methodType))
1544 // Apparently ICastable magic was involved when we chose this method to be called
1545 // that's why we better stick to the MethodTable it belongs to, otherwise
1546 // DoPrestub() will fail not being able to find implementation for pMD in pDispatchingMT.
1548 pDispatchingMT = pMDMT;
1551 #endif // FEATURE_ICASTABLE
1553 // For value types, the only virtual methods are interface implementations.
1554 // Thus pDispatching == pMT because there
1555 // is no inheritance in value types. Note the BoxedEntryPointStubs are shared
1556 // between all sharable generic instantiations, so the == test is on
1557 // canonical method tables.
1559 MethodTable *pMDMT = pMD->GetMethodTable(); // put this here to see what the MT is in debug mode
1560 _ASSERTE(!pMD->GetMethodTable()->IsValueType() ||
1561 (pMD->IsUnboxingStub() && (pDispatchingMT->GetCanonicalMethodTable() == pMDMT->GetCanonicalMethodTable())));
1566 GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
1567 pbRetVal = pMD->DoPrestub(pDispatchingMT);
1569 UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
1570 UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;
1573 HardwareExceptionHolder
1575 // Give debugger opportunity to stop here
1579 pPFrame->Pop(CURRENT_THREAD);
1581 POSTCONDITION(pbRetVal != NULL);
1583 END_PRESERVE_LAST_ERROR;
1590 // These are two functions for testing purposes only, in debug builds only. They can be used by setting
1591 // InjectFatalError to 3. They ensure that we really can restore the guard page for SEH try/catch clauses.
1593 // @todo: Do we use this for anything anymore?
1595 static void TestSEHGuardPageRestoreOverflow()
1599 static void TestSEHGuardPageRestore()
1601 PAL_TRY(void *, unused, NULL)
1603 TestSEHGuardPageRestoreOverflow();
1605 PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER)
1607 _ASSERTE(!"Got first overflow.");
1611 PAL_TRY(void *, unused, NULL)
1613 TestSEHGuardPageRestoreOverflow();
1615 PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER)
1617 // If you get two asserts, then it works!
1618 _ASSERTE(!"Got second overflow.");
1624 // Separated out the body of PreStubWorker for the case where we don't have a frame.
1626 // Note that pDispatchingMT may not actually be the MT that is indirected through.
1627 // If a virtual method is called non-virtually, pMT will be used to indirect through
1629 // This returns a pointer to the stable entrypoint for the jitted method. Typically, this
1630 // is the same as the pointer to the top of the JITted code of the method. However, in
1631 // the case of methods that require stubs to be executed first (e.g., remoted methods
1632 // that require remoting stubs to be executed first), this stable entrypoint would be a
1633 // pointer to the stub, and not a pointer directly to the JITted code.
1634 PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT)
1639 POSTCONDITION(RETVAL != NULL);
1646 Thread *pThread = GetThread();
1648 MethodTable *pMT = GetMethodTable();
1650 // Running a prestub on a method causes us to access its MethodTable
1651 g_IBCLogger.LogMethodDescAccess(this);
1653 if (ContainsGenericVariables())
1655 COMPlusThrow(kInvalidOperationException, IDS_EE_CODEEXECUTION_CONTAINSGENERICVAR);
1658 /************************** DEBUG CHECKS *************************/
1659 /*-----------------------------------------------------------------
1660 // Halt if needed, GC stress, check the sharing count etc.
1664 static unsigned ctr = 0;
1667 if (g_pConfig->ShouldPrestubHalt(this))
1669 _ASSERTE(!"PreStubHalt");
1672 LOG((LF_CLASSLOADER, LL_INFO10000, "In PreStubWorker for %s::%s\n",
1673 m_pszDebugClassName, m_pszDebugMethodName));
1675 // This is a nice place to test out having some fatal EE errors. We do this only in a checked build, and only
1676 // under the InjectFatalError key.
1677 if (g_pConfig->InjectFatalError() == 1)
1679 EEPOLICY_HANDLE_FATAL_ERROR(COR_E_EXECUTIONENGINE);
1681 else if (g_pConfig->InjectFatalError() == 2)
1683 EEPOLICY_HANDLE_FATAL_ERROR(COR_E_STACKOVERFLOW);
1685 else if (g_pConfig->InjectFatalError() == 3)
1687 TestSEHGuardPageRestore();
1690 // Useful to test GC with the prestub on the call stack
1691 if (g_pConfig->ShouldPrestubGC(this))
1694 GCHeapUtilities::GetGCHeap()->GarbageCollect(-1);
1698 STRESS_LOG1(LF_CLASSLOADER, LL_INFO10000, "Prestubworker: method %pM\n", this);
1701 GCStress<cfg_any, EeconfigFastGcSPolicy, CoopGcModePolicy>::MaybeTrigger();
1704 #ifdef FEATURE_COMINTEROP
1705 /************************** INTEROP *************************/
1706 /*-----------------------------------------------------------------
1707 // Some method descriptors are COMPLUS-to-COM call descriptors
1708 // they are not your every day method descriptors, for example
1709 // they don't have an IL or code.
1711 if (IsComPlusCall() || IsGenericComPlusCall())
1713 pCode = GetStubForInteropMethod(this);
1715 GetPrecode()->SetTargetInterlocked(pCode);
1717 RETURN GetStableEntryPoint();
1719 #endif // FEATURE_COMINTEROP
1721 // workaround: This is to handle a punted work item dealing with a skipped module constructor
1722 // due to appdomain unload. Basically shared code was JITted in domain A, and then
1723 // this caused a link to another shared module with a module CCTOR, which was skipped
1724 // or aborted in another appdomain we were trying to propagate the activation to.
1726 // Note that this is not a fix, but that it just minimizes the window in which the
1728 if (pThread->IsAbortRequested())
1730 pThread->HandleThreadAbort();
1733 /*************************** CALL COUNTER ***********************/
1734 // If we are counting calls for tiered compilation, leave the prestub
1735 // in place so that we can continue intercepting method invocations.
1736 // When the TieredCompilationManager has received enough call notifications
1737 // for this method only then do we back-patch it.
1738 BOOL fCanBackpatchPrestub = TRUE;
1739 #ifdef FEATURE_TIERED_COMPILATION
1740 BOOL fNeedsCallCounting = FALSE;
1741 TieredCompilationManager* pTieredCompilationManager = nullptr;
1742 if (IsEligibleForTieredCompilation() && TieredCompilationManager::RequiresCallCounting(this))
1744 pTieredCompilationManager = GetAppDomain()->GetTieredCompilationManager();
1745 CallCounter * pCallCounter = GetCallCounter();
1746 BOOL fWasPromotedToTier1 = FALSE;
1747 pCallCounter->OnMethodCalled(this, pTieredCompilationManager, &fCanBackpatchPrestub, &fWasPromotedToTier1);
1748 fNeedsCallCounting = !fWasPromotedToTier1;
1752 /*************************** VERSIONABLE CODE *********************/
1754 BOOL fIsPointingToPrestub = IsPointingToPrestub();
1755 #ifdef FEATURE_CODE_VERSIONING
1756 if (IsVersionableWithPrecode() ||
1757 (!fIsPointingToPrestub && IsVersionableWithJumpStamp()))
1759 pCode = GetCodeVersionManager()->PublishVersionableCodeIfNecessary(this, fCanBackpatchPrestub);
1761 #ifdef FEATURE_TIERED_COMPILATION
1762 if (pTieredCompilationManager != nullptr && fNeedsCallCounting && fCanBackpatchPrestub && pCode != NULL)
1764 pTieredCompilationManager->OnMethodCallCountingStoppedWithoutTier1Promotion(this);
1768 fIsPointingToPrestub = IsPointingToPrestub();
1772 /************************** BACKPATCHING *************************/
1773 // See if the addr of code has changed from the pre-stub
1774 if (!fIsPointingToPrestub)
1776 LOG((LF_CLASSLOADER, LL_INFO10000,
1777 " In PreStubWorker, method already jitted, backpatching call point\n"));
1778 #if defined(FEATURE_JIT_PITCHING)
1779 MarkMethodNotPitchingCandidate(this);
1781 RETURN DoBackpatch(pMT, pDispatchingMT, TRUE);
1786 // The only reasons we are still pointing to prestub is because the call counter
1787 // prevented it or this thread lost the race with another thread in updating the
1788 // entry point. We should still short circuit and return the code without
1793 /************************** CODE CREATION *************************/
1794 if (IsUnboxingStub())
1796 pStub = MakeUnboxingStubWorker(this);
1798 #if defined(FEATURE_SHARE_GENERIC_CODE)
1799 else if (IsInstantiatingStub())
1801 pStub = MakeInstantiatingStubWorker(this);
1803 #endif // defined(FEATURE_SHARE_GENERIC_CODE)
1804 else if (IsIL() || IsNoMetadata())
1806 if (!IsNativeCodeStableAfterInit())
1808 GetOrCreatePrecode();
1810 pCode = PrepareInitialCode();
1811 } // end else if (IsIL() || IsNoMetadata())
1812 else if (IsNDirect())
1814 pCode = GetStubForInteropMethod(this);
1815 GetOrCreatePrecode();
1819 // Get the fcall implementation
1820 BOOL fSharedOrDynamicFCallImpl;
1821 pCode = ECall::GetFCallImpl(this, &fSharedOrDynamicFCallImpl);
1823 if (fSharedOrDynamicFCallImpl)
1825 // Fake ctors share one implementation that has to be wrapped by prestub
1826 GetOrCreatePrecode();
1831 pStub = GenerateArrayOpStub((ArrayMethodDesc*)this);
1833 else if (IsEEImpl())
1835 _ASSERTE(GetMethodTable()->IsDelegate());
1836 pCode = COMDelegate::GetInvokeMethodStub((EEImplMethodDesc*)this);
1837 GetOrCreatePrecode();
1841 // This is a method type we don't handle yet
1842 _ASSERTE(!"Unknown Method Type");
1845 /************************** POSTJIT *************************/
1846 _ASSERTE(pCode == NULL || GetNativeCode() == NULL || pCode == GetNativeCode());
1848 // At this point we must have either a pointer to managed code or to a stub. All of the above code
1849 // should have thrown an exception if it couldn't make a stub.
1850 _ASSERTE((pStub != NULL) ^ (pCode != NULL));
1852 /************************** SECURITY *************************/
1854 // Lets check to see if we need declarative security on this stub, If we have
1855 // security checks on this method or class then we need to add an intermediate
1856 // stub that performs declarative checks prior to calling the real stub.
1857 // record if security needs to intercept this call (also depends on whether we plan to use stubs for declarative security)
1860 _ASSERTE((pStub != NULL) ^ (pCode != NULL));
1862 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
1864 // We are seeing memory reordering race around fixups (see DDB 193514 and related bugs). We get into
1865 // situation where the patched precode is visible by other threads, but the resolved fixups
1866 // are not. IT SHOULD NEVER HAPPEN according to our current understanding of x86/x64 memory model.
1867 // (see email thread attached to the bug for details).
1869 // We suspect that there may be bug in the hardware or that hardware may have shortcuts that may be
1870 // causing grief. We will try to avoid the race by executing an extra memory barrier.
1878 GetPrecode()->SetTargetInterlocked(pCode);
1880 if (!HasStableEntryPoint())
1882 SetStableEntryPointInterlocked(pCode);
1887 if (!GetOrCreatePrecode()->SetTargetInterlocked(pStub->GetEntryPoint()))
1892 if (pStub->HasExternalEntryPoint())
1894 // If the Stub wraps code that is outside of the Stub allocation, then we
1895 // need to free the Stub allocation now.
1900 _ASSERTE(!IsPointingToPrestub());
1901 _ASSERTE(HasStableEntryPoint());
1903 RETURN DoBackpatch(pMT, pDispatchingMT, FALSE);
1906 #endif // !DACCESS_COMPILE
1908 //==========================================================================
1909 // The following code manages the PreStub. All method stubs initially
1911 //==========================================================================
1913 #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
1914 static PCODE g_UMThunkPreStub;
1915 #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1917 #ifndef DACCESS_COMPILE
1919 void ThePreStubManager::Init(void)
1921 STANDARD_VM_CONTRACT;
1924 // Add the prestub manager
1927 StubManager::AddStubManager(new ThePreStubManager());
1930 //-----------------------------------------------------------
1931 // Initialize the prestub.
1932 //-----------------------------------------------------------
1933 void InitPreStubManager(void)
1935 STANDARD_VM_CONTRACT;
1937 if (NingenEnabled())
1942 #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
1943 g_UMThunkPreStub = GenerateUMThunkPrestub()->GetEntryPoint();
1944 #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1946 ThePreStubManager::Init();
1949 PCODE TheUMThunkPreStub()
1951 LIMITED_METHOD_CONTRACT;
1953 #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
1954 return g_UMThunkPreStub;
1955 #else // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1956 return GetEEFuncEntryPoint(TheUMEntryPrestub);
1957 #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1960 PCODE TheVarargNDirectStub(BOOL hasRetBuffArg)
1962 LIMITED_METHOD_CONTRACT;
1964 #if !defined(_TARGET_X86_) && !defined(_TARGET_ARM64_)
1967 return GetEEFuncEntryPoint(VarargPInvokeStub_RetBuffArg);
1972 return GetEEFuncEntryPoint(VarargPInvokeStub);
1976 static PCODE PatchNonVirtualExternalMethod(MethodDesc * pMD, PCODE pCode, PTR_CORCOMPILE_IMPORT_SECTION pImportSection, TADDR pIndirection)
1978 STANDARD_VM_CONTRACT;
1981 // Skip fixup precode jump for better perf. Since we have MethodDesc available, we can use cheaper method
1982 // than code:Precode::TryToSkipFixupPrecode.
1984 #ifdef HAS_FIXUP_PRECODE
1985 if (pMD->HasPrecode() && pMD->GetPrecode()->GetType() == PRECODE_FIXUP
1986 && pMD->IsNativeCodeStableAfterInit())
1988 PCODE pDirectTarget = pMD->IsFCall() ? ECall::GetFCallImpl(pMD) : pMD->GetNativeCode();
1989 if (pDirectTarget != NULL)
1990 pCode = pDirectTarget;
1992 #endif //HAS_FIXUP_PRECODE
1994 if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE)
1996 CORCOMPILE_EXTERNAL_METHOD_THUNK * pThunk = (CORCOMPILE_EXTERNAL_METHOD_THUNK *)pIndirection;
1998 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
1999 INT64 oldValue = *(INT64*)pThunk;
2000 BYTE* pOldValue = (BYTE*)&oldValue;
2002 if (pOldValue[0] == X86_INSTR_CALL_REL32)
2004 INT64 newValue = oldValue;
2005 BYTE* pNewValue = (BYTE*)&newValue;
2006 pNewValue[0] = X86_INSTR_JMP_REL32;
2008 *(INT32 *)(pNewValue+1) = rel32UsingJumpStub((INT32*)(&pThunk->callJmp[1]), pCode, pMD, NULL);
2010 _ASSERTE(IS_ALIGNED((size_t)pThunk, sizeof(INT64)));
2011 EnsureWritableExecutablePages(pThunk, sizeof(INT64));
2012 FastInterlockCompareExchangeLong((INT64*)pThunk, newValue, oldValue);
2014 FlushInstructionCache(GetCurrentProcess(), pThunk, 8);
2016 #elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
2017 // Patchup the thunk to point to the actual implementation of the cross module external method
2018 EnsureWritableExecutablePages(&pThunk->m_pTarget);
2019 pThunk->m_pTarget = pCode;
2021 #if defined(_TARGET_ARM_)
2022 // ThumbBit must be set on the target address
2023 _ASSERTE(pCode & THUMB_CODE);
2026 PORTABILITY_ASSERT("ExternalMethodFixupWorker");
2031 *EnsureWritableExecutablePages((TADDR *)pIndirection) = pCode;
2037 //==========================================================================================
2038 // In NGen images calls to external methods start out pointing to jump thunks.
2039 // These jump thunks initially point to the assembly code _ExternalMethodFixupStub
2040 // It transfers control to ExternalMethodFixupWorker which will patch the jump
2041 // thunk to point to the actual cross module address for the method body
2042 // Some methods also have one-time prestubs we defer the patching until
2043 // we have the final stable method entry point.
2045 EXTERN_C PCODE STDCALL ExternalMethodFixupWorker(TransitionBlock * pTransitionBlock, TADDR pIndirection, DWORD sectionIndex, Module * pModule)
2047 STATIC_CONTRACT_THROWS;
2048 STATIC_CONTRACT_GC_TRIGGERS;
2049 STATIC_CONTRACT_MODE_COOPERATIVE;
2050 STATIC_CONTRACT_ENTRY_POINT;
2052 // We must save (and restore) the Last Error code before we call anything
2053 // that could overwrite it. Any callsite that leads to TlsGetValue will
2054 // potentially overwrite the Last Error code.
2057 // In Dev10 bug 837293 we were overwriting the Last Error code on the first
2058 // call to a PInvoke method. This occurred when we were running a
2059 // (precompiled) PInvoke IL stub implemented in the ngen image.
2061 // In this IL stub implementation we call the native method kernel32!GetFileAttributes,
2062 // and then we immediately try to save the Last Error code by calling the
2063 // mscorlib method System.StubHelpers.StubHelpers.SetLastError().
2065 // However when we are coming from a precompiled IL Stub in an ngen image
2066 // we must use an ExternalMethodFixup to find the target address of
2067 // System.StubHelpers.StubHelpers.SetLastError() and this was overwriting
2068 // the value of the Last Error before it could be retrieved and saved.
2073 BEGIN_PRESERVE_LAST_ERROR;
2075 MAKE_CURRENT_THREAD_AVAILABLE();
2078 Thread::ObjectRefFlush(CURRENT_THREAD);
2081 FrameWithCookie<ExternalMethodFrame> frame(pTransitionBlock);
2082 ExternalMethodFrame * pEMFrame = &frame;
2084 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
2085 // Decode indirection cell from callsite if it is not present
2086 if (pIndirection == NULL)
2088 // Asssume that the callsite is call [xxxxxxxx]
2089 PCODE retAddr = pEMFrame->GetReturnAddress();
2091 pIndirection = *(((TADDR *)retAddr) - 1);
2093 pIndirection = *(((INT32 *)retAddr) - 1) + retAddr;
2098 // FUTURE: Consider always passing in module and section index to avoid the lookups
2099 if (pModule == NULL)
2101 pModule = ExecutionManager::FindZapModule(pIndirection);
2102 sectionIndex = (DWORD)-1;
2104 _ASSERTE(pModule != NULL);
2106 pEMFrame->SetCallSite(pModule, pIndirection);
2108 pEMFrame->Push(CURRENT_THREAD); // Push the new ExternalMethodFrame onto the frame stack
2110 INSTALL_MANAGED_EXCEPTION_DISPATCHER;
2111 INSTALL_UNWIND_AND_CONTINUE_HANDLER;
2113 bool fVirtual = false;
2114 MethodDesc * pMD = NULL;
2115 MethodTable * pMT = NULL;
2119 GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
2121 PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage();
2123 RVA rva = pNativeImage->GetDataRva(pIndirection);
2125 PTR_CORCOMPILE_IMPORT_SECTION pImportSection;
2126 if (sectionIndex != (DWORD)-1)
2128 pImportSection = pModule->GetImportSectionFromIndex(sectionIndex);
2129 _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva));
2133 pImportSection = pModule->GetImportSectionForRVA(rva);
2135 _ASSERTE(pImportSection != NULL);
2138 if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE)
2140 _ASSERTE(pImportSection->EntrySize == sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK));
2141 index = (rva - pImportSection->Section.VirtualAddress) / sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK);
2145 _ASSERTE(pImportSection->EntrySize == sizeof(TADDR));
2146 index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR);
2149 PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures));
2151 PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]);
2153 BYTE kind = *pBlob++;
2155 Module * pInfoModule = pModule;
2156 if (kind & ENCODE_MODULE_OVERRIDE)
2158 DWORD moduleIndex = CorSigUncompressData(pBlob);
2159 pInfoModule = pModule->GetModuleFromIndex(moduleIndex);
2160 kind &= ~ENCODE_MODULE_OVERRIDE;
2166 case ENCODE_METHOD_ENTRY:
2168 pMD = ZapSig::DecodeMethod(pModule,
2172 if (pModule->IsReadyToRun())
2174 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2175 pMD->EnsureActive();
2181 case ENCODE_METHOD_ENTRY_DEF_TOKEN:
2183 mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef);
2184 pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE);
2186 pMD->PrepareForUseAsADependencyOfANativeImage();
2188 if (pModule->IsReadyToRun())
2190 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2191 pMD->EnsureActive();
2197 case ENCODE_METHOD_ENTRY_REF_TOKEN:
2199 SigTypeContext typeContext;
2200 mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef);
2201 FieldDesc * pFD = NULL;
2203 MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th);
2204 _ASSERTE(pMD != NULL);
2206 pMD->PrepareForUseAsADependencyOfANativeImage();
2208 if (pModule->IsReadyToRun())
2210 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2211 pMD->EnsureActive();
2215 #ifdef FEATURE_WINMD_RESILIENT
2216 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2217 pMD->EnsureActive();
2224 case ENCODE_VIRTUAL_ENTRY:
2226 pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th);
2229 pMD->PrepareForUseAsADependencyOfANativeImage();
2231 if (pMD->IsVtableMethod())
2233 slot = pMD->GetSlot();
2234 pMT = th.IsNull() ? pMD->GetMethodTable() : th.GetMethodTable();
2239 if (pModule->IsReadyToRun())
2241 // We do not emit activation fixups for version resilient references. Activate the target explicitly.
2242 pMD->EnsureActive();
2247 case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2249 mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef);
2250 pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE);
2255 case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2257 mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef);
2259 FieldDesc * pFD = NULL;
2261 SigTypeContext typeContext;
2262 MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th, TRUE /* actual type required */);
2263 _ASSERTE(pMD != NULL);
2268 case ENCODE_VIRTUAL_ENTRY_SLOT:
2270 slot = CorSigUncompressData(pBlob);
2271 pMT = ZapSig::DecodeType(pModule, pInfoModule, pBlob).GetMethodTable();
2278 _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND");
2279 ThrowHR(COR_E_BADIMAGEFORMAT);
2284 GCX_COOP_THREAD_EXISTS(CURRENT_THREAD);
2286 // Get the stub manager for this module
2287 VirtualCallStubManager *pMgr = pModule->GetLoaderAllocator()->GetVirtualCallStubManager();
2289 OBJECTREF *protectedObj = pEMFrame->GetThisPtr();
2290 _ASSERTE(protectedObj != NULL);
2291 if (*protectedObj == NULL) {
2292 COMPlusThrow(kNullReferenceException);
2295 DispatchToken token;
2296 if (pMT->IsInterface() || MethodTable::VTableIndir_t::isRelative)
2298 token = pMT->GetLoaderAllocator()->GetDispatchToken(pMT->GetTypeID(), slot);
2299 StubCallSite callSite(pIndirection, pEMFrame->GetReturnAddress());
2300 pCode = pMgr->ResolveWorker(&callSite, protectedObj, token, VirtualCallStubManager::SK_LOOKUP);
2304 pCode = pMgr->GetVTableCallStub(slot);
2305 *EnsureWritableExecutablePages((TADDR *)pIndirection) = pCode;
2307 _ASSERTE(pCode != NULL);
2311 _ASSERTE(pMD != NULL);
2314 // Switch to cooperative mode to avoid racing with GC stackwalk
2315 GCX_COOP_THREAD_EXISTS(CURRENT_THREAD);
2316 pEMFrame->SetFunction(pMD);
2319 pCode = pMD->GetMethodEntryPoint();
2322 // Note that we do not want to call code:MethodDesc::IsPointingToPrestub() here. It does not take remoting interception
2323 // into account and so it would cause otherwise intercepted methods to be JITed. It is a compat issue if the JITing fails.
2325 if (!DoesSlotCallPrestub(pCode))
2327 pCode = PatchNonVirtualExternalMethod(pMD, pCode, pImportSection, pIndirection);
2331 #if defined (FEATURE_JIT_PITCHING)
2332 DeleteFromPitchingCandidate(pMD);
2336 // Force a GC on every jit if the stress level is high enough
2337 GCStress<cfg_any>::MaybeTrigger();
2341 UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
2342 UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;
2344 pEMFrame->Pop(CURRENT_THREAD); // Pop the ExternalMethodFrame from the frame stack
2346 END_PRESERVE_LAST_ERROR;
2352 #if !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_)
2354 //==========================================================================================
2355 // In NGen image, virtual slots inherited from cross-module dependencies point to jump thunks.
2356 // These jump thunk initially point to VirtualMethodFixupStub which transfers control here.
2357 // This method 'VirtualMethodFixupWorker' will patch the jump thunk to point to the actual
2358 // inherited method body after we have execute the precode and a stable entry point.
2360 EXTERN_C PCODE VirtualMethodFixupWorker(Object * pThisPtr, CORCOMPILE_VIRTUAL_IMPORT_THUNK *pThunk)
2371 _ASSERTE(pThisPtr != NULL);
2372 VALIDATEOBJECT(pThisPtr);
2374 MethodTable * pMT = pThisPtr->GetMethodTable();
2376 WORD slotNumber = pThunk->slotNum;
2377 _ASSERTE(slotNumber != (WORD)-1);
2379 PCODE pCode = pMT->GetRestoredSlot(slotNumber);
2381 if (!DoesSlotCallPrestub(pCode))
2383 // Skip fixup precode jump for better perf
2384 PCODE pDirectTarget = Precode::TryToSkipFixupPrecode(pCode);
2385 if (pDirectTarget != NULL)
2386 pCode = pDirectTarget;
2388 // Patch the thunk to the actual method body
2389 if (EnsureWritableExecutablePagesNoThrow(&pThunk->m_pTarget, sizeof(pThunk->m_pTarget)))
2390 pThunk->m_pTarget = pCode;
2392 #if defined(_TARGET_ARM_)
2393 // The target address should have the thumb bit set
2394 _ASSERTE(pCode & THUMB_CODE);
2398 #endif // !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_)
2400 #ifdef FEATURE_READYTORUN
2402 static PCODE getHelperForInitializedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD)
2404 STANDARD_VM_CONTRACT;
2406 PCODE pHelper = NULL;
2410 case ENCODE_STATIC_BASE_NONGC_HELPER:
2415 baseNonGC = pMT->GetNonGCStaticsBasePointer();
2417 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseNonGC);
2420 case ENCODE_STATIC_BASE_GC_HELPER:
2425 baseGC = pMT->GetGCStaticsBasePointer();
2427 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseGC);
2430 case ENCODE_CCTOR_TRIGGER:
2431 pHelper = DynamicHelpers::CreateReturn(pModule->GetLoaderAllocator());
2433 case ENCODE_FIELD_ADDRESS:
2435 _ASSERTE(pFD->IsStatic());
2443 if (!pFD->IsRVA()) // for RVA the base is ignored
2444 base = pFD->GetBase();
2445 pAddress = pFD->GetStaticAddressHandle((void *)dac_cast<TADDR>(base));
2448 // The following code assumes that the statics are pinned that is not the case for collectible types
2449 _ASSERTE(!pFD->GetEnclosingMethodTable()->Collectible());
2451 // Unbox valuetype fields
2452 if (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE && !pFD->IsRVA())
2453 pHelper = DynamicHelpers::CreateReturnIndirConst(pModule->GetLoaderAllocator(), (TADDR)pAddress, (INT8)Object::GetOffsetOfFirstField());
2455 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)pAddress);
2459 _ASSERTE(!"Unexpected statics CORCOMPILE_FIXUP_BLOB_KIND");
2460 ThrowHR(COR_E_BADIMAGEFORMAT);
2466 static PCODE getHelperForSharedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD)
2468 STANDARD_VM_CONTRACT;
2470 _ASSERTE(kind == ENCODE_FIELD_ADDRESS);
2472 CorInfoHelpFunc helpFunc = CEEInfo::getSharedStaticsHelper(pFD, pMT);
2474 TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID();
2477 if (helpFunc != CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR && helpFunc != CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR)
2479 if (pMT->IsDynamicStatics())
2481 classID = pMT->GetModuleDynamicEntryID();
2485 classID = pMT->GetClassIndex();
2489 bool fUnbox = (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE);
2491 AllocMemTracker amTracker;
2493 StaticFieldAddressArgs * pArgs = (StaticFieldAddressArgs *)amTracker.Track(
2494 pModule->GetLoaderAllocator()->GetHighFrequencyHeap()->
2495 AllocMem(S_SIZE_T(sizeof(StaticFieldAddressArgs))));
2497 pArgs->staticBaseHelper = (FnStaticBaseHelper)CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc);
2498 pArgs->arg0 = moduleID;
2499 pArgs->arg1 = classID;
2500 pArgs->offset = pFD->GetOffset();
2502 PCODE pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), (TADDR)pArgs,
2503 fUnbox ? GetEEFuncEntryPoint(JIT_StaticFieldAddressUnbox_Dynamic) : GetEEFuncEntryPoint(JIT_StaticFieldAddress_Dynamic));
2505 amTracker.SuppressRelease();
2510 static PCODE getHelperForStaticBase(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT)
2512 STANDARD_VM_CONTRACT;
2514 int helpFunc = CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE;
2516 if (kind == ENCODE_STATIC_BASE_GC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER)
2518 helpFunc = CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
2521 if (pMT->IsDynamicStatics())
2523 const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
2527 if (!pMT->HasClassConstructor() && !pMT->HasBoxedRegularStatics())
2529 const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
2533 if (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER)
2535 const int delta = CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
2540 if (helpFunc == CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR || helpFunc == CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR)
2542 pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), pMT->GetModule()->GetModuleID(), CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc));
2546 TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID();
2549 if (pMT->IsDynamicStatics())
2551 classID = pMT->GetModuleDynamicEntryID();
2555 classID = pMT->GetClassIndex();
2558 pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), moduleID, classID, CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc));
2564 TADDR GetFirstArgumentRegisterValuePtr(TransitionBlock * pTransitionBlock)
2566 TADDR pArgument = (TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters();
2568 // x86 is special as always
2569 pArgument += offsetof(ArgumentRegisters, ECX);
2575 void ProcessDynamicDictionaryLookup(TransitionBlock * pTransitionBlock,
2577 Module * pInfoModule,
2579 PCCOR_SIGNATURE pBlob,
2580 PCCOR_SIGNATURE pBlobStart,
2581 CORINFO_RUNTIME_LOOKUP * pResult,
2582 DWORD * pDictionaryIndexAndSlot)
2584 STANDARD_VM_CONTRACT;
2586 TADDR genericContextPtr = *(TADDR*)GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2588 pResult->testForFixup = pResult->testForNull = false;
2589 pResult->signature = NULL;
2591 pResult->indirectFirstOffset = 0;
2592 pResult->indirectSecondOffset = 0;
2594 pResult->indirections = CORINFO_USEHELPER;
2596 DWORD numGenericArgs = 0;
2597 MethodTable* pContextMT = NULL;
2598 MethodDesc* pContextMD = NULL;
2600 if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD)
2602 pContextMD = (MethodDesc*)genericContextPtr;
2603 numGenericArgs = pContextMD->GetNumGenericMethodArgs();
2604 pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_METHOD;
2608 pContextMT = (MethodTable*)genericContextPtr;
2610 if (kind == ENCODE_DICTIONARY_LOOKUP_THISOBJ)
2612 TypeHandle contextTypeHandle = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2614 SigPointer p(pBlob);
2618 pContextMT = pContextMT->GetMethodTableMatchingParentClass(contextTypeHandle.AsMethodTable());
2621 numGenericArgs = pContextMT->GetNumGenericArgs();
2622 pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_CLASS;
2625 _ASSERTE(numGenericArgs > 0);
2627 CORCOMPILE_FIXUP_BLOB_KIND signatureKind = (CORCOMPILE_FIXUP_BLOB_KIND)CorSigUncompressData(pBlob);
2630 // Optimization cases
2632 if (signatureKind == ENCODE_TYPE_HANDLE)
2634 SigPointer sigptr(pBlob, -1);
2636 CorElementType type;
2637 IfFailThrow(sigptr.GetElemType(&type));
2639 if ((type == ELEMENT_TYPE_MVAR) && (kind == ENCODE_DICTIONARY_LOOKUP_METHOD))
2641 pResult->indirections = 2;
2642 pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo);
2644 if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative)
2646 pResult->indirectFirstOffset = 1;
2650 IfFailThrow(sigptr.GetData(&data));
2651 pResult->offsets[1] = sizeof(TypeHandle) * data;
2655 else if ((type == ELEMENT_TYPE_VAR) && (kind != ENCODE_DICTIONARY_LOOKUP_METHOD))
2657 pResult->indirections = 3;
2658 pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo();
2659 pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1);
2662 IfFailThrow(sigptr.GetData(&data));
2663 pResult->offsets[2] = sizeof(TypeHandle) * data;
2665 if (MethodTable::IsPerInstInfoRelative())
2667 pResult->indirectFirstOffset = 1;
2668 pResult->indirectSecondOffset = 1;
2675 if (pContextMT != NULL && pContextMT->GetNumDicts() > 0xFFFF)
2676 ThrowHR(COR_E_BADIMAGEFORMAT);
2678 // Dictionary index and slot number are encoded in a 32-bit DWORD. The higher 16 bits
2679 // are used for the dictionary index, and the lower 16 bits for the slot number.
2680 *pDictionaryIndexAndSlot = (pContextMT == NULL ? 0 : pContextMT->GetNumDicts() - 1);
2681 *pDictionaryIndexAndSlot <<= 16;
2683 WORD dictionarySlot;
2685 if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD)
2687 if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMD->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 1, FromReadyToRunImage, &dictionarySlot))
2689 pResult->testForNull = 1;
2691 // Indirect through dictionary table pointer in InstantiatedMethodDesc
2692 pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo);
2694 if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative)
2696 pResult->indirectFirstOffset = 1;
2699 *pDictionaryIndexAndSlot |= dictionarySlot;
2703 // It's a class dictionary lookup (CORINFO_LOOKUP_CLASSPARAM or CORINFO_LOOKUP_THISOBJ)
2706 if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMT->GetClass()->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 2, FromReadyToRunImage, &dictionarySlot))
2708 pResult->testForNull = 1;
2710 // Indirect through dictionary table pointer in vtable
2711 pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo();
2713 // Next indirect through the dictionary appropriate to this instantiated type
2714 pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1);
2716 if (MethodTable::IsPerInstInfoRelative())
2718 pResult->indirectFirstOffset = 1;
2719 pResult->indirectSecondOffset = 1;
2722 *pDictionaryIndexAndSlot |= dictionarySlot;
2727 PCODE DynamicHelperFixup(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND * pKind, TypeHandle * pTH, MethodDesc ** ppMD, FieldDesc ** ppFD)
2729 STANDARD_VM_CONTRACT;
2731 PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage();
2733 RVA rva = pNativeImage->GetDataRva((TADDR)pCell);
2735 PTR_CORCOMPILE_IMPORT_SECTION pImportSection = pModule->GetImportSectionFromIndex(sectionIndex);
2736 _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva));
2738 _ASSERTE(pImportSection->EntrySize == sizeof(TADDR));
2740 COUNT_T index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR);
2742 PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures));
2744 PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]);
2745 PCCOR_SIGNATURE pBlobStart = pBlob;
2747 BYTE kind = *pBlob++;
2749 Module * pInfoModule = pModule;
2750 if (kind & ENCODE_MODULE_OVERRIDE)
2752 DWORD moduleIndex = CorSigUncompressData(pBlob);
2753 pInfoModule = pModule->GetModuleFromIndex(moduleIndex);
2754 kind &= ~ENCODE_MODULE_OVERRIDE;
2757 bool fReliable = false;
2759 MethodDesc * pMD = NULL;
2760 FieldDesc * pFD = NULL;
2761 CORINFO_RUNTIME_LOOKUP genericLookup;
2762 DWORD dictionaryIndexAndSlot = -1;
2766 case ENCODE_NEW_HELPER:
2767 th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2768 th.AsMethodTable()->EnsureInstanceActive();
2770 case ENCODE_ISINSTANCEOF_HELPER:
2771 case ENCODE_CHKCAST_HELPER:
2773 case ENCODE_NEW_ARRAY_HELPER:
2774 th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2777 case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
2778 case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2779 case ENCODE_STATIC_BASE_NONGC_HELPER:
2780 case ENCODE_STATIC_BASE_GC_HELPER:
2781 case ENCODE_CCTOR_TRIGGER:
2782 th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2784 th.AsMethodTable()->EnsureInstanceActive();
2785 th.AsMethodTable()->CheckRunClassInitThrowing();
2789 case ENCODE_FIELD_ADDRESS:
2790 pFD = ZapSig::DecodeField(pModule, pInfoModule, pBlob, &th);
2791 _ASSERTE(pFD->IsStatic());
2794 case ENCODE_VIRTUAL_ENTRY:
2795 // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2796 // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2797 // case ENCODE_VIRTUAL_ENTRY_SLOT:
2799 case ENCODE_DELEGATE_CTOR:
2801 pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th);
2802 if (pMD->RequiresInstArg())
2804 pMD = MethodDesc::FindOrCreateAssociatedMethodDesc(pMD,
2806 FALSE /* forceBoxedEntryPoint */,
2807 pMD->GetMethodInstantiation(),
2808 FALSE /* allowInstParam */);
2810 pMD->EnsureActive();
2814 case ENCODE_DICTIONARY_LOOKUP_THISOBJ:
2815 case ENCODE_DICTIONARY_LOOKUP_TYPE:
2816 case ENCODE_DICTIONARY_LOOKUP_METHOD:
2817 ProcessDynamicDictionaryLookup(pTransitionBlock, pModule, pInfoModule, kind, pBlob, pBlobStart, &genericLookup, &dictionaryIndexAndSlot);
2821 _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND");
2822 ThrowHR(COR_E_BADIMAGEFORMAT);
2825 PCODE pHelper = NULL;
2829 // For reliable helpers, exceptions in creating the optimized helper are non-fatal. Swallow them to make CER work well.
2834 case ENCODE_ISINSTANCEOF_HELPER:
2835 case ENCODE_CHKCAST_HELPER:
2837 bool fClassMustBeRestored;
2838 CorInfoHelpFunc helpFunc = CEEInfo::getCastingHelperStatic(th, /* throwing */ (kind == ENCODE_CHKCAST_HELPER), &fClassMustBeRestored);
2839 pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc));
2842 case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
2843 case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2844 case ENCODE_STATIC_BASE_NONGC_HELPER:
2845 case ENCODE_STATIC_BASE_GC_HELPER:
2846 case ENCODE_CCTOR_TRIGGER:
2847 case ENCODE_FIELD_ADDRESS:
2849 MethodTable * pMT = th.AsMethodTable();
2851 bool fNeedsNonTrivialHelper = false;
2853 if (pMT->IsDomainNeutral() && !IsSingleAppDomain())
2855 fNeedsNonTrivialHelper = true;
2858 if (pMT->Collectible() && (kind != ENCODE_CCTOR_TRIGGER))
2860 // Collectible statics are not pinned - the fast getters expect statics to be pinned
2861 fNeedsNonTrivialHelper = true;
2867 fNeedsNonTrivialHelper = !!pFD->IsSpecialStatic();
2871 fNeedsNonTrivialHelper = (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER) || (kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER);
2875 if (fNeedsNonTrivialHelper)
2881 _ASSERTE(!"Fast getter for rare kinds of static fields");
2885 pHelper = getHelperForSharedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD);
2890 pHelper = getHelperForStaticBase(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT);
2895 // Delay the creation of the helper until the type is initialized
2896 if (pMT->IsClassInited())
2897 pHelper = getHelperForInitializedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD);
2902 case ENCODE_VIRTUAL_ENTRY:
2903 // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2904 // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2905 // case ENCODE_VIRTUAL_ENTRY_SLOT:
2907 if (!pMD->IsVtableMethod())
2909 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode());
2913 AllocMemTracker amTracker;
2915 VirtualFunctionPointerArgs * pArgs = (VirtualFunctionPointerArgs *)amTracker.Track(
2916 pModule->GetLoaderAllocator()->GetHighFrequencyHeap()->
2917 AllocMem(S_SIZE_T(sizeof(VirtualFunctionPointerArgs))));
2919 pArgs->classHnd = (CORINFO_CLASS_HANDLE)th.AsPtr();
2920 pArgs->methodHnd = (CORINFO_METHOD_HANDLE)pMD;
2922 pHelper = DynamicHelpers::CreateHelperWithArg(pModule->GetLoaderAllocator(), (TADDR)pArgs,
2923 GetEEFuncEntryPoint(JIT_VirtualFunctionPointer_Dynamic));
2925 amTracker.SuppressRelease();
2934 if (pHelper != NULL)
2936 *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper;
2940 // Always execute the reliable fallback in debug builds
2947 EX_END_CATCH (SwallowAllExceptions);
2953 case ENCODE_NEW_HELPER:
2955 CorInfoHelpFunc helpFunc = CEEInfo::getNewHelperStatic(th.AsMethodTable());
2956 pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc));
2959 case ENCODE_NEW_ARRAY_HELPER:
2961 CorInfoHelpFunc helpFunc = CEEInfo::getNewArrHelperStatic(th);
2962 ArrayTypeDesc *pArrayTypeDesc = th.AsArray();
2963 MethodTable *pArrayMT = pArrayTypeDesc->GetTemplateMethodTable();
2964 pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), dac_cast<TADDR>(pArrayMT), CEEJitInfo::getHelperFtnStatic(helpFunc));
2968 case ENCODE_DELEGATE_CTOR:
2970 MethodTable * pDelegateType = NULL;
2975 TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2977 if (pArgument != NULL)
2979 pDelegateType = (*(Object **)pArgument)->GetMethodTable();
2980 _ASSERTE(pDelegateType->IsDelegate());
2984 DelegateCtorArgs ctorData;
2985 ctorData.pMethod = NULL;
2986 ctorData.pArg3 = NULL;
2987 ctorData.pArg4 = NULL;
2988 ctorData.pArg5 = NULL;
2990 MethodDesc * pDelegateCtor = NULL;
2992 if (pDelegateType != NULL)
2994 pDelegateCtor = COMDelegate::GetDelegateCtor(TypeHandle(pDelegateType), pMD, &ctorData);
2996 if (ctorData.pArg4 != NULL || ctorData.pArg5 != NULL)
2998 // This should never happen - we should never get collectible or secure delegates here
3000 pDelegateCtor = NULL;
3004 TADDR target = NULL;
3006 if (pDelegateCtor != NULL)
3008 target = pDelegateCtor->GetMultiCallableAddrOfCode();
3012 target = ECall::GetFCallImpl(MscorlibBinder::GetMethod(METHOD__DELEGATE__CONSTRUCT_DELEGATE));
3013 ctorData.pArg3 = NULL;
3016 if (ctorData.pArg3 != NULL)
3018 pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), (TADDR)ctorData.pArg3, target);
3022 pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), target);
3027 case ENCODE_DICTIONARY_LOOKUP_THISOBJ:
3028 case ENCODE_DICTIONARY_LOOKUP_TYPE:
3029 case ENCODE_DICTIONARY_LOOKUP_METHOD:
3031 pHelper = DynamicHelpers::CreateDictionaryLookupHelper(pModule->GetLoaderAllocator(), &genericLookup, dictionaryIndexAndSlot, pModule);
3039 if (pHelper != NULL)
3041 *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper;
3045 *pKind = (CORCOMPILE_FIXUP_BLOB_KIND)kind;
3053 extern "C" SIZE_T STDCALL DynamicHelperWorker(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, INT frameFlags)
3055 PCODE pHelper = NULL;
3056 SIZE_T result = NULL;
3058 STATIC_CONTRACT_THROWS;
3059 STATIC_CONTRACT_GC_TRIGGERS;
3060 STATIC_CONTRACT_MODE_COOPERATIVE;
3062 MAKE_CURRENT_THREAD_AVAILABLE();
3065 Thread::ObjectRefFlush(CURRENT_THREAD);
3068 FrameWithCookie<DynamicHelperFrame> frame(pTransitionBlock, frameFlags);
3069 DynamicHelperFrame * pFrame = &frame;
3071 pFrame->Push(CURRENT_THREAD);
3073 INSTALL_MANAGED_EXCEPTION_DISPATCHER;
3074 INSTALL_UNWIND_AND_CONTINUE_HANDLER;
3076 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
3077 // Decode indirection cell from callsite if it is not present
3080 // Asssume that the callsite is call [xxxxxxxx]
3081 PCODE retAddr = pFrame->GetReturnAddress();
3083 pCell = *(((TADDR **)retAddr) - 1);
3085 pCell = (TADDR *)(*(((INT32 *)retAddr) - 1) + retAddr);
3089 _ASSERTE(pCell != NULL);
3092 MethodDesc * pMD = NULL;
3093 FieldDesc * pFD = NULL;
3094 CORCOMPILE_FIXUP_BLOB_KIND kind = ENCODE_NONE;
3097 GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
3099 pHelper = DynamicHelperFixup(pTransitionBlock, pCell, sectionIndex, pModule, &kind, &th, &pMD, &pFD);
3102 if (pHelper == NULL)
3104 TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock);
3108 case ENCODE_ISINSTANCEOF_HELPER:
3109 case ENCODE_CHKCAST_HELPER:
3111 BOOL throwInvalidCast = (kind == ENCODE_CHKCAST_HELPER);
3112 if (*(Object **)pArgument == NULL || ObjIsInstanceOf(*(Object **)pArgument, th, throwInvalidCast))
3114 result = (SIZE_T)(*(Object **)pArgument);
3118 _ASSERTE (!throwInvalidCast);
3123 case ENCODE_STATIC_BASE_NONGC_HELPER:
3124 result = (SIZE_T)th.AsMethodTable()->GetNonGCStaticsBasePointer();
3126 case ENCODE_STATIC_BASE_GC_HELPER:
3127 result = (SIZE_T)th.AsMethodTable()->GetGCStaticsBasePointer();
3129 case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
3130 ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable());
3131 result = (SIZE_T)th.AsMethodTable()->GetNonGCThreadStaticsBasePointer();
3133 case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
3134 ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable());
3135 result = (SIZE_T)th.AsMethodTable()->GetGCThreadStaticsBasePointer();
3137 case ENCODE_CCTOR_TRIGGER:
3139 case ENCODE_FIELD_ADDRESS:
3140 result = (SIZE_T)pFD->GetCurrentStaticAddress();
3142 case ENCODE_VIRTUAL_ENTRY:
3143 // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
3144 // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
3145 // case ENCODE_VIRTUAL_ENTRY_SLOT:
3147 OBJECTREF objRef = ObjectToOBJECTREF(*(Object **)pArgument);
3149 GCPROTECT_BEGIN(objRef);
3152 COMPlusThrow(kNullReferenceException);
3154 // Duplicated logic from JIT_VirtualFunctionPointer_Framed
3155 if (!pMD->IsVtableMethod())
3157 result = pMD->GetMultiCallableAddrOfCode();
3161 result = pMD->GetMultiCallableAddrOfVirtualizedCode(&objRef, th);
3172 UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
3173 UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;
3175 pFrame->Pop(CURRENT_THREAD);
3177 if (pHelper == NULL)
3178 *(SIZE_T *)((TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters()) = result;
3182 #endif // FEATURE_READYTORUN
3184 #endif // !DACCESS_COMPILE