0c01efd64eaa8c006bdb99a740a6ae347e2ec41f
[platform/upstream/coreclr.git] / src / vm / prestub.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4 // ===========================================================================
5 // File: Prestub.cpp
6 //
7
8 // ===========================================================================
9 // This file contains the implementation for creating and using prestubs
10 // ===========================================================================
11 //
12
13  
14 #include "common.h"
15 #include "vars.hpp"
16 #include "security.h"
17 #include "eeconfig.h"
18 #include "dllimport.h"
19 #include "comdelegate.h"
20 #include "dbginterface.h"
21 #include "listlock.inl"
22 #include "stubgen.h"
23 #include "eventtrace.h"
24 #include "array.h"
25 #include "compile.h"
26 #include "ecall.h"
27 #include "virtualcallstub.h"
28
29 #ifdef FEATURE_PREJIT
30 #include "compile.h"
31 #endif
32
33 #ifdef FEATURE_INTERPRETER
34 #include "interpreter.h"
35 #endif
36
37 #ifdef FEATURE_COMINTEROP 
38 #include "clrtocomcall.h"
39 #endif
40
41 #include "mdaassistants.h"
42
43 #ifdef FEATURE_STACK_SAMPLING
44 #include "stacksampler.h"
45 #endif
46
47 #ifdef FEATURE_PERFMAP
48 #include "perfmap.h"
49 #endif
50
51 #ifdef FEATURE_TIERED_COMPILATION
52 #include "callcounter.h"
53 #endif
54
55 #ifndef DACCESS_COMPILE
56
57 #if defined(FEATURE_JIT_PITCHING)
58 EXTERN_C void CheckStacksAndPitch();
59 EXTERN_C void SavePitchingCandidate(MethodDesc* pMD, ULONG sizeOfCode);
60 EXTERN_C void DeleteFromPitchingCandidate(MethodDesc* pMD);
61 EXTERN_C void MarkMethodNotPitchingCandidate(MethodDesc* pMD);
62 #endif
63
64 EXTERN_C void STDCALL ThePreStub();
65
66 #if defined(HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
67
68 EXTERN_C void STDCALL ThePreStubCompactARM();
69
70 #endif // defined(HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
71
72 EXTERN_C void STDCALL ThePreStubPatch();
73
74 //==========================================================================
75
//
// DoBackpatch: propagates this method's current entry point into the places
// that may still hold the temporary (prestub) entry point, so future calls
// skip the prestub. Under FEATURE_INTERPRETER the propagated address is the
// current method entry point (which may be an interpreter stub); otherwise
// it is the stable entry point. Returns the entry point to execute.
//
//   pMT            - the MethodTable owning this method (asserted below).
//   pDispatchingMT - the MethodTable the call was dispatched through
//                    (may differ from pMT for inherited slots), or NULL.
//   fFullBackPatch - when TRUE, additionally patch the FuncPtrStub precode,
//                    the fake entry point precode, the restored-slot
//                    MethodTable used by VSD, and duplicate vtable slots.
//
PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BOOL fFullBackPatch)
{
    CONTRACTL
    {
        STANDARD_VM_CHECK;
        PRECONDITION(!ContainsGenericVariables());
#ifndef FEATURE_INTERPRETER
        PRECONDITION(HasStableEntryPoint());
#endif // FEATURE_INTERPRETER
        PRECONDITION(pMT == GetMethodTable());
    }
    CONTRACTL_END;
#ifdef FEATURE_INTERPRETER
    PCODE pTarget = GetMethodEntryPoint();
#else
    PCODE pTarget = GetStableEntryPoint();
#endif

    // Without a temporary entry point there is nothing stale to patch over.
    if (!HasTemporaryEntryPoint())
        return pTarget;

    PCODE pExpected = GetTemporaryEntryPoint();

    if (pExpected == pTarget)
        return pTarget;

    // True interface methods are never backpatched
    if (pMT->IsInterface() && !IsStatic())
        return pTarget;

    if (fFullBackPatch)
    {
        FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate();
        if (pFuncPtrStubs != NULL)
        {
            Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this);
            if (pFuncPtrPrecode != NULL)
            {
                // If there is a funcptr precode to patch, we are done for this round.
                if (pFuncPtrPrecode->SetTargetInterlocked(pTarget))
                    return pTarget;
            }
        }

#ifndef HAS_COMPACT_ENTRYPOINTS
        // Patch the fake entrypoint if necessary
        Precode::GetPrecodeFromEntryPoint(pExpected)->SetTargetInterlocked(pTarget);
#endif // HAS_COMPACT_ENTRYPOINTS
    }

    // Methods without a vtable slot have no slots to patch below.
    if (HasNonVtableSlot())
        return pTarget;

    BOOL fBackpatched = FALSE;

// Compare-and-patch a single vtable slot: only overwrite slots that still
// hold the temporary entry point, so we never stomp a newer value.
#define BACKPATCH(pPatchedMT)                           \
    do                                                  \
    {                                                   \
        if (pPatchedMT->GetSlot(dwSlot) == pExpected)   \
        {                                               \
            pPatchedMT->SetSlot(dwSlot, pTarget);       \
            fBackpatched = TRUE;                        \
        }                                               \
    }                                                   \
    while(0)

    // The owning slot has been updated already, so there is no need to backpatch it
    _ASSERTE(pMT->GetSlot(GetSlot()) == pTarget);

    if (pDispatchingMT != NULL && pDispatchingMT != pMT)
    {
        DWORD dwSlot = GetSlot();

        BACKPATCH(pDispatchingMT);

        if (fFullBackPatch)
        {
            //
            // Backpatch the MethodTable that code:MethodTable::GetRestoredSlot() reads the value from. 
            // VSD reads the slot value using code:MethodTable::GetRestoredSlot(), and so we need to make sure 
            // that it returns the stable entrypoint eventually to avoid going through the slow path all the time.
            //
            MethodTable * pRestoredSlotMT = pDispatchingMT->GetRestoredSlotMT(dwSlot);

            BACKPATCH(pRestoredSlotMT);
        }
    }

    // MethodImpl'd methods can occupy several slots; patch each of them.
    if (IsMethodImpl())
    {
        MethodImpl::Iterator it(this);
        while (it.IsValid())
        {
            DWORD dwSlot = it.GetSlot();

            BACKPATCH(pMT);

            if (pDispatchingMT != NULL)
            {
                BACKPATCH(pDispatchingMT);
            }

            it.Next();
        }
    }

    if (fFullBackPatch && !fBackpatched && IsDuplicate())
    {
        // If this is a duplicate, let's scan the rest of the VTable hunting for other hits.
        unsigned numSlots = pMT->GetNumVirtuals();
        for (DWORD dwSlot=0; dwSlot<numSlots; dwSlot++)
        {
            BACKPATCH(pMT);

            if (pDispatchingMT != NULL)
            {
                BACKPATCH(pDispatchingMT);
            }
        }
    }

#undef BACKPATCH

    return pTarget;
}
201
202 // <TODO> FIX IN BETA 2
203 //
204 // g_pNotificationTable is only modified by the DAC and therefore the
// optimizer can assume that it will always be its default value and has
206 // been seen to (on IA64 free builds) eliminate the code in DACNotifyCompilationFinished
207 // such that DAC notifications are no longer sent.
208 //
209 // TODO: fix this in Beta 2
210 // the RIGHT fix is to make g_pNotificationTable volatile, but currently
211 // we don't have DAC macros to do that. Additionally, there are a number
212 // of other places we should look at DAC definitions to determine if they
213 // should be also declared volatile.
214 //
215 // for now we just turn off optimization for these guys
216 #ifdef _MSC_VER 
217 #pragma optimize("", off)
218 #endif
219
220 void DACNotifyCompilationFinished(MethodDesc *methodDesc)
221 {
222     CONTRACTL
223     {
224         NOTHROW;
225         GC_NOTRIGGER;
226         SO_INTOLERANT;
227         MODE_PREEMPTIVE;
228     }
229     CONTRACTL_END;
230
231     // Is the list active?
232     JITNotifications jn(g_pNotificationTable);
233     if (jn.IsActive())
234     {
235         // Get Module and mdToken
236         mdToken t = methodDesc->GetMemberDef();
237         Module *modulePtr = methodDesc->GetModule();
238
239         _ASSERTE(modulePtr);
240
241 #ifndef FEATURE_GDBJIT
242         // Are we listed?
243         USHORT jnt = jn.Requested((TADDR) modulePtr, t);
244         if (jnt & CLRDATA_METHNOTIFY_GENERATED)
245         {
246             // If so, throw an exception!
247 #endif
248             DACNotify::DoJITNotification(methodDesc);
249 #ifndef FEATURE_GDBJIT
250         }
251 #endif
252     }
253 }
254
255 #ifdef _MSC_VER 
256 #pragma optimize("", on)
257 #endif
258 // </TODO>
259
260
261 // ********************************************************************
262 //                  README!!
263 // ********************************************************************
264
265 // MakeJitWorker is the thread safe way to invoke the JIT compiler
266 // If multiple threads get in here for the same pMD, ALL of them
267 // MUST return the SAME value for pstub.
268 //
269 // This function creates a DeadlockAware list of methods being jitted
// which prevents us from trying to JIT the same method more than once.
271
//
// MakeJitWorker: thread-safe JIT entry point for this MethodDesc (see the
// README comment above for the multi-thread contract).
//
//   ILHeader - decoded IL to compile. For IL stubs it is re-fetched under
//              the jit lock; a profiler may also replace it in-place.
//   flags    - JIT flags; CORJIT_FLAG_TIER0 is added here for methods
//              eligible for tiered compilation.
//
// Returns the published code entry point (native code, or an interpreter
// stub under FEATURE_INTERPRETER). All racing threads return the same value.
//
PCODE MethodDesc::MakeJitWorker(COR_ILMETHOD_DECODER* ILHeader, CORJIT_FLAGS flags)
{
    STANDARD_VM_CONTRACT;

    BOOL fIsILStub = IsILStub();        // @TODO: understand the need for this special case

    LOG((LF_JIT, LL_INFO1000000,
         "MakeJitWorker(" FMT_ADDR ", %s) for %s:%s\n",
         DBG_ADDR(this),
         fIsILStub               ? " TRUE" : "FALSE",
         GetMethodTable()->GetDebugClassName(),
         m_pszDebugMethodName));

#if defined(FEATURE_JIT_PITCHING)
    CheckStacksAndPitch();
#endif

    PCODE pCode = NULL;
    ULONG sizeOfCode = 0;
#if defined(FEATURE_INTERPRETER) || defined(FEATURE_TIERED_COMPILATION)
    BOOL fStable = TRUE;  // True iff the new code address (to be stored in pCode), is a stable entry point.
#endif
#ifdef FEATURE_INTERPRETER
    PCODE pPreviousInterpStub = NULL;
    BOOL fInterpreted = FALSE;
#endif

#ifdef FEATURE_MULTICOREJIT
    MulticoreJitManager & mcJitManager = GetAppDomain()->GetMulticoreJitManager();

    bool fBackgroundThread = flags.IsSet(CORJIT_FLAGS::CORJIT_FLAG_MCJIT_BACKGROUND);
#endif

    // If this is the first stage of a tiered compilation progression, use tier0, otherwise
    // use default compilation options
#ifdef FEATURE_TIERED_COMPILATION
    if (!IsEligibleForTieredCompilation())
    {
        fStable = TRUE;
    }
    else
    {
        fStable = FALSE;
        flags.Add(CORJIT_FLAGS(CORJIT_FLAGS::CORJIT_FLAG_TIER0));
    }
#endif

    {
        // Enter the global lock which protects the list of all functions being JITd
        ListLockHolder pJitLock (GetDomain()->GetJitLock());

        // It is possible that another thread stepped in before we entered the global lock for the first time.
        pCode = GetNativeCode();
        if (pCode != NULL)
        {
#ifdef FEATURE_INTERPRETER
            // An interpreter stub published for this method doesn't count as
            // final code: remember it and continue so we can JIT over it.
            if (Interpreter::InterpretationStubToMethodInfo(pCode) == this)
            {
                pPreviousInterpStub = pCode;
            }
            else
#endif // FEATURE_INTERPRETER
            goto Done;
        }

        const char *description = "jit lock";
        INDEBUG(description = m_pszDebugMethodName;)
        ListLockEntryHolder pEntry(ListLockEntry::Find(pJitLock, this, description));

        // We have an entry now, we can release the global lock
        pJitLock.Release();

        // Take the entry lock
        {
            ListLockEntryLockHolder pEntryLock(pEntry, FALSE);

            if (pEntryLock.DeadlockAwareAcquire())
            {
                if (pEntry->m_hrResultCode == S_FALSE)
                {
                    // Nobody has jitted the method yet
                }
                else
                {
                    // We came in to jit but someone beat us so return the
                    // jitted method!

                    // We can just fall through because we will notice below that
                    // the method has code.

                    // @todo: Note that we may have a failed HRESULT here -
                    // we might want to return an early error rather than
                    // repeatedly failing the jit.
                }
            }
            else
            {
                // Taking this lock would cause a deadlock (presumably because we
                // are involved in a class constructor circular dependency.)  For
                // instance, another thread may be waiting to run the class constructor
                // that we are jitting, but is currently jitting this function.
                //
                // To remedy this, we want to go ahead and do the jitting anyway.
                // The other threads contending for the lock will then notice that
                // the jit finished while they were running class constructors, and abort their
                // current jit effort.
                //
                // We don't have to do anything special right here since we
                // can check HasNativeCode() to detect this case later.
                //
                // Note that at this point we don't have the lock, but that's OK because the
                // thread which does have the lock is blocked waiting for us.
            }

            // It is possible that another thread stepped in before we entered the lock.
            pCode = GetNativeCode();
#ifdef FEATURE_INTERPRETER
            if (pCode != NULL && (pCode != pPreviousInterpStub))
#else
            if (pCode != NULL)
#endif // FEATURE_INTERPRETER
            {
                goto Done;
            }

            SString namespaceOrClassName, methodName, methodSignature;

            PCODE pOtherCode = NULL; // Need to move here due to 'goto GotNewCode'

#ifdef FEATURE_MULTICOREJIT

            bool fCompiledInBackground = false;

            // If not called from multi-core JIT thread, 
            if (! fBackgroundThread)
            {
                // Quick check before calling expensive out of line function on this method's domain has code JITted by background thread
                if (mcJitManager.GetMulticoreJitCodeStorage().GetRemainingMethodCount() > 0)
                {
                    if (MulticoreJitManager::IsMethodSupported(this))
                    {
                        pCode = mcJitManager.RequestMethodCode(this); // Query multi-core JIT manager for compiled code

                        // Multicore JIT manager starts background thread to pre-compile methods, but it does not back-patch it/notify profiler/notify DAC,
                        // Jump to GotNewCode to do so
                        if (pCode != NULL)
                        {
                            fCompiledInBackground = true;

#ifdef DEBUGGING_SUPPORTED
                            // Notify the debugger of the jitted function
                            if (g_pDebugInterface != NULL)
                            {
                                g_pDebugInterface->JITComplete(this, pCode);
                            }
#endif

                            goto GotNewCode;
                        }
                    }
                }
            }
#endif

            if (fIsILStub)
            {
                // we race with other threads to JIT the code for an IL stub and the
                // IL header is released once one of the threads completes.  As a result
                // we must be inside the lock to reliably get the IL header for the
                // stub.

                ILStubResolver* pResolver = AsDynamicMethodDesc()->GetILStubResolver();
                ILHeader = pResolver->GetILHeader();
            }

#ifdef MDA_SUPPORTED 
            MdaJitCompilationStart* pProbe = MDA_GET_ASSISTANT(JitCompilationStart);
            if (pProbe)
                pProbe->NowCompiling(this);
#endif // MDA_SUPPORTED

#ifdef PROFILING_SUPPORTED 
            // If profiling, need to give a chance for a tool to examine and modify
            // the IL before it gets to the JIT.  This allows one to add probe calls for
            // things like code coverage, performance, or whatever.
            {
                BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo());

#ifdef FEATURE_MULTICOREJIT
                // Multicore JIT should be disabled when CORProfilerTrackJITInfo is on
                // But there could be corner case in which profiler is attached when multicore background thread is calling MakeJitWorker
                // Disable this block when calling from multicore JIT background thread
                if (!fBackgroundThread)
#endif
                {
                    if (!IsNoMetadata())
                    {
                        g_profControlBlock.pProfInterface->JITCompilationStarted((FunctionID) this, TRUE);
                        // The profiler may have changed the code on the callback.  Need to
                        // pick up the new code.  Note that you have to be fully trusted in
                        // this mode and the code will not be verified.
                        COR_ILMETHOD *pilHeader = GetILHeader(TRUE);
                        new (ILHeader) COR_ILMETHOD_DECODER(pilHeader, GetMDImport(), NULL);
                    }
                    else
                    {
                        unsigned int ilSize, unused;
                        CorInfoOptions corOptions;
                        LPCBYTE ilHeaderPointer = this->AsDynamicMethodDesc()->GetResolver()->GetCodeInfo(&ilSize, &unused, &corOptions, &unused);

                        g_profControlBlock.pProfInterface->DynamicMethodJITCompilationStarted((FunctionID) this, TRUE, ilHeaderPointer, ilSize);
                    }
                }
                END_PIN_PROFILER();
            }
#endif // PROFILING_SUPPORTED
#ifdef FEATURE_INTERPRETER
            // We move the ETW event for start of JITting inward, after we make the decision
            // to JIT rather than interpret.
#else  // FEATURE_INTERPRETER
            // Fire an ETW event to mark the beginning of JIT'ing
            ETW::MethodLog::MethodJitting(this, &namespaceOrClassName, &methodName, &methodSignature);
#endif  // FEATURE_INTERPRETER

#ifdef FEATURE_STACK_SAMPLING
#ifdef FEATURE_MULTICOREJIT
            if (!fBackgroundThread)
#endif // FEATURE_MULTICOREJIT
            {
                StackSampler::RecordJittingInfo(this, flags);
            }
#endif // FEATURE_STACK_SAMPLING

            // Invoke the JIT proper. A failure here is swallowed below if a
            // competing thread managed to publish code for this method.
            EX_TRY
            {
                pCode = UnsafeJitFunction(this, ILHeader, flags, &sizeOfCode);
            }
            EX_CATCH
            {
                // If the current thread threw an exception, but a competing thread
                // somehow succeeded at JITting the same function (e.g., out of memory
                // encountered on current thread but not competing thread), then go ahead
                // and swallow this current thread's exception, since we somehow managed
                // to successfully JIT the code on the other thread.
                // 
                // Note that if a deadlock cycle is broken, that does not result in an
                // exception--the thread would just pass through the lock and JIT the
                // function in competition with the other thread (with the winner of the
                // race decided later on when we do SetNativeCodeInterlocked). This
                // try/catch is purely to deal with the (unusual) case where a competing
                // thread succeeded where we aborted.

                pOtherCode = GetNativeCode();

                if (pOtherCode == NULL)
                {
                    pEntry->m_hrResultCode = E_FAIL;
                    EX_RETHROW;
                }
            }
            EX_END_CATCH(RethrowTerminalExceptions)

            if (pOtherCode != NULL)
            {
                // Somebody finished jitting recursively while we were jitting the method.
                // Just use their method & leak the one we finished. (Normally we hope
                // not to finish our JIT in this case, as we will abort early if we notice
                // a reentrant jit has occurred.  But we may not catch every place so we
                // do a definitive final check here.
                pCode = pOtherCode;
                goto Done;
            }

            _ASSERTE(pCode != NULL);

#ifdef HAVE_GCCOVER
            if (GCStress<cfg_instr_jit>::IsEnabled())
            {
                SetupGcCoverage(this, (BYTE*) pCode);
            }
#endif // HAVE_GCCOVER

#ifdef FEATURE_INTERPRETER
            // Determine whether the new code address is "stable"...= is not an interpreter stub.
            fInterpreted = (Interpreter::InterpretationStubToMethodInfo(pCode) == this);
            fStable = !fInterpreted;
#endif // FEATURE_INTERPRETER

#ifdef FEATURE_MULTICOREJIT

            // If called from multi-core JIT background thread, store code under lock, delay patching until code is queried from application threads
            if (fBackgroundThread)
            {
                // Fire an ETW event to mark the end of JIT'ing
                ETW::MethodLog::MethodJitted(this, &namespaceOrClassName, &methodName, &methodSignature, pCode, 0 /* ReJITID */);

#ifdef FEATURE_PERFMAP
                // Save the JIT'd method information so that perf can resolve JIT'd call frames.
                PerfMap::LogJITCompiledMethod(this, pCode, sizeOfCode);
#endif

                mcJitManager.GetMulticoreJitCodeStorage().StoreMethodCode(this, pCode);

                goto Done;
            }

GotNewCode:
#endif
            // If this function had already been requested for rejit (before its original
            // code was jitted), then give the rejit manager a chance to jump-stamp the
            // code we just compiled so the first thread entering the function will jump
            // to the prestub and trigger the rejit. Note that the PublishMethodHolder takes
            // a lock to avoid a particular kind of rejit race. See
            // code:ReJitManager::PublishMethodHolder::PublishMethodHolder#PublishCode for
            // details on the rejit race.
            // 
            // Aside from rejit, performing a SetNativeCodeInterlocked at this point
            // generally ensures that there is only one winning version of the native
            // code. This also avoid races with profiler overriding ngened code (see
            // matching SetNativeCodeInterlocked done after
            // JITCachedFunctionSearchStarted)
#ifdef FEATURE_INTERPRETER
            PCODE pExpected = pPreviousInterpStub;
            if (pExpected == NULL) pExpected = GetTemporaryEntryPoint();
#endif
            {
                ReJitPublishMethodHolder publishWorker(this, pCode);
                if (!SetNativeCodeInterlocked(pCode
#ifdef FEATURE_INTERPRETER
                    , pExpected, fStable
#endif
                    ))
                {
                    // Another thread beat us to publishing its copy of the JITted code.
                    pCode = GetNativeCode();
                    goto Done;
                }
#if defined(FEATURE_JIT_PITCHING)
                else
                {
                    SavePitchingCandidate(this, sizeOfCode);
                }
#endif
            }

#ifdef FEATURE_INTERPRETER
            // State for dynamic methods cannot be freed if the method was ever interpreted,
            // since there is no way to ensure that it is not in use at the moment.
            if (IsDynamicMethod() && !fInterpreted && (pPreviousInterpStub == NULL))
            {
                AsDynamicMethodDesc()->GetResolver()->FreeCompileTimeState();
            }
#endif // FEATURE_INTERPRETER

            // We succeeded in jitting the code, and our jitted code is the one that's going to run now.
            pEntry->m_hrResultCode = S_OK;

 #ifdef PROFILING_SUPPORTED 
            // Notify the profiler that JIT completed.
            // Must do this after the address has been set.
            // @ToDo: Why must we set the address before notifying the profiler ??
            //        Note that if IsInterceptedForDeclSecurity is set no one should access the jitted code address anyway.
            {
                BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo());
                if (!IsNoMetadata())
                {
                    g_profControlBlock.pProfInterface->
                        JITCompilationFinished((FunctionID) this,
                                                pEntry->m_hrResultCode, 
                                                TRUE);
                }
                else
                {
                    g_profControlBlock.pProfInterface->DynamicMethodJITCompilationFinished((FunctionID) this, pEntry->m_hrResultCode, TRUE);
                }
                END_PIN_PROFILER();
            }
#endif // PROFILING_SUPPORTED

#ifdef FEATURE_MULTICOREJIT
            if (! fCompiledInBackground)
#endif
#ifdef FEATURE_INTERPRETER
            // If we didn't JIT, but rather, created an interpreter stub (i.e., fStable is false), don't tell ETW that we did.
            if (fStable)
#endif // FEATURE_INTERPRETER
            {
                // Fire an ETW event to mark the end of JIT'ing
                ETW::MethodLog::MethodJitted(this, &namespaceOrClassName, &methodName, &methodSignature, pCode, 0 /* ReJITID */);

#ifdef FEATURE_PERFMAP
                // Save the JIT'd method information so that perf can resolve JIT'd call frames.
                PerfMap::LogJITCompiledMethod(this, pCode, sizeOfCode);
#endif
            }


#ifdef FEATURE_MULTICOREJIT

            // If not called from multi-core JIT thread, not got code from storage, quick check before calling out of line function
            if (! fBackgroundThread && ! fCompiledInBackground && mcJitManager.IsRecorderActive())
            {
                if (MulticoreJitManager::IsMethodSupported(this))
                {
                    mcJitManager.RecordMethodJit(this); // Tell multi-core JIT manager to record method on successful JITting
                }
            }
#endif

            if (!fIsILStub)
            {
                // The notification will only occur if someone has registered for this method.
                DACNotifyCompilationFinished(this);
            }
        }
    }

Done:

    // We must have a code by now.
    _ASSERTE(pCode != NULL);

    LOG((LF_CORDB, LL_EVERYTHING, "MethodDesc::MakeJitWorker finished. Stub is" FMT_ADDR "\n",
         DBG_ADDR(pCode)));

    return pCode;
}
699
700 #ifdef FEATURE_STUBS_AS_IL
701
702 // CreateInstantiatingILStubTargetSig:
703 // This method is used to create the signature of the target of the ILStub
704 // for instantiating and unboxing stubs, when/where we need to introduce a generic context.
705 // And since the generic context is a hidden parameter, we're creating a signature that
706 // looks like non-generic but has one additional parameter right after the thisptr
707 void CreateInstantiatingILStubTargetSig(MethodDesc *pBaseMD, 
708                                         SigTypeContext &typeContext,
709                                         SigBuilder *stubSigBuilder)
710 {
711     STANDARD_VM_CONTRACT;
712
713     MetaSig msig(pBaseMD);
714     BYTE callingConvention = IMAGE_CEE_CS_CALLCONV_DEFAULT;
715     if (msig.HasThis())
716         callingConvention |= IMAGE_CEE_CS_CALLCONV_HASTHIS;
717     // CallingConvention
718     stubSigBuilder->AppendByte(callingConvention); 
719
720     // ParamCount
721     stubSigBuilder->AppendData(msig.NumFixedArgs() + 1); // +1 is for context param
722
723     // Return type
724     SigPointer pReturn = msig.GetReturnProps();
725     pReturn.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder, FALSE);
726
727 #ifndef _TARGET_X86_
728     // The hidden context parameter
729     stubSigBuilder->AppendElementType(ELEMENT_TYPE_I);            
730 #endif // !_TARGET_X86_
731
732     // Copy rest of the arguments
733     msig.NextArg();
734     SigPointer pArgs = msig.GetArgProps();
735     for (unsigned i = 0; i < msig.NumFixedArgs(); i++)
736     {
737         pArgs.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder);
738     }
739
740 #ifdef _TARGET_X86_
741     // The hidden context parameter
742     stubSigBuilder->AppendElementType(ELEMENT_TYPE_I);
743 #endif // _TARGET_X86_
744 }
745
//
// CreateUnboxingILStubForSharedGenericValueTypeMethods: builds an IL stub
// that lets a boxed 'this' call a shared-generic value-type method. The
// stub unboxes 'this', materializes the hidden generic-context argument
// from the boxed object, forwards the remaining arguments, and performs a
// calli to the target method. Returns a Stub wrapping the jitted IL stub.
//
Stub * CreateUnboxingILStubForSharedGenericValueTypeMethods(MethodDesc* pTargetMD)
{

    CONTRACT(Stub*)
    {
        THROWS;
        GC_TRIGGERS;
        POSTCONDITION(CheckPointer(RETVAL));
    }
    CONTRACT_END;

    SigTypeContext typeContext(pTargetMD);

    MetaSig msig(pTargetMD);

    _ASSERTE(msig.HasThis());

    ILStubLinker sl(pTargetMD->GetModule(), 
                    pTargetMD->GetSignature(), 
                    &typeContext, 
                    pTargetMD, 
                    TRUE,           // fTargetHasThis
                    TRUE,           // fStubHasThis
                    FALSE           // fIsNDirectStub
                    );

    ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch);

    // 1. Build the new signature
    SigBuilder stubSigBuilder;
    CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder);

    // 2. Emit the method body
    mdToken tokPinningHelper = pCode->GetToken(MscorlibBinder::GetField(FIELD__PINNING_HELPER__M_DATA));

    // 2.1 Push the thisptr
    // We need to skip over the MethodTable*
    // The trick below will do that.
    pCode->EmitLoadThis();
    pCode->EmitLDFLDA(tokPinningHelper);

#if defined(_TARGET_X86_)
    // 2.2 Push the rest of the arguments for x86
    for (unsigned i = 0; i < msig.NumFixedArgs();i++)
    {
        pCode->EmitLDARG(i);
    }
#endif

    // 2.3 Push the hidden context param
    // The context is going to be captured from the thisptr
    // (subtracting the offset of the first field from the unboxed data
    // address recovers the start of the object, and the indirect load
    // then reads the MethodTable* stored there).
    pCode->EmitLoadThis();
    pCode->EmitLDFLDA(tokPinningHelper);
    pCode->EmitLDC(Object::GetOffsetOfFirstField());
    pCode->EmitSUB();
    pCode->EmitLDIND_I();

#if !defined(_TARGET_X86_)
    // 2.4 Push the rest of the arguments for not x86
    for (unsigned i = 0; i < msig.NumFixedArgs();i++)
    {
        pCode->EmitLDARG(i);
    }
#endif

    // 2.5 Push the target address
    pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY));

    // 2.6 Do the calli
    pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1);
    pCode->EmitRET();

    // Create the stub MethodDesc in the loader module's IL stub cache and
    // link the IL we just emitted into it.
    PCCOR_SIGNATURE pSig;
    DWORD cbSig;
    pTargetMD->GetSig(&pSig,&cbSig);
    PTR_Module pLoaderModule = pTargetMD->GetLoaderModule();
    MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(),
                                                            pLoaderModule->GetILStubCache()->GetOrCreateStubMethodTable(pLoaderModule),
                                                            ILSTUB_UNBOXINGILSTUB, 
                                                            pTargetMD->GetModule(),
                                                            pSig, cbSig,
                                                            &typeContext,
                                                            &sl);

    ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver();

    // Record the calli target signature built in step 1 on the resolver.
    DWORD cbTargetSig = 0;
    PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig);
    pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig);
    pResolver->SetStubTargetMethodDesc(pTargetMD);

    RETURN Stub::NewStub(JitILStub(pStubMD));

}
840
// Builds an IL "instantiating" stub that forwards a call to shared generic
// code, inserting the hidden instantiation argument the shared code expects.
//
// pTargetMD  - the shared-code method the stub will invoke via calli
// pHiddenArg - the hidden instantiation argument to pass through:
//              a MethodDesc* when pTargetMD has a method instantiation,
//              otherwise a MethodTable* (per-instantiation statics case)
//
// Returns a Stub wrapping the JITted IL stub; throws on failure.
Stub * CreateInstantiatingILStub(MethodDesc* pTargetMD, void* pHiddenArg)
{

    CONTRACT(Stub*)
    {
        THROWS;
        GC_TRIGGERS;
        PRECONDITION(CheckPointer(pHiddenArg));
        POSTCONDITION(CheckPointer(RETVAL));
    }
    CONTRACT_END;

    // Recover the type context (and the MethodTable that will own the stub)
    // from whichever flavor of hidden argument we were handed.
    SigTypeContext typeContext;
    MethodTable* pStubMT;
    if (pTargetMD->HasMethodInstantiation())
    {
        // The pHiddenArg shall be a MethodDesc*
        MethodDesc* pMD = static_cast<MethodDesc *>(pHiddenArg);
        SigTypeContext::InitTypeContext(pMD, &typeContext);
        pStubMT = pMD->GetMethodTable();
    }
    else
    {
        // The pHiddenArg shall be a MethodTable*
        SigTypeContext::InitTypeContext(TypeHandle::FromPtr(pHiddenArg), &typeContext);
        pStubMT = static_cast<MethodTable *>(pHiddenArg);
    }

    MetaSig msig(pTargetMD);

    // The stub has a 'this' exactly when the target does; it is not an
    // NDirect (P/Invoke) stub.
    ILStubLinker sl(pTargetMD->GetModule(), 
                    pTargetMD->GetSignature(), 
                    &typeContext, 
                    pTargetMD, 
                    msig.HasThis(), // fTargetHasThis
                    msig.HasThis(), // fStubHasThis
                    FALSE           // fIsNDirectStub
                    );

    ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch);

    // 1. Build the new signature
    SigBuilder stubSigBuilder;
    CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder);
    
    // 2. Emit the method body
    if (msig.HasThis())
    {
        // 2.1 Push the thisptr
        pCode->EmitLoadThis();
    }

    // NOTE: on x86 the fixed arguments are pushed before the hidden context
    // parameter; on all other targets the hidden parameter goes first (see
    // the mirrored #if blocks below) — the ordering matches each target's
    // calling convention for the instantiation argument.
#if defined(_TARGET_X86_)
    // 2.2 Push the rest of the arguments for x86
    for (unsigned i = 0; i < msig.NumFixedArgs();i++)
    {
        pCode->EmitLDARG(i);
    }
#endif // _TARGET_X86_

    // 2.3 Push the hidden context param
    // InstantiatingStub
    pCode->EmitLDC((TADDR)pHiddenArg);

#if !defined(_TARGET_X86_)
    // 2.4 Push the rest of the arguments for not x86
    for (unsigned i = 0; i < msig.NumFixedArgs();i++)
    {
        pCode->EmitLDARG(i);
    }
#endif // !_TARGET_X86_

    // 2.5 Push the target address
    pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY));

    // 2.6 Do the calli
    // "+ 1" accounts for the hidden instantiation argument added above.
    pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1);
    pCode->EmitRET();

    // Create the dynamic-method MethodDesc for the stub, using the target's
    // original signature; the stub lives in pStubMT's IL stub cache.
    PCCOR_SIGNATURE pSig;
    DWORD cbSig;
    pTargetMD->GetSig(&pSig,&cbSig);
    PTR_Module pLoaderModule = pTargetMD->GetLoaderModule();
    MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(),
                                                            pStubMT,
                                                            ILSTUB_INSTANTIATINGSTUB, 
                                                            pTargetMD->GetModule(),
                                                            pSig, cbSig,
                                                            &typeContext,
                                                            &sl);

    ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver();

    // Attach the calli target signature (built in step 1) and the target
    // method so the JIT can resolve TOKEN_ILSTUB_TARGET_SIG.
    DWORD cbTargetSig = 0;
    PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig);
    pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig);
    pResolver->SetStubTargetMethodDesc(pTargetMD);

    RETURN Stub::NewStub(JitILStub(pStubMD));
}
941 #endif
942
/* Make a stub for a value class method that expects a BOXed this pointer */
// pMD is the boxed-entry-point (unboxing) MethodDesc; the generated stub
// forwards to the wrapped unboxed MethodDesc. Shared generic value-type
// methods that need the method-table argument require the IL-stub path
// (FEATURE_STUBS_AS_IL); otherwise a simple CPU stub is emitted.
Stub * MakeUnboxingStubWorker(MethodDesc *pMD)
{
    CONTRACT(Stub*)
    {
        THROWS;
        GC_TRIGGERS;
        POSTCONDITION(CheckPointer(RETVAL));
    }
    CONTRACT_END;

    Stub *pstub = NULL;

    // Unboxing stubs only make sense on fully-instantiated value-type methods.
    _ASSERTE (pMD->GetMethodTable()->IsValueType());
    _ASSERTE(!pMD->ContainsGenericVariables());
    MethodDesc *pUnboxedMD = pMD->GetWrappedMethodDesc();

    _ASSERTE(pUnboxedMD != NULL && pUnboxedMD != pMD);

#ifdef FEATURE_STUBS_AS_IL
    if (pUnboxedMD->RequiresInstMethodTableArg())
    {
        // Shared generic case: the target also needs a hidden MethodTable*
        // argument, which only the IL stub knows how to materialize.
        pstub = CreateUnboxingILStubForSharedGenericValueTypeMethods(pUnboxedMD);
    }
    else
#endif
    {
        CPUSTUBLINKER sl;
        sl.EmitUnboxMethodStub(pUnboxedMD);
        pstub = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap());
    }
    RETURN pstub;
}
976
#if defined(FEATURE_SHARE_GENERIC_CODE) 
// Builds the stub for an instantiating-stub MethodDesc: it calls the shared
// generic code for this instantiation, supplying the appropriate hidden
// instantiation argument.
Stub * MakeInstantiatingStubWorker(MethodDesc *pMD)
{
    CONTRACT(Stub*)
    {
        THROWS;
        GC_TRIGGERS;
        PRECONDITION(pMD->IsInstantiatingStub());
        PRECONDITION(!pMD->RequiresInstArg());
        PRECONDITION(!pMD->IsSharedByGenericMethodInstantiations());
        POSTCONDITION(CheckPointer(RETVAL));
    }
    CONTRACT_END;

    // This path must stay idempotent: multiple threads may race here for the
    // same pMD, and it must not matter whose result finally gets used.

    // The instantiating stub always wraps the shared code associated with
    // this instantiation.
    MethodDesc *pSharedCode = pMD->GetWrappedMethodDesc();
    _ASSERTE(pSharedCode != NULL && pSharedCode != pMD);

    // Pick the hidden instantiation argument: the MethodDesc itself for an
    // instantiated generic method, otherwise the MethodTable for a
    // per-instantiation static method.
    void *pHiddenArg = pMD->HasMethodInstantiation()
                           ? static_cast<void *>(pMD)
                           : static_cast<void *>(pMD->GetMethodTable());

    Stub *pResult = NULL;

#ifdef FEATURE_STUBS_AS_IL
    pResult = CreateInstantiatingILStub(pSharedCode, pHiddenArg);
#else
    CPUSTUBLINKER sl;
    _ASSERTE(pSharedCode != NULL && pSharedCode != pMD);
    sl.EmitInstantiatingMethodStub(pSharedCode, pHiddenArg);

    pResult = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap());
#endif

    RETURN pResult;
}
#endif // defined(FEATURE_SHARE_GENERIC_CODE)
1027
1028 #if defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
1029
1030 extern "C" MethodDesc * STDCALL PreStubGetMethodDescForCompactEntryPoint (PCODE pCode)
1031 {
1032     _ASSERTE (pCode >= PC_REG_RELATIVE_OFFSET);
1033
1034     pCode = (PCODE) (pCode - PC_REG_RELATIVE_OFFSET + THUMB_CODE);
1035
1036     _ASSERTE (MethodDescChunk::IsCompactEntryPointAtAddress (pCode));
1037
1038     return MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, FALSE);
1039 }
1040
1041 #endif // defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
1042
1043 //=============================================================================
1044 // This function generates the real code for a method and installs it into
1045 // the methoddesc. Usually ***BUT NOT ALWAYS***, this function runs only once
1046 // per methoddesc. In addition to installing the new code, this function
1047 // returns a pointer to the new code for the prestub's convenience.
1048 //=============================================================================
// Prestub entry point called (via ThePreStub) the first time a method is
// invoked. Sets up a PrestubMethodFrame over the transition block, resolves
// the dispatching MethodTable for virtual calls, and hands off to
// MethodDesc::DoPrestub to produce and install the real code.
//
// pTransitionBlock - register/argument save area pushed by the asm prestub
// pMD              - the method being invoked
//
// Returns the entry point the asm stub should tail-call into.
extern "C" PCODE STDCALL PreStubWorker(TransitionBlock * pTransitionBlock, MethodDesc * pMD)
{
    PCODE pbRetVal = NULL;

    // The caller's Win32 last-error must survive the prestub.
    BEGIN_PRESERVE_LAST_ERROR;

    STATIC_CONTRACT_THROWS;
    STATIC_CONTRACT_GC_TRIGGERS;
    STATIC_CONTRACT_MODE_COOPERATIVE;
    STATIC_CONTRACT_ENTRY_POINT;

    MAKE_CURRENT_THREAD_AVAILABLE();

#ifdef _DEBUG
    Thread::ObjectRefFlush(CURRENT_THREAD);
#endif

    // Push an explicit frame so the GC/stackwalker can see the arguments
    // saved in the transition block.
    FrameWithCookie<PrestubMethodFrame> frame(pTransitionBlock, pMD);
    PrestubMethodFrame * pPFrame = &frame;

    pPFrame->Push(CURRENT_THREAD);

    INSTALL_MANAGED_EXCEPTION_DISPATCHER;
    INSTALL_UNWIND_AND_CONTINUE_HANDLER;

    ETWOnStartup (PrestubWorker_V1,PrestubWorkerEnd_V1);

    _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");

    // Running the PreStubWorker on a method causes us to access its MethodTable
    g_IBCLogger.LogMethodDescAccess(pMD);

    // Make sure the method table is restored, and method instantiation if present
    pMD->CheckRestore();

    CONSISTENCY_CHECK(GetAppDomain()->CheckCanExecuteManagedCode(pMD));

    // Note this is redundant with the above check but we do it anyway for safety
    //
    // This has been disabled so we have a better chance of catching these.  Note that this check is
    // NOT sufficient for domain neutral and ngen cases.
    //
    // pMD->EnsureActive();

    MethodTable *pDispatchingMT = NULL;

    // For virtual methods, derive the MethodTable to dispatch through from
    // the actual 'this' object saved in the frame.
    if (pMD->IsVtableMethod())
    {
        OBJECTREF curobj = pPFrame->GetThis();

        if (curobj != NULL) // Check for virtual function called non-virtually on a NULL object
        {
            pDispatchingMT = curobj->GetTrueMethodTable();

#ifdef FEATURE_ICASTABLE
            if (pDispatchingMT->IsICastable())
            {
                MethodTable *pMDMT = pMD->GetMethodTable();
                TypeHandle objectType(pDispatchingMT);
                TypeHandle methodType(pMDMT);

                GCStress<cfg_any>::MaybeTrigger();
                INDEBUG(curobj = NULL); // curobj is unprotected and CanCastTo() can trigger GC
                if (!objectType.CanCastTo(methodType)) 
                {
                    // Apparently ICastable magic was involved when we chose this method to be called;
                    // in that case we'd better stick to the MethodTable the method belongs to, otherwise
                    // DoPrestub() will fail, being unable to find an implementation for pMD in pDispatchingMT.

                    pDispatchingMT = pMDMT;
                }
            }
#endif // FEATURE_ICASTABLE

            // For value types, the only virtual methods are interface implementations.
            // Thus pDispatching == pMT because there
            // is no inheritance in value types.  Note the BoxedEntryPointStubs are shared
            // between all sharable generic instantiations, so the == test is on
            // canonical method tables.
#ifdef _DEBUG 
            MethodTable *pMDMT = pMD->GetMethodTable(); // put this here to see what the MT is in debug mode
            _ASSERTE(!pMD->GetMethodTable()->IsValueType() ||
                     (pMD->IsUnboxingStub() && (pDispatchingMT->GetCanonicalMethodTable() == pMDMT->GetCanonicalMethodTable())));
#endif // _DEBUG
        }
    }

    // DoPrestub (JIT etc.) runs in preemptive mode; the frame pushed above
    // keeps the arguments reported to the GC.
    GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
    pbRetVal = pMD->DoPrestub(pDispatchingMT);

    UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
    UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;

    {
        HardwareExceptionHolder

        // Give debugger opportunity to stop here
        ThePreStubPatch();
    }

    pPFrame->Pop(CURRENT_THREAD);

    POSTCONDITION(pbRetVal != NULL);

    END_PRESERVE_LAST_ERROR;

    return pbRetVal;
}
1157
1158 #ifdef _DEBUG 
1159 //
1160 // These are two functions for testing purposes only, in debug builds only. They can be used by setting
1161 // InjectFatalError to 3. They ensure that we really can restore the guard page for SEH try/catch clauses.
1162 //
1163 // @todo: Do we use this for anything anymore?
1164 //
// Helper invoked under PAL_TRY by TestSEHGuardPageRestore below.
// NOTE(review): the test expects this call to trigger a stack overflow so
// the surrounding PAL_EXCEPT handlers fire, but as checked in the body is
// empty, so no overflow can occur — confirm whether the overflowing body
// was intentionally removed.
static void TestSEHGuardPageRestoreOverflow()
{
}
1168
// Debug-only self-test (reached via InjectFatalError == 3): runs the
// overflow helper twice, each under its own PAL_TRY/PAL_EXCEPT, to verify
// that the stack guard page is restored after the first SEH catch. Seeing
// both asserts fire means the mechanism works (see comment block above).
static void TestSEHGuardPageRestore()
{
        PAL_TRY(void *, unused, NULL)
        {
            TestSEHGuardPageRestoreOverflow();
        }
        PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER)
        {
            _ASSERTE(!"Got first overflow.");
        }
        PAL_ENDTRY;

        // Second attempt: only reachable if the guard page was successfully
        // restored after catching the first overflow.
        PAL_TRY(void *, unused, NULL)
        {
            TestSEHGuardPageRestoreOverflow();
        }
        PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER)
        {
            // If you get two asserts, then it works!
            _ASSERTE(!"Got second overflow.");
        }
        PAL_ENDTRY;
}
1192 #endif // _DEBUG
1193
1194 // Separated out the body of PreStubWorker for the case where we don't have a frame.
1195 //
1196 // Note that pDispatchingMT may not actually be the MT that is indirected through.
1197 // If a virtual method is called non-virtually, pMT will be used to indirect through
1198 // 
1199 // This returns a pointer to the stable entrypoint for the jitted method. Typically, this
1200 // is the same as the pointer to the top of the JITted code of the method. However, in
1201 // the case of methods that require stubs to be executed first (e.g., remoted methods
1202 // that require remoting stubs to be executed first), this stable entrypoint would be a
1203 // pointer to the stub, and not a pointer directly to the JITted code.
1204 PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT)
1205 {
1206     CONTRACT(PCODE)
1207     {
1208         STANDARD_VM_CHECK;
1209         POSTCONDITION(RETVAL != NULL);
1210     }
1211     CONTRACT_END;
1212
1213     Stub *pStub = NULL;
1214     PCODE pCode = NULL;
1215
1216     Thread *pThread = GetThread();
1217
1218     MethodTable *pMT = GetMethodTable();
1219
1220     // Running a prestub on a method causes us to access its MethodTable
1221     g_IBCLogger.LogMethodDescAccess(this);
1222
1223     // A secondary layer of defense against executing code in inspection-only assembly.
1224     // This should already have been taken care of by not allowing inspection assemblies
1225     // to be activated. However, this is a very inexpensive piece of insurance in the name
1226     // of security.
1227     if (IsIntrospectionOnly())
1228     {
1229         _ASSERTE(!"A ReflectionOnly assembly reached the prestub. This should not have happened.");
1230         COMPlusThrow(kInvalidOperationException, IDS_EE_CODEEXECUTION_IN_INTROSPECTIVE_ASSEMBLY);
1231     }
1232
1233     if (ContainsGenericVariables())
1234     {
1235         COMPlusThrow(kInvalidOperationException, IDS_EE_CODEEXECUTION_CONTAINSGENERICVAR);
1236     }
1237
1238     /**************************   DEBUG CHECKS  *************************/
1239     /*-----------------------------------------------------------------
1240     // Halt if needed, GC stress, check the sharing count etc.
1241     */
1242
1243 #ifdef _DEBUG 
1244     static unsigned ctr = 0;
1245     ctr++;
1246
1247     if (g_pConfig->ShouldPrestubHalt(this))
1248     {
1249         _ASSERTE(!"PreStubHalt");
1250     }
1251
1252     LOG((LF_CLASSLOADER, LL_INFO10000, "In PreStubWorker for %s::%s\n",
1253                 m_pszDebugClassName, m_pszDebugMethodName));
1254
1255     // This is a nice place to test out having some fatal EE errors. We do this only in a checked build, and only
1256     // under the InjectFatalError key.
1257     if (g_pConfig->InjectFatalError() == 1)
1258     {
1259         EEPOLICY_HANDLE_FATAL_ERROR(COR_E_EXECUTIONENGINE);
1260     }
1261     else if (g_pConfig->InjectFatalError() == 2)
1262     {
1263         EEPOLICY_HANDLE_FATAL_ERROR(COR_E_STACKOVERFLOW);
1264     }
1265     else if (g_pConfig->InjectFatalError() == 3)
1266     {
1267         TestSEHGuardPageRestore();
1268     }
1269
1270     // Useful to test GC with the prestub on the call stack
1271     if (g_pConfig->ShouldPrestubGC(this))
1272     {
1273         GCX_COOP();
1274         GCHeapUtilities::GetGCHeap()->GarbageCollect(-1);
1275     }
1276 #endif // _DEBUG
1277
1278     STRESS_LOG1(LF_CLASSLOADER, LL_INFO10000, "Prestubworker: method %pM\n", this);
1279
1280
1281     GCStress<cfg_any, EeconfigFastGcSPolicy, CoopGcModePolicy>::MaybeTrigger();
1282
1283     // Are we in the prestub because of a rejit request?  If so, let the ReJitManager
1284     // take it from here.
1285     pCode = ReJitManager::DoReJitIfNecessary(this);
1286     if (pCode != NULL)
1287     {
1288         // A ReJIT was performed, so nothing left for DoPrestub() to do. Return now.
1289         // 
1290         // The stable entrypoint will either be a pointer to the original JITted code
1291         // (with a jmp at the top to jump to the newly-rejitted code) OR a pointer to any
1292         // stub code that must be executed first (e.g., a remoting stub), which in turn
1293         // will call the original JITted code (which then jmps to the newly-rejitted
1294         // code).
1295         RETURN GetStableEntryPoint();
1296     }
1297
1298
1299 #ifdef FEATURE_COMINTEROP 
1300     /**************************   INTEROP   *************************/
1301     /*-----------------------------------------------------------------
1302     // Some method descriptors are COMPLUS-to-COM call descriptors
1303     // they are not your every day method descriptors, for example
1304     // they don't have an IL or code.
1305     */
1306     if (IsComPlusCall() || IsGenericComPlusCall())
1307     {
1308         pCode = GetStubForInteropMethod(this);
1309         
1310         GetPrecode()->SetTargetInterlocked(pCode);
1311
1312         RETURN GetStableEntryPoint();
1313     }
1314 #endif // FEATURE_COMINTEROP
1315
1316     // workaround: This is to handle a punted work item dealing with a skipped module constructor
1317     //       due to appdomain unload. Basically shared code was JITted in domain A, and then
1318     //       this caused a link to another shared module with a module CCTOR, which was skipped
1319     //       or aborted in another appdomain we were trying to propagate the activation to.
1320     //
1321     //       Note that this is not a fix, but that it just minimizes the window in which the
1322     //       issue can occur.
1323     if (pThread->IsAbortRequested())
1324     {
1325         pThread->HandleThreadAbort();
1326     }
1327
1328     /**************************   CLASS CONSTRUCTOR   ********************/
1329     // Make sure .cctor has been run
1330
1331     if (IsClassConstructorTriggeredViaPrestub())
1332     {
1333         pMT->CheckRunClassInitThrowing();
1334     }
1335
1336     /**************************   BACKPATCHING   *************************/
1337     // See if the addr of code has changed from the pre-stub
1338 #ifdef FEATURE_INTERPRETER
1339     if (!IsReallyPointingToPrestub())
1340 #else
1341     if (!IsPointingToPrestub())
1342 #endif
1343     {
1344         // If we are counting calls for tiered compilation, leave the prestub
1345         // in place so that we can continue intercepting method invocations.
1346         // When the TieredCompilationManager has received enough call notifications
1347         // for this method only then do we back-patch it.
1348 #ifdef FEATURE_TIERED_COMPILATION
1349         PCODE pNativeCode = GetNativeCode();
1350         if (pNativeCode && IsEligibleForTieredCompilation())
1351         {
1352             CallCounter * pCallCounter = GetAppDomain()->GetCallCounter();
1353             BOOL doBackPatch = pCallCounter->OnMethodCalled(this);
1354             if (!doBackPatch)
1355             {
1356                 return pNativeCode;
1357             }
1358         }
1359 #endif
1360         LOG((LF_CLASSLOADER, LL_INFO10000,
1361                 "    In PreStubWorker, method already jitted, backpatching call point\n"));
1362 #if defined(FEATURE_JIT_PITCHING)
1363         MarkMethodNotPitchingCandidate(this);
1364 #endif
1365         RETURN DoBackpatch(pMT, pDispatchingMT, TRUE);
1366     }
1367
1368     // record if remoting needs to intercept this call
1369     BOOL  fRemotingIntercepted = IsRemotingInterceptedViaPrestub();
1370
1371     BOOL  fReportCompilationFinished = FALSE;
1372     
1373     /**************************   CODE CREATION  *************************/
1374     if (IsUnboxingStub())
1375     {
1376         pStub = MakeUnboxingStubWorker(this);
1377     }
1378 #if defined(FEATURE_SHARE_GENERIC_CODE) 
1379     else if (IsInstantiatingStub())
1380     {
1381         pStub = MakeInstantiatingStubWorker(this);
1382     }
1383 #endif // defined(FEATURE_SHARE_GENERIC_CODE)
1384     else if (IsIL() || IsNoMetadata())
1385     {
1386         // remember if we need to backpatch the MethodTable slot
1387         BOOL  fBackpatch = !fRemotingIntercepted
1388                             && IsNativeCodeStableAfterInit();
1389
1390 #ifdef FEATURE_PREJIT 
1391         //
1392         // See if we have any prejitted code to use.
1393         //
1394
1395         pCode = GetPreImplementedCode();
1396
1397 #ifdef PROFILING_SUPPORTED
1398         if (pCode != NULL)
1399         {
1400             BOOL fShouldSearchCache = TRUE;
1401
1402             {
1403                 BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches());
1404                 g_profControlBlock.pProfInterface->
1405                     JITCachedFunctionSearchStarted((FunctionID) this,
1406                                                    &fShouldSearchCache);
1407                 END_PIN_PROFILER();
1408             }
1409
1410             if (!fShouldSearchCache)
1411             {
1412 #ifdef FEATURE_INTERPRETER
1413                 SetNativeCodeInterlocked(NULL, pCode, FALSE);
1414 #else
1415                 SetNativeCodeInterlocked(NULL, pCode);
1416 #endif
1417                 _ASSERTE(!IsPreImplemented());
1418                 pCode = NULL;
1419             }
1420         }
1421 #endif // PROFILING_SUPPORTED
1422
1423         if (pCode != NULL)
1424         {
1425             LOG((LF_ZAP, LL_INFO10000,
1426                 "ZAP: Using code" FMT_ADDR "for %s.%s sig=\"%s\" (token %x).\n",
1427                     DBG_ADDR(pCode),
1428                     m_pszDebugClassName,
1429                     m_pszDebugMethodName,
1430                     m_pszDebugMethodSignature,
1431                     GetMemberDef()));
1432
1433             TADDR pFixupList = GetFixupList();
1434             if (pFixupList != NULL)
1435             {
1436                 Module *pZapModule = GetZapModule();
1437                 _ASSERTE(pZapModule != NULL);
1438                 if (!pZapModule->FixupDelayList(pFixupList))
1439                 {
1440                     _ASSERTE(!"FixupDelayList failed");
1441                     ThrowHR(COR_E_BADIMAGEFORMAT);
1442                 }
1443             }
1444
1445 #ifdef HAVE_GCCOVER
1446             if (GCStress<cfg_instr_ngen>::IsEnabled())
1447                 SetupGcCoverage(this, (BYTE*) pCode);
1448 #endif // HAVE_GCCOVER
1449
1450 #ifdef PROFILING_SUPPORTED 
1451             /*
1452                 * This notifies the profiler that a search to find a
1453                 * cached jitted function has been made.
1454                 */
1455             {
1456                 BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches());
1457                 g_profControlBlock.pProfInterface->
1458                     JITCachedFunctionSearchFinished((FunctionID) this, COR_PRF_CACHED_FUNCTION_FOUND);
1459                 END_PIN_PROFILER();
1460             }
1461 #endif // PROFILING_SUPPORTED
1462         }
1463
1464         //
1465         // If not, try to jit it
1466         //
1467
1468 #endif // FEATURE_PREJIT
1469
1470 #ifdef FEATURE_READYTORUN
1471         if (pCode == NULL)
1472         {
1473             Module * pModule = GetModule();
1474             if (pModule->IsReadyToRun())
1475             {
1476                 pCode = pModule->GetReadyToRunInfo()->GetEntryPoint(this);
1477                 if (pCode != NULL)
1478                     fReportCompilationFinished = TRUE;
1479             }
1480         }
1481 #endif // FEATURE_READYTORUN
1482
1483         if (pCode == NULL)
1484         {
1485             NewHolder<COR_ILMETHOD_DECODER> pHeader(NULL);
1486             // Get the information on the method
1487             if (!IsNoMetadata())
1488             {
1489                 COR_ILMETHOD* ilHeader = GetILHeader(TRUE);
1490                 if(ilHeader == NULL)
1491                 {
1492 #ifdef FEATURE_COMINTEROP
1493                     // Abstract methods can be called through WinRT derivation if the deriving type
1494                     // is not implemented in managed code, and calls through the CCW to the abstract
1495                     // method. Throw a sensible exception in that case.
1496                     if (pMT->IsExportedToWinRT() && IsAbstract())
1497                     {
1498                         COMPlusThrowHR(E_NOTIMPL);
1499                     }
1500 #endif // FEATURE_COMINTEROP
1501
1502                     COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL);
1503                 }
1504
1505                 COR_ILMETHOD_DECODER::DecoderStatus status = COR_ILMETHOD_DECODER::FORMAT_ERROR;
1506
1507                 {
1508                     // Decoder ctor can AV on a malformed method header
1509                     AVInRuntimeImplOkayHolder AVOkay;
1510                     pHeader = new COR_ILMETHOD_DECODER(ilHeader, GetMDImport(), &status);
1511                     if(pHeader == NULL)
1512                         status = COR_ILMETHOD_DECODER::FORMAT_ERROR;
1513                 }
1514
1515                 if (status == COR_ILMETHOD_DECODER::VERIFICATION_ERROR &&
1516                     Security::CanSkipVerification(GetModule()->GetDomainAssembly()))
1517                 {
1518                     status = COR_ILMETHOD_DECODER::SUCCESS;
1519                 }
1520
1521                 if (status != COR_ILMETHOD_DECODER::SUCCESS)
1522                 {
1523                     if (status == COR_ILMETHOD_DECODER::VERIFICATION_ERROR)
1524                     {
1525                         // Throw a verification HR
1526                         COMPlusThrowHR(COR_E_VERIFICATION);
1527                     }
1528                     else
1529                     {
1530                         COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL);
1531                     }
1532                 }
1533
1534 #ifdef _VER_EE_VERIFICATION_ENABLED 
1535                 static ConfigDWORD peVerify;
1536
1537                 if (peVerify.val(CLRConfig::EXTERNAL_PEVerify))
1538                     Verify(pHeader, TRUE, FALSE);   // Throws a VerifierException if verification fails
1539 #endif // _VER_EE_VERIFICATION_ENABLED
1540             } // end if (!IsNoMetadata())
1541
1542             // JIT it
1543             LOG((LF_CLASSLOADER, LL_INFO1000000,
1544                     "    In PreStubWorker, calling MakeJitWorker\n"));
1545
1546             // Create the precode eagerly if it is going to be needed later.
1547             if (!fBackpatch)
1548             {
1549                 GetOrCreatePrecode();
1550             }
1551
1552             // Mark the code as hot in case the method ends up in the native image
1553             g_IBCLogger.LogMethodCodeAccess(this);
1554
1555             pCode = MakeJitWorker(pHeader, CORJIT_FLAGS());
1556
1557 #ifdef FEATURE_INTERPRETER
1558             if ((pCode != NULL) && !HasStableEntryPoint())
1559             {
1560                 // We don't yet have a stable entry point, so don't do backpatching yet.
1561                 // But we do have to handle some extra cases that occur in backpatching.
1562                 // (Perhaps I *should* get to the backpatching code, but in a mode where we know
1563                 // we're not dealing with the stable entry point...)
1564                 if (HasNativeCodeSlot())
1565                 {
1566                     // We called "SetNativeCodeInterlocked" in MakeJitWorker, which updated the native
1567                     // code slot, but I think we also want to update the regular slot...
1568                     PCODE tmpEntry = GetTemporaryEntryPoint();
1569                     PCODE pFound = FastInterlockCompareExchangePointer(GetAddrOfSlot(), pCode, tmpEntry);
1570                     // Doesn't matter if we failed -- if we did, it's because somebody else made progress.
1571                     if (pFound != tmpEntry) pCode = pFound;
1572                 }
1573
1574                 // Now we handle the case of a FuncPtrPrecode.  
1575                 FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate();
1576                 if (pFuncPtrStubs != NULL)
1577                 {
1578                     Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this);
1579                     if (pFuncPtrPrecode != NULL)
1580                     {
1581                         // If there is a funcptr precode to patch, attempt to patch it.  If we lose, that's OK,
1582                         // somebody else made progress.
1583                         pFuncPtrPrecode->SetTargetInterlocked(pCode);
1584                     }
1585                 }
1586             }
1587 #endif // FEATURE_INTERPRETER
1588         } // end if (pCode == NULL)
1589     } // end else if (IsIL() || IsNoMetadata())
1590     else if (IsNDirect())
1591     {
1592         pCode = GetStubForInteropMethod(this);
1593         GetOrCreatePrecode();
1594     }
1595     else if (IsFCall())
1596     {
1597         // Get the fcall implementation
1598         BOOL fSharedOrDynamicFCallImpl;
1599         pCode = ECall::GetFCallImpl(this, &fSharedOrDynamicFCallImpl);
1600
1601         if (fSharedOrDynamicFCallImpl)
1602         {
1603             // Fake ctors share one implementation that has to be wrapped by prestub
1604             GetOrCreatePrecode();
1605         }
1606     }
1607     else if (IsArray())
1608     {
1609         pStub = GenerateArrayOpStub((ArrayMethodDesc*)this);
1610     }
1611     else if (IsEEImpl())
1612     {
1613         _ASSERTE(GetMethodTable()->IsDelegate());
1614         pCode = COMDelegate::GetInvokeMethodStub((EEImplMethodDesc*)this);
1615         GetOrCreatePrecode();
1616     }
1617     else
1618     {
1619         // This is a method type we don't handle yet
1620         _ASSERTE(!"Unknown Method Type");
1621     }
1622
1623     /**************************   POSTJIT *************************/
1624 #ifndef FEATURE_INTERPRETER
1625     _ASSERTE(pCode == NULL || GetNativeCode() == NULL || pCode == GetNativeCode());
1626 #else // FEATURE_INTERPRETER
    // Interpreter adds a new possibility == someone else beat us to installing an interpreter stub.
1628     _ASSERTE(pCode == NULL || GetNativeCode() == NULL || pCode == GetNativeCode()
1629              || Interpreter::InterpretationStubToMethodInfo(pCode) == this);
1630 #endif // FEATURE_INTERPRETER
1631
1632     // At this point we must have either a pointer to managed code or to a stub. All of the above code
1633     // should have thrown an exception if it couldn't make a stub.
1634     _ASSERTE((pStub != NULL) ^ (pCode != NULL));
1635
1636     /**************************   SECURITY   *************************/
1637
1638     // Lets check to see if we need declarative security on this stub, If we have
1639     // security checks on this method or class then we need to add an intermediate
1640     // stub that performs declarative checks prior to calling the real stub.
1641     // record if security needs to intercept this call (also depends on whether we plan to use stubs for declarative security)
1642
1643
1644     _ASSERTE((pStub != NULL) ^ (pCode != NULL));
1645
1646 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
1647     //
1648     // We are seeing memory reordering race around fixups (see DDB 193514 and related bugs). We get into
1649     // situation where the patched precode is visible by other threads, but the resolved fixups 
1650     // are not. IT SHOULD NEVER HAPPEN according to our current understanding of x86/x64 memory model.
1651     // (see email thread attached to the bug for details).
1652     //
1653     // We suspect that there may be bug in the hardware or that hardware may have shortcuts that may be 
1654     // causing grief. We will try to avoid the race by executing an extra memory barrier.
1655     //
1656     MemoryBarrier();
1657 #endif
1658
1659     // If we are counting calls for tiered compilation, leave the prestub
1660     // in place so that we can continue intercepting method invocations.
1661     // When the TieredCompilationManager has received enough call notifications
1662     // for this method only then do we back-patch it.
1663 #ifdef FEATURE_TIERED_COMPILATION
1664     if (pCode && IsEligibleForTieredCompilation())
1665     {
1666         CallCounter * pCallCounter = GetAppDomain()->GetCallCounter();
1667         BOOL doBackPatch = pCallCounter->OnMethodCalled(this);
1668         if (!doBackPatch)
1669         {
1670             return pCode;
1671         }
1672     }
1673 #endif
1674
1675     if (pCode != NULL)
1676     {
1677         if (HasPrecode())
1678             GetPrecode()->SetTargetInterlocked(pCode);
1679         else
1680         if (!HasStableEntryPoint())
1681         {
1682             // Is the result an interpreter stub?
1683 #ifdef FEATURE_INTERPRETER
1684             if (Interpreter::InterpretationStubToMethodInfo(pCode) == this)
1685             {
1686                 SetEntryPointInterlocked(pCode);
1687             }
1688             else
1689 #endif // FEATURE_INTERPRETER
1690             {
1691                 ReJitPublishMethodHolder publishWorker(this, pCode);
1692                 SetStableEntryPointInterlocked(pCode);
1693             }
1694         }
1695     }
1696     else
1697     {
1698         if (!GetOrCreatePrecode()->SetTargetInterlocked(pStub->GetEntryPoint()))
1699         {
1700             pStub->DecRef();
1701         }
1702         else
1703         if (pStub->HasExternalEntryPoint())
1704         {
1705             // If the Stub wraps code that is outside of the Stub allocation, then we
1706             // need to free the Stub allocation now.
1707             pStub->DecRef();
1708         }
1709     }
1710
1711 #ifdef FEATURE_INTERPRETER
1712     _ASSERTE(!IsReallyPointingToPrestub());
1713 #else // FEATURE_INTERPRETER
1714     _ASSERTE(!IsPointingToPrestub());
1715     _ASSERTE(HasStableEntryPoint());
1716 #endif // FEATURE_INTERPRETER
1717
1718     if (fReportCompilationFinished)
1719         DACNotifyCompilationFinished(this);
1720
1721     RETURN DoBackpatch(pMT, pDispatchingMT, FALSE);
1722 }
1723
1724 #endif // !DACCESS_COMPILE
1725
1726 //==========================================================================
1727 // The following code manages the PreStub. All method stubs initially
1728 // use the prestub.
1729 //==========================================================================
1730
1731 #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
1732 static PCODE g_UMThunkPreStub;
1733 #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
1734
1735 #ifndef DACCESS_COMPILE 
1736
1737 void ThePreStubManager::Init(void)
1738 {
1739     STANDARD_VM_CONTRACT;
1740
1741     //
1742     // Add the prestub manager
1743     //
1744
1745     StubManager::AddStubManager(new ThePreStubManager());
1746 }
1747
1748 //-----------------------------------------------------------
1749 // Initialize the prestub.
1750 //-----------------------------------------------------------
void InitPreStubManager(void)
{
    STANDARD_VM_CONTRACT;

    // Nothing to initialize when running under ningen (see NingenEnabled).
    if (NingenEnabled())
    {
        return;
    }

#if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
    // On x86 without IL stubs, the UM thunk prestub is generated at runtime;
    // cache its entry point here for TheUMThunkPreStub() to return.
    g_UMThunkPreStub = GenerateUMThunkPrestub()->GetEntryPoint();
#endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL

    // Register the prestub's stub manager.
    ThePreStubManager::Init();
}
1766
// Returns the entry point of the UM (unmanaged-to-managed) thunk prestub.
PCODE TheUMThunkPreStub()
{
    LIMITED_METHOD_CONTRACT;

#if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL)
    // x86: return the runtime-generated prestub cached by InitPreStubManager.
    return g_UMThunkPreStub;
#else  // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
    // Other platforms: the prestub is a static helper.
    return GetEEFuncEntryPoint(TheUMEntryPrestub);
#endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL
}
1777
1778 PCODE TheVarargNDirectStub(BOOL hasRetBuffArg)
1779 {
1780     LIMITED_METHOD_CONTRACT;
1781
1782 #if !defined(_TARGET_X86_)
1783     if (hasRetBuffArg)
1784     {
1785         return GetEEFuncEntryPoint(VarargPInvokeStub_RetBuffArg);
1786     }
1787     else
1788 #endif
1789     {
1790         return GetEEFuncEntryPoint(VarargPInvokeStub);
1791     }
1792 }
1793
// Patches an NGen external-method indirection (either an executable thunk or a
// plain pointer-sized cell) to point at pCode, the resolved entry point for pMD.
// Returns the address the cell was patched with, which may be optimized to skip
// a fixup precode (see below).
static PCODE PatchNonVirtualExternalMethod(MethodDesc * pMD, PCODE pCode, PTR_CORCOMPILE_IMPORT_SECTION pImportSection, TADDR pIndirection)
{
    STANDARD_VM_CONTRACT;

    //
    // Skip fixup precode jump for better perf. Since we have MethodDesc available, we can use cheaper method 
    // than code:Precode::TryToSkipFixupPrecode.
    //
#ifdef HAS_FIXUP_PRECODE
    if (pMD->HasPrecode() && pMD->GetPrecode()->GetType() == PRECODE_FIXUP
        && pMD->IsNativeCodeStableAfterInit()
#ifndef HAS_REMOTING_PRECODE
        && !pMD->IsRemotingInterceptedViaPrestub()
#endif
        )
    {
        // Prefer the direct implementation over the precode jump when one exists.
        PCODE pDirectTarget = pMD->IsFCall() ? ECall::GetFCallImpl(pMD) : pMD->GetNativeCode();
        if (pDirectTarget != NULL)
            pCode = pDirectTarget;
    }
#endif //HAS_FIXUP_PRECODE

    if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE)
    {
        // The indirection is an executable thunk that must be patched in place.
        CORCOMPILE_EXTERNAL_METHOD_THUNK * pThunk = (CORCOMPILE_EXTERNAL_METHOD_THUNK *)pIndirection;

#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
        // The thunk starts out as "call rel32" into the fixup helper; rewrite it
        // atomically into "jmp rel32" targeting pCode. The compare-exchange only
        // succeeds if the thunk still holds the bytes we read, so if another
        // thread patched it first we simply keep its result.
        INT64 oldValue = *(INT64*)pThunk;
        BYTE* pOldValue = (BYTE*)&oldValue;

        if (pOldValue[0] == X86_INSTR_CALL_REL32)
        {
            INT64 newValue = oldValue;
            BYTE* pNewValue = (BYTE*)&newValue;
            pNewValue[0] = X86_INSTR_JMP_REL32;

            // Compute the new rel32 displacement (rel32UsingJumpStub may route
            // through a jump stub if the target is out of rel32 range).
            *(INT32 *)(pNewValue+1) = rel32UsingJumpStub((INT32*)(&pThunk->callJmp[1]), pCode, pMD, NULL);

            // The 8-byte swap requires natural alignment to be atomic.
            _ASSERTE(IS_ALIGNED((size_t)pThunk, sizeof(INT64)));
            EnsureWritableExecutablePages(pThunk, sizeof(INT64));
            FastInterlockCompareExchangeLong((INT64*)pThunk, newValue, oldValue);

            FlushInstructionCache(GetCurrentProcess(), pThunk, 8);
        }
#elif  defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
        // Patchup the thunk to point to the actual implementation of the cross module external method
        EnsureWritableExecutablePages(&pThunk->m_pTarget);
        pThunk->m_pTarget = pCode;

        #if defined(_TARGET_ARM_)
        // ThumbBit must be set on the target address
        _ASSERTE(pCode & THUMB_CODE);
        #endif
#else
        PORTABILITY_ASSERT("ExternalMethodFixupWorker");
#endif
    }
    else
    {
        // Plain pointer-sized indirection cell: just store the new target.
        *EnsureWritableExecutablePages((TADDR *)pIndirection) = pCode;
    }

    return pCode;
}
1858
1859 //==========================================================================================
1860 // In NGen images calls to external methods start out pointing to jump thunks.
1861 // These jump thunks initially point to the assembly code _ExternalMethodFixupStub
1862 // It transfers control to ExternalMethodFixupWorker which will patch the jump 
1863 // thunk to point to the actual cross module address for the method body
// Some methods also have one-time prestubs; for those we defer the patching
// until we have the final stable method entry point.
1866 //
EXTERN_C PCODE STDCALL ExternalMethodFixupWorker(TransitionBlock * pTransitionBlock, TADDR pIndirection, DWORD sectionIndex, Module * pModule)
{
    STATIC_CONTRACT_THROWS;
    STATIC_CONTRACT_GC_TRIGGERS;
    STATIC_CONTRACT_MODE_COOPERATIVE;
    STATIC_CONTRACT_ENTRY_POINT;

    // We must save (and restore) the Last Error code before we call anything 
    // that could overwrite it.  Any callsite that leads to TlsGetValue will 
    // potentially overwrite the Last Error code.

    //
    // In Dev10 bug 837293 we were overwriting the Last Error code on the first 
    // call to a PInvoke method.  This occurred when we were running a 
    // (precompiled) PInvoke IL stub implemented in the ngen image.
    //
    // In this IL stub implementation we call the native method kernel32!GetFileAttributes,
    // and then we immediately try to save the Last Error code by calling the 
    // mscorlib method System.StubHelpers.StubHelpers.SetLastError().
    //
    // However when we are coming from a precompiled IL Stub in an ngen image
    // we must use an ExternalMethodFixup to find the target address of 
    // System.StubHelpers.StubHelpers.SetLastError() and this was overwriting
    // the value of the Last Error before it could be retrieved and saved.
    //

    PCODE         pCode   = NULL;

    BEGIN_PRESERVE_LAST_ERROR;

    MAKE_CURRENT_THREAD_AVAILABLE();

#ifdef _DEBUG
    Thread::ObjectRefFlush(CURRENT_THREAD);
#endif

    // Set up a transition frame so this call is visible to GC and stackwalks.
    FrameWithCookie<ExternalMethodFrame> frame(pTransitionBlock);
    ExternalMethodFrame * pEMFrame = &frame;

#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
    // Decode indirection cell from callsite if it is not present
    if (pIndirection == NULL)
    {
        // Assume that the callsite is call [xxxxxxxx]
        PCODE retAddr = pEMFrame->GetReturnAddress();
#ifdef _TARGET_X86_
        // x86: the instruction embeds the absolute address of the cell.
        pIndirection = *(((TADDR *)retAddr) - 1);
#else
        // AMD64: the instruction embeds a rel32 displacement from the return address.
        pIndirection = *(((INT32 *)retAddr) - 1) + retAddr;
#endif
    }
#endif

    // FUTURE: Consider always passing in module and section index to avoid the lookups
    if (pModule == NULL)
    {
        pModule = ExecutionManager::FindZapModule(pIndirection);
        sectionIndex = (DWORD)-1;
    }
    _ASSERTE(pModule != NULL);

    pEMFrame->SetCallSite(pModule, pIndirection);

    pEMFrame->Push(CURRENT_THREAD);         // Push the new ExternalMethodFrame onto the frame stack

    INSTALL_MANAGED_EXCEPTION_DISPATCHER;
    INSTALL_UNWIND_AND_CONTINUE_HANDLER;

    bool fVirtual = false;
    MethodDesc * pMD = NULL;
    MethodTable * pMT = NULL;
    DWORD slot = 0;

    {
        GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);

        PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage();

        RVA rva = pNativeImage->GetDataRva(pIndirection);

        // Locate the import section that owns this indirection cell.
        PTR_CORCOMPILE_IMPORT_SECTION pImportSection;
        if (sectionIndex != (DWORD)-1)
        {
            pImportSection = pModule->GetImportSectionFromIndex(sectionIndex);
            _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva));
        }
        else
        {
            pImportSection = pModule->GetImportSectionForRVA(rva);
        }
        _ASSERTE(pImportSection != NULL);

        // Compute the cell's index within the section. Entries are either
        // executable thunks or plain pointer-sized cells.
        COUNT_T index;
        if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE)
        {
            _ASSERTE(pImportSection->EntrySize == sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK));
            index = (rva - pImportSection->Section.VirtualAddress) / sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK);
        }
        else
        {
            _ASSERTE(pImportSection->EntrySize == sizeof(TADDR));
            index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR);
        }

        // Look up the signature blob describing what this cell refers to.
        PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures));

        PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]);

        BYTE kind = *pBlob++;

        // The signature may redirect token resolution to another module.
        Module * pInfoModule = pModule;
        if (kind & ENCODE_MODULE_OVERRIDE)
        {
            DWORD moduleIndex = CorSigUncompressData(pBlob);
            pInfoModule = pModule->GetModuleFromIndex(moduleIndex);
            kind &= ~ENCODE_MODULE_OVERRIDE;
        }

        // Decode the target. Non-virtual kinds produce a MethodDesc in pMD;
        // virtual kinds set fVirtual plus (pMT, slot) for stub dispatch below.
        TypeHandle th;
        switch (kind)
        {
        case ENCODE_METHOD_ENTRY:
            {
                pMD =  ZapSig::DecodeMethod(pModule,
                                            pInfoModule,
                                            pBlob);

                if (pModule->IsReadyToRun())
                {
                    // We do not emit activation fixups for version resilient references. Activate the target explicitly.
                    pMD->EnsureActive();
                }

                break;
            }

        case ENCODE_METHOD_ENTRY_DEF_TOKEN:
            {
                mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef);
                pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE);

                pMD->PrepareForUseAsADependencyOfANativeImage();

                if (pModule->IsReadyToRun())
                {
                    // We do not emit activation fixups for version resilient references. Activate the target explicitly.
                    pMD->EnsureActive();
                }

                break;
            }

        case ENCODE_METHOD_ENTRY_REF_TOKEN:
            {
                SigTypeContext typeContext;
                mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef);
                FieldDesc * pFD = NULL;

                MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th);
                _ASSERTE(pMD != NULL);

                pMD->PrepareForUseAsADependencyOfANativeImage();

                if (pModule->IsReadyToRun())
                {
                    // We do not emit activation fixups for version resilient references. Activate the target explicitly.
                    pMD->EnsureActive();
                }
                else
                {
#ifdef FEATURE_WINMD_RESILIENT
                    // We do not emit activation fixups for version resilient references. Activate the target explicitly.
                    pMD->EnsureActive();
#endif
                }

                break;
            }

        case ENCODE_VIRTUAL_ENTRY:
            {
                pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th);

        VirtualEntry:
                // Shared tail for all ENCODE_VIRTUAL_ENTRY* cases that resolved a MethodDesc.
                pMD->PrepareForUseAsADependencyOfANativeImage();

                if (pMD->IsVtableMethod())
                {
                    // Defer to virtual stub dispatch below.
                    slot = pMD->GetSlot();
                    pMT = th.IsNull() ? pMD->GetMethodTable() : th.GetMethodTable();

                    fVirtual = true;
                }
                else
                if (pModule->IsReadyToRun())
                {
                    // We do not emit activation fixups for version resilient references. Activate the target explicitly.
                    pMD->EnsureActive();
                }
                break;
            }

        case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
            {
                mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef);
                pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE);

                goto VirtualEntry;
            }

        case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
            {
                mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef);

                FieldDesc * pFD = NULL;

                SigTypeContext typeContext;
                MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th, TRUE /* actual type required */);
                _ASSERTE(pMD != NULL);

                goto VirtualEntry;
            }

        case ENCODE_VIRTUAL_ENTRY_SLOT:
            {
                // The slot number is encoded directly; no MethodDesc is resolved.
                slot = CorSigUncompressData(pBlob);
                pMT =  ZapSig::DecodeType(pModule, pInfoModule, pBlob).GetMethodTable();

                fVirtual = true;
                break;
            }

        default:
            _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND");
            ThrowHR(COR_E_BADIMAGEFORMAT);
        }

        if (fVirtual)
        {
            GCX_COOP_THREAD_EXISTS(CURRENT_THREAD);

            // Get the stub manager for this module
            VirtualCallStubManager *pMgr = pModule->GetLoaderAllocator()->GetVirtualCallStubManager();

            DispatchToken token;
            if (pMT->IsInterface())
                token = pMT->GetLoaderAllocator()->GetDispatchToken(pMT->GetTypeID(), slot);
            else
                token = DispatchToken::CreateDispatchToken(slot);

            // Resolution needs the 'this' object; a null receiver is an NRE.
            OBJECTREF *protectedObj = pEMFrame->GetThisPtr();
            _ASSERTE(protectedObj != NULL);
            if (*protectedObj == NULL) {
                COMPlusThrow(kNullReferenceException);
            }
            
            StubCallSite callSite(pIndirection, pEMFrame->GetReturnAddress());
            pCode = pMgr->ResolveWorker(&callSite, protectedObj, token, VirtualCallStubManager::SK_LOOKUP);
            _ASSERTE(pCode != NULL);
        }
        else
        {
            _ASSERTE(pMD != NULL);

            {
                // Switch to cooperative mode to avoid racing with GC stackwalk
                GCX_COOP_THREAD_EXISTS(CURRENT_THREAD);
                pEMFrame->SetFunction(pMD);
            }

            pCode = pMD->GetMethodEntryPoint();

            //
            // Note that we do not want to call code:MethodDesc::IsPointingToPrestub() here. It does not take remoting interception 
            // into account and so it would cause otherwise intercepted methods to be JITed. It is a compat issue if the JITing fails.
            //
            if (!DoesSlotCallPrestub(pCode))
            {
                // Entry point no longer goes through the prestub: back-patch the cell.
                pCode = PatchNonVirtualExternalMethod(pMD, pCode, pImportSection, pIndirection);
            }
        }

#if defined (FEATURE_JIT_PITCHING)
        DeleteFromPitchingCandidate(pMD);
#endif
    }

    // Force a GC on every jit if the stress level is high enough
    GCStress<cfg_any>::MaybeTrigger();

    // Ready to return

    UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
    UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;

    pEMFrame->Pop(CURRENT_THREAD);          // Pop the ExternalMethodFrame from the frame stack

    END_PRESERVE_LAST_ERROR;

    return pCode;
}
2168
2169
2170 #if !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_)
2171
2172 //==========================================================================================
// In NGen images, virtual slots inherited from cross-module dependencies point to jump thunks.
// These jump thunks initially point to VirtualMethodFixupStub, which transfers control here.
// This method 'VirtualMethodFixupWorker' will patch the jump thunk to point to the actual
// inherited method body once we have executed the precode and have a stable entry point.
2177 //
EXTERN_C PCODE VirtualMethodFixupWorker(Object * pThisPtr,  CORCOMPILE_VIRTUAL_IMPORT_THUNK *pThunk)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER; 
        MODE_COOPERATIVE;
        ENTRY_POINT;
    }
    CONTRACTL_END;

    _ASSERTE(pThisPtr != NULL);
    VALIDATEOBJECT(pThisPtr);

    // Resolve the slot against the actual type of 'this'.
    MethodTable * pMT = pThisPtr->GetTrueMethodTable();

    WORD slotNumber = pThunk->slotNum;
    _ASSERTE(slotNumber != (WORD)-1);

    PCODE pCode = pMT->GetRestoredSlot(slotNumber);

    if (!DoesSlotCallPrestub(pCode))
    {
        // Skip fixup precode jump for better perf
        PCODE pDirectTarget = Precode::TryToSkipFixupPrecode(pCode);
        if (pDirectTarget != NULL)
            pCode = pDirectTarget;

        // Patch the thunk to the actual method body. Patching is best-effort:
        // if the pages cannot be made writable we keep going through the thunk.
        if (EnsureWritableExecutablePagesNoThrow(&pThunk->m_pTarget, sizeof(pThunk->m_pTarget)))
            pThunk->m_pTarget = pCode;
    }
    // If the slot still calls the prestub, leave the thunk alone and dispatch
    // through it; patching happens on a later call once the slot is stable.
#if defined(_TARGET_ARM_)
    // The target address should have the thumb bit set
    _ASSERTE(pCode & THUMB_CODE);
#endif
    return pCode;
}
2216 #endif // !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_)
2217
2218 #ifdef FEATURE_READYTORUN
2219
2220 static PCODE getHelperForInitializedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD)
2221 {
2222     STANDARD_VM_CONTRACT;
2223
2224     PCODE pHelper = NULL; 
2225
2226     switch (kind)
2227     {
2228     case ENCODE_STATIC_BASE_NONGC_HELPER:
2229         {
2230             PVOID baseNonGC;
2231             {
2232                 GCX_COOP();
2233                 baseNonGC = pMT->GetNonGCStaticsBasePointer();
2234             }
2235             pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseNonGC);
2236         }
2237         break;
2238     case ENCODE_STATIC_BASE_GC_HELPER:
2239         {
2240             PVOID baseGC;
2241             {
2242                 GCX_COOP();
2243                 baseGC = pMT->GetGCStaticsBasePointer();
2244             }
2245             pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseGC);
2246         }
2247         break;
2248     case ENCODE_CCTOR_TRIGGER:
2249         pHelper = DynamicHelpers::CreateReturn(pModule->GetLoaderAllocator());
2250         break;
2251     case ENCODE_FIELD_ADDRESS:
2252         {
2253             _ASSERTE(pFD->IsStatic());
2254
2255             PTR_VOID pAddress;
2256
2257             {
2258                 GCX_COOP();
2259
2260                 PTR_BYTE base = 0;
2261                 if (!pFD->IsRVA()) // for RVA the base is ignored
2262                     base = pFD->GetBase();
2263                 pAddress = pFD->GetStaticAddressHandle((void *)dac_cast<TADDR>(base));
2264             }
2265
2266             // The following code assumes that the statics are pinned that is not the case for collectible types
2267             _ASSERTE(!pFD->GetEnclosingMethodTable()->Collectible());
2268
2269             // Unbox valuetype fields
2270             if (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE && !pFD->IsRVA())
2271                 pHelper = DynamicHelpers::CreateReturnIndirConst(pModule->GetLoaderAllocator(), (TADDR)pAddress, (INT8)Object::GetOffsetOfFirstField());
2272             else
2273                 pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)pAddress);
2274         }
2275         break;
2276     default:
2277         _ASSERTE(!"Unexpected statics CORCOMPILE_FIXUP_BLOB_KIND");
2278         ThrowHR(COR_E_BADIMAGEFORMAT);
2279     }
2280
2281     return pHelper;
2282 }
2283
// Builds a dynamic helper that computes the address of a shared static field.
// The helper is parameterized by a StaticFieldAddressArgs block (statics-base
// JIT helper + its moduleID/classID arguments + field offset) and dispatches
// through JIT_StaticFieldAddress_Dynamic (or the Unbox variant for value types).
static PCODE getHelperForSharedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD)
{
    STANDARD_VM_CONTRACT;

    _ASSERTE(kind == ENCODE_FIELD_ADDRESS);

    // Pick the statics-base helper appropriate for this field/type combination.
    CorInfoHelpFunc helpFunc = CEEInfo::getSharedStaticsHelper(pFD, pMT);

    TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID();

    // The NOCTOR helper flavors do not take a class ID argument.
    TADDR classID = 0;
    if (helpFunc != CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR && helpFunc != CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR)
    {
        if (pMT->IsDynamicStatics())
        {
            classID = pMT->GetModuleDynamicEntryID();
        }
        else
        {
            classID = pMT->GetClassIndex();
        }
    }

    // Value-type statics go through the unboxing variant of the helper.
    bool fUnbox = (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE);

    AllocMemTracker amTracker;

    // Argument block handed to the dynamic helper; allocated on the loader
    // allocator's high-frequency heap.
    StaticFieldAddressArgs * pArgs = (StaticFieldAddressArgs *)amTracker.Track(
        pModule->GetLoaderAllocator()->GetHighFrequencyHeap()->
            AllocMem(S_SIZE_T(sizeof(StaticFieldAddressArgs))));

    pArgs->staticBaseHelper = (FnStaticBaseHelper)CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc);
    pArgs->arg0 = moduleID;
    pArgs->arg1 = classID;
    pArgs->offset = pFD->GetOffset(); 

    PCODE pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), (TADDR)pArgs, 
        fUnbox ? GetEEFuncEntryPoint(JIT_StaticFieldAddressUnbox_Dynamic) : GetEEFuncEntryPoint(JIT_StaticFieldAddress_Dynamic));

    // Success - the tracked argument block must outlive this call.
    amTracker.SuppressRelease();

    return pHelper;
}
2327
// Builds a dynamic helper that returns the static base (non-GC/GC, regular or
// thread-local) for pMT.
//
// NOTE: the helpFunc adjustments below rely on the relative layout of the
// CORINFO_HELP_GETSHARED_* enum values - the same delta converts between
// corresponding members of each helper family.
static PCODE getHelperForStaticBase(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT)
{
    STANDARD_VM_CONTRACT;

    int helpFunc = CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE;

    if (kind == ENCODE_STATIC_BASE_GC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER)
    {
        helpFunc = CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
    }

    // Dynamic statics use the DYNAMICCLASS helper flavor...
    if (pMT->IsDynamicStatics())
    {
        const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
        helpFunc += delta;
    }
    else
    // ...while types with no cctor and no boxed regular statics can take the NOCTOR flavor.
    if (!pMT->HasClassConstructor() && !pMT->HasBoxedRegularStatics())
    {
        const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
        helpFunc += delta;
    }

    // Shift into the thread-static helper family when requested.
    if (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER)
    {
        const int delta = CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE - CORINFO_HELP_GETSHARED_GCSTATIC_BASE;
        helpFunc += delta;
    }

    PCODE pHelper; 
    if (helpFunc == CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR || helpFunc == CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR)
    {
        // NOCTOR helpers take only the module ID.
        pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), pMT->GetModule()->GetModuleID(), CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc));
    }
    else
    {
        // All other flavors take module ID and class ID.
        TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID();

        TADDR classID;
        if (pMT->IsDynamicStatics())
        {
            classID = pMT->GetModuleDynamicEntryID();
        }
        else
        {
            classID = pMT->GetClassIndex();
        }

        pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), moduleID, classID, CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc));
    }

    return pHelper;
}
2381
2382 TADDR GetFirstArgumentRegisterValuePtr(TransitionBlock * pTransitionBlock)
2383 {
2384     TADDR pArgument = (TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters();
2385 #ifdef _TARGET_X86_
2386     // x86 is special as always
2387     pArgument += offsetof(ArgumentRegisters, ECX);
2388 #endif
2389
2390     return pArgument;
2391 }
2392
2393 void ProcessDynamicDictionaryLookup(TransitionBlock *           pTransitionBlock, 
2394                                     Module *                    pModule, 
2395                                     Module *                    pInfoModule,                                     
2396                                     BYTE                        kind, 
2397                                     PCCOR_SIGNATURE             pBlob, 
2398                                     PCCOR_SIGNATURE             pBlobStart,                                     
2399                                     CORINFO_RUNTIME_LOOKUP *    pResult, 
2400                                     DWORD *                     pDictionaryIndexAndSlot)
2401 {
2402     STANDARD_VM_CONTRACT;
2403
2404     TADDR genericContextPtr = *(TADDR*)GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2405
2406     pResult->testForFixup = pResult->testForNull = false;
2407     pResult->signature = NULL;
2408
2409     pResult->indirectFirstOffset = 0;
2410
2411     pResult->indirections = CORINFO_USEHELPER;
2412
2413     DWORD numGenericArgs = 0;
2414     MethodTable* pContextMT = NULL;
2415     MethodDesc* pContextMD = NULL;
2416
2417     if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD)
2418     {
2419         pContextMD = (MethodDesc*)genericContextPtr;
2420         numGenericArgs = pContextMD->GetNumGenericMethodArgs();
2421         pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_METHOD;
2422     }
2423     else
2424     {
2425         pContextMT = (MethodTable*)genericContextPtr;
2426
2427         if (kind == ENCODE_DICTIONARY_LOOKUP_THISOBJ)
2428         {
2429             TypeHandle contextTypeHandle = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2430
2431             SigPointer p(pBlob);
2432             p.SkipExactlyOne();
2433             pBlob = p.GetPtr();
2434
2435             pContextMT = pContextMT->GetMethodTableMatchingParentClass(contextTypeHandle.AsMethodTable());
2436         }
2437
2438         numGenericArgs = pContextMT->GetNumGenericArgs();
2439         pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_CLASS;
2440     }
2441
2442     _ASSERTE(numGenericArgs > 0);
2443
2444     CORCOMPILE_FIXUP_BLOB_KIND signatureKind = (CORCOMPILE_FIXUP_BLOB_KIND)CorSigUncompressData(pBlob);
2445
2446     //
2447     // Optimization cases
2448     //
2449     if (signatureKind == ENCODE_TYPE_HANDLE)
2450     {
2451         SigPointer sigptr(pBlob, -1);
2452
2453         CorElementType type;
2454         IfFailThrow(sigptr.GetElemType(&type));
2455
2456         if ((type == ELEMENT_TYPE_MVAR) && (kind == ENCODE_DICTIONARY_LOOKUP_METHOD))
2457         {
2458             pResult->indirections = 2;
2459             pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo);
2460
2461             if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative)
2462             {
2463                 pResult->indirectFirstOffset = 1;
2464             }
2465
2466             ULONG data;
2467             IfFailThrow(sigptr.GetData(&data));
2468             pResult->offsets[1] = sizeof(TypeHandle) * data;
2469
2470             return;
2471         }
2472         else if ((type == ELEMENT_TYPE_VAR) && (kind != ENCODE_DICTIONARY_LOOKUP_METHOD))
2473         {
2474             pResult->indirections = 3;
2475             pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo();
2476             pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1);
2477
2478             ULONG data;
2479             IfFailThrow(sigptr.GetData(&data));
2480             pResult->offsets[2] = sizeof(TypeHandle) * data;
2481
2482             return;
2483         }
2484     }
2485
2486     if (pContextMT != NULL && pContextMT->GetNumDicts() > 0xFFFF)
2487         ThrowHR(COR_E_BADIMAGEFORMAT);
2488
2489     // Dictionary index and slot number are encoded in a 32-bit DWORD. The higher 16 bits
2490     // are used for the dictionary index, and the lower 16 bits for the slot number.
2491     *pDictionaryIndexAndSlot = (pContextMT == NULL ? 0 : pContextMT->GetNumDicts() - 1);
2492     *pDictionaryIndexAndSlot <<= 16;
2493     
2494     WORD dictionarySlot;
2495
2496     if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD)
2497     {
2498         if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMD->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 1, FromReadyToRunImage, &dictionarySlot))
2499         {
2500             pResult->testForNull = 1;
2501
2502             // Indirect through dictionary table pointer in InstantiatedMethodDesc
2503             pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo);
2504
2505             if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative)
2506             {
2507                 pResult->indirectFirstOffset = 1;
2508             }
2509
2510             *pDictionaryIndexAndSlot |= dictionarySlot;
2511         }
2512     }
2513
2514     // It's a class dictionary lookup (CORINFO_LOOKUP_CLASSPARAM or CORINFO_LOOKUP_THISOBJ)
2515     else
2516     {
2517         if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMT->GetClass()->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 2, FromReadyToRunImage, &dictionarySlot))
2518         {
2519             pResult->testForNull = 1;
2520
2521             // Indirect through dictionary table pointer in vtable
2522             pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo();
2523
2524             // Next indirect through the dictionary appropriate to this instantiated type
2525             pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1);
2526
2527             *pDictionaryIndexAndSlot |= dictionarySlot;
2528         }
2529     }
2530 }
2531
//
// DynamicHelperFixup
//
// Resolves one dynamic-helper import cell of an NGen/ReadyToRun image:
// locates the cell's fixup signature blob, decodes and loads/activates the
// target it refers to, and (when possible) creates an optimized helper stub
// and patches the import cell to point at it.
//
// Out parameters (consumed by DynamicHelperWorker's inline fallback path):
//   pKind - decoded fixup kind, with the ENCODE_MODULE_OVERRIDE bit stripped
//   pTH   - type handle decoded from the blob, if any
//   ppMD  - method decoded from the blob, if any
//   ppFD  - field decoded from the blob, if any
//
// Returns the helper's entry point, or NULL when the caller must perform the
// operation inline this one time (helper creation failed or was deferred).
//
2532 PCODE DynamicHelperFixup(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND * pKind, TypeHandle * pTH, MethodDesc ** ppMD, FieldDesc ** ppFD)
2533 {
2534     STANDARD_VM_CONTRACT;
2535
2536     PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage();
2537
2538     RVA rva = pNativeImage->GetDataRva((TADDR)pCell);
2539
2540     PTR_CORCOMPILE_IMPORT_SECTION pImportSection = pModule->GetImportSectionFromIndex(sectionIndex);
2541     _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva));
2542
2543     _ASSERTE(pImportSection->EntrySize == sizeof(TADDR));
2544
         // Index of this cell within the import section; used to look up the
         // matching entry in the parallel signatures table.
2545     COUNT_T index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR);
2546
2547     PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures));
2548
2549     PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]);
2550     PCCOR_SIGNATURE pBlobStart = pBlob;
2551
         // First byte of the signature blob is the fixup kind.
2552     BYTE kind = *pBlob++;
2553
         // The blob may redirect the remainder of the signature to a different
         // module; resolve that indirection before decoding.
2554     Module * pInfoModule = pModule;
2555     if (kind & ENCODE_MODULE_OVERRIDE)
2556     {
2557         DWORD moduleIndex = CorSigUncompressData(pBlob);
2558         pInfoModule = pModule->GetModuleFromIndex(moduleIndex);
2559         kind &= ~ENCODE_MODULE_OVERRIDE;
2560     }
2561
         // fReliable == true means helper-creation failures are swallowed and
         // the caller falls back to doing the work inline (see EX_TRY below).
2562     bool fReliable = false;
2563     TypeHandle th;
2564     MethodDesc * pMD = NULL;
2565     FieldDesc * pFD = NULL;
2566     CORINFO_RUNTIME_LOOKUP genericLookup;
2567     DWORD dictionaryIndexAndSlot = -1;
2568
         // Decode the fixup target and make sure it is loaded and active.
2569     switch (kind)
2570     {
2571     case ENCODE_NEW_HELPER:
2572         th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2573         th.AsMethodTable()->EnsureInstanceActive();
2574         break;
2575     case ENCODE_ISINSTANCEOF_HELPER:
2576     case ENCODE_CHKCAST_HELPER:
2577         fReliable = true;
             // Fall through - the casting helpers share the type decode below.
2578     case ENCODE_NEW_ARRAY_HELPER:
2579         th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
2580         break;
2581
2582     case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
2583     case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2584     case ENCODE_STATIC_BASE_NONGC_HELPER:
2585     case ENCODE_STATIC_BASE_GC_HELPER:
2586     case ENCODE_CCTOR_TRIGGER:
2587         th = ZapSig::DecodeType(pModule, pInfoModule, pBlob);
         // Shared tail for all static-base/cctor fixups (also reached via the
         // goto from ENCODE_FIELD_ADDRESS): activate the type and run its
         // class constructor now, so helpers built later can assume an
         // initialized type.
2588     Statics:
2589         th.AsMethodTable()->EnsureInstanceActive();
2590         th.AsMethodTable()->CheckRunClassInitThrowing();
2591         fReliable = true;
2592         break;
2593
2594     case ENCODE_FIELD_ADDRESS:
2595         pFD = ZapSig::DecodeField(pModule, pInfoModule, pBlob, &th);
2596         _ASSERTE(pFD->IsStatic());
2597         goto Statics;
2598
2599     case ENCODE_VIRTUAL_ENTRY:
2600     // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2601     // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2602     // case ENCODE_VIRTUAL_ENTRY_SLOT:
2603         fReliable = true;
             // Fall through - both kinds decode a method from the blob.
2604     case ENCODE_DELEGATE_CTOR:
2605         {
2606             pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th);
                 // Generic methods needing an instantiation argument must be
                 // resolved to the exact instantiated MethodDesc.
2607             if (pMD->RequiresInstArg())
2608             {
2609                 pMD = MethodDesc::FindOrCreateAssociatedMethodDesc(pMD,
2610                     th.AsMethodTable(),
2611                     FALSE /* forceBoxedEntryPoint */,
2612                     pMD->GetMethodInstantiation(),
2613                     FALSE /* allowInstParam */);
2614             }
2615             pMD->EnsureActive();
2616         }
2617         break;
2618
2619     case ENCODE_DICTIONARY_LOOKUP_THISOBJ:
2620     case ENCODE_DICTIONARY_LOOKUP_TYPE:
2621     case ENCODE_DICTIONARY_LOOKUP_METHOD:
             // Fills in genericLookup/dictionaryIndexAndSlot for the
             // dictionary-lookup helper created further below.
2622         ProcessDynamicDictionaryLookup(pTransitionBlock, pModule, pInfoModule, kind, pBlob, pBlobStart, &genericLookup, &dictionaryIndexAndSlot);
2623         break;
2624
2625     default:
2626         _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND");
2627         ThrowHR(COR_E_BADIMAGEFORMAT);
2628     }
2629
2630     PCODE pHelper = NULL;
2631
2632     if (fReliable)
2633     {
2634         // For reliable helpers, exceptions in creating the optimized helper are non-fatal. Swallow them to make CER work well.
2635         EX_TRY
2636         {
2637             switch (kind)
2638             {
2639             case ENCODE_ISINSTANCEOF_HELPER:
2640             case ENCODE_CHKCAST_HELPER:
2641                 {
2642                     bool fClassMustBeRestored;
2643                     CorInfoHelpFunc helpFunc = CEEInfo::getCastingHelperStatic(th, /* throwing */ (kind == ENCODE_CHKCAST_HELPER), &fClassMustBeRestored);
2644                     pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc));
2645                 }
2646                 break;
2647             case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
2648             case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2649             case ENCODE_STATIC_BASE_NONGC_HELPER:
2650             case ENCODE_STATIC_BASE_GC_HELPER:
2651             case ENCODE_CCTOR_TRIGGER:
2652             case ENCODE_FIELD_ADDRESS:
2653                 {
2654                     MethodTable * pMT = th.AsMethodTable();
2655
                         // Decide whether the trivial "return cached base"
                         // helper suffices, or whether a slower helper that
                         // recomputes the base each call is required.
2656                     bool fNeedsNonTrivialHelper = false;
2657
2658                     if (pMT->IsDomainNeutral() && !IsSingleAppDomain())
2659                     {
2660                         fNeedsNonTrivialHelper = true;
2661                     }
2662                     else
2663                     if (pMT->Collectible() && (kind != ENCODE_CCTOR_TRIGGER))
2664                     {
2665                         // Collectible statics are not pinned - the fast getters expect statics to be pinned
2666                         fNeedsNonTrivialHelper = true;
2667                     }
2668                     else
2669                     {
2670                         if (pFD != NULL)
2671                         {
2672                             fNeedsNonTrivialHelper = !!pFD->IsSpecialStatic();
2673                         }
2674                         else
2675                         {
                                 // Thread-static bases can move per thread, so
                                 // they always need the non-trivial helper.
2676                             fNeedsNonTrivialHelper = (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER) || (kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER);
2677                         }
2678                     }
2679
2680                     if (fNeedsNonTrivialHelper)
2681                     {
2682                         if (pFD != NULL)
2683                         {
2684                             if (pFD->IsRVA() || pFD->IsContextStatic())
2685                             {
2686                                 _ASSERTE(!"Fast getter for rare kinds of static fields");
2687                             }
2688                             else
2689                             {
2690                                 pHelper = getHelperForSharedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD);
2691                             }
2692                         }
2693                         else
2694                         {
2695                             pHelper = getHelperForStaticBase(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT);
2696                         }
2697                     }
2698                     else
2699                     {
2700                         // Delay the creation of the helper until the type is initialized
2701                         if (pMT->IsClassInited())
2702                             pHelper = getHelperForInitializedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD);
2703                     }
2704                 }
2705                 break;
2706
2707             case ENCODE_VIRTUAL_ENTRY:
2708             // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2709             // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2710             // case ENCODE_VIRTUAL_ENTRY_SLOT:
2711                 {
                        // Non-virtual target: the address is a constant, so a
                        // stub that just returns it is enough.
2712                    if (!pMD->IsVtableMethod())
2713                    {
2714                         pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode());
2715                     }
2716                     else
2717                     {
                             // Virtual target: allocate a (class, method)
                             // argument pair on the loader heap and build a
                             // helper that resolves the slot at call time.
2718                         AllocMemTracker amTracker;
2719
2720                         VirtualFunctionPointerArgs * pArgs = (VirtualFunctionPointerArgs *)amTracker.Track(
2721                             pModule->GetLoaderAllocator()->GetHighFrequencyHeap()->
2722                                 AllocMem(S_SIZE_T(sizeof(VirtualFunctionPointerArgs))));
2723
2724                         pArgs->classHnd = (CORINFO_CLASS_HANDLE)th.AsPtr();
2725                         pArgs->methodHnd = (CORINFO_METHOD_HANDLE)pMD;
2726
2727                         pHelper = DynamicHelpers::CreateHelperWithArg(pModule->GetLoaderAllocator(), (TADDR)pArgs, 
2728                             GetEEFuncEntryPoint(JIT_VirtualFunctionPointer_Dynamic));
2729
                             // The helper now owns pArgs; keep the allocation.
2730                         amTracker.SuppressRelease();
2731                     }
2732                 }
2733                 break;
2734
2735             default:
2736                 UNREACHABLE();
2737             }
2738
                 // Publish the helper by patching the import cell so future
                 // calls go straight to the stub.
2739             if (pHelper != NULL)
2740             {
2741                 *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper;
2742             }
2743
2744 #ifdef _DEBUG
2745             // Always execute the reliable fallback in debug builds
2746             pHelper = NULL;
2747 #endif
2748         }
2749         EX_CATCH
2750         {
                 // Intentionally swallowed: the cell stays unpatched and the
                 // caller performs the operation inline (see function header).
2751         }
2752         EX_END_CATCH (SwallowAllExceptions);
2753     }
2754     else
2755     {
             // Non-reliable kinds: helper creation failures propagate to the
             // caller as exceptions.
2756         switch (kind)
2757         {
2758         case ENCODE_NEW_HELPER:
2759             {
2760                 CorInfoHelpFunc helpFunc = CEEInfo::getNewHelperStatic(th.AsMethodTable());
2761                 pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc));
2762             }
2763             break;
2764         case ENCODE_NEW_ARRAY_HELPER:
2765             {
2766                 CorInfoHelpFunc helpFunc = CEEInfo::getNewArrHelperStatic(th);
2767                 ArrayTypeDesc *pArrayTypeDesc = th.AsArray();
2768                 MethodTable *pArrayMT = pArrayTypeDesc->GetTemplateMethodTable();
2769                 pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), dac_cast<TADDR>(pArrayMT), CEEJitInfo::getHelperFtnStatic(helpFunc));
2770             }
2771             break;
2772
2773         case ENCODE_DELEGATE_CTOR:
2774             {
                     // Peek at the delegate object in the first argument
                     // register to discover the exact delegate type; requires
                     // cooperative GC mode while dereferencing the objectref.
2775                 MethodTable * pDelegateType = NULL;
2776
2777                 {
2778                     GCX_COOP();
2779
2780                     TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2781
2782                     if (pArgument != NULL)
2783                     {
2784                         pDelegateType = (*(Object **)pArgument)->GetMethodTable();
2785                         _ASSERTE(pDelegateType->IsDelegate());
2786                     }
2787                 }
2788
2789                 DelegateCtorArgs ctorData;
2790                 ctorData.pMethod = NULL;
2791                 ctorData.pArg3 = NULL;
2792                 ctorData.pArg4 = NULL;
2793                 ctorData.pArg5 = NULL;
2794
2795                 MethodDesc * pDelegateCtor = NULL;
2796
2797                 if (pDelegateType != NULL)
2798                 {
2799                     pDelegateCtor = COMDelegate::GetDelegateCtor(TypeHandle(pDelegateType), pMD, &ctorData);
2800
2801                     if (ctorData.pArg4 != NULL || ctorData.pArg5 != NULL)
2802                     {
2803                         // This should never happen - we should never get collectible or secure delegates here
2804                         _ASSERTE(false);
2805                         pDelegateCtor = NULL;
2806                     }
2807                 }
2808
2809                 TADDR target = NULL;
2810
2811                 if (pDelegateCtor != NULL)
2812                 {
2813                     target = pDelegateCtor->GetMultiCallableAddrOfCode();
2814                 }
2815                 else
2816                 {
                         // No specialized ctor found - fall back to the generic
                         // Delegate construction FCall.
2817                     target = ECall::GetFCallImpl(MscorlibBinder::GetMethod(METHOD__DELEGATE__CONSTRUCT_DELEGATE));
2818                     ctorData.pArg3 = NULL;
2819                 }
2820
                     // Build the stub, with or without the extra third arg the
                     // specialized ctor may require.
2821                 if (ctorData.pArg3 != NULL)
2822                 {
2823                     pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), (TADDR)ctorData.pArg3, target);
2824                 }
2825                 else
2826                 {
2827                     pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), target);
2828                 }
2829             }
2830             break;
2831
2832         case ENCODE_DICTIONARY_LOOKUP_THISOBJ:
2833         case ENCODE_DICTIONARY_LOOKUP_TYPE:
2834         case ENCODE_DICTIONARY_LOOKUP_METHOD:
2835             {
2836                 pHelper = DynamicHelpers::CreateDictionaryLookupHelper(pModule->GetLoaderAllocator(), &genericLookup, dictionaryIndexAndSlot, pModule);
2837             }
2838             break;
2839
2840         default:
2841             UNREACHABLE();
2842         }
2843
             // Publish the helper by patching the import cell.
2844         if (pHelper != NULL)
2845         {
2846             *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper;
2847         }
2848     }
2849
         // Report what was decoded so the caller's fallback path can act on it.
2850     *pKind = (CORCOMPILE_FIXUP_BLOB_KIND)kind;
2851     *pTH = th;
2852     *ppMD = pMD;
2853     *ppFD = pFD;
2854
2855     return pHelper;
2856 }
2857
//
// DynamicHelperWorker
//
// Runtime entry point (called from an assembly thunk) invoked the first time
// a dynamic-helper indirection cell is hit. Sets up a DynamicHelperFrame
// transition frame, resolves the cell via DynamicHelperFixup (which may patch
// the cell with an optimized helper stub), and - when no helper was produced -
// performs the requested operation once inline here.
//
// Returns the helper's PCODE for the thunk to transfer control to, or NULL,
// in which case the inline result has been stored into the transition block's
// saved argument-register area for the thunk to pick up.
//
2858 extern "C" SIZE_T STDCALL DynamicHelperWorker(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, INT frameFlags)
2859 {
2860     PCODE pHelper = NULL;
2861     SIZE_T result = NULL;
2862
2863     STATIC_CONTRACT_THROWS;
2864     STATIC_CONTRACT_GC_TRIGGERS;
2865     STATIC_CONTRACT_MODE_COOPERATIVE;
2866
2867     MAKE_CURRENT_THREAD_AVAILABLE();
2868
2869 #ifdef _DEBUG
2870     Thread::ObjectRefFlush(CURRENT_THREAD);
2871 #endif
2872
         // Explicit transition frame so the GC/stackwalker can see and report
         // this unmanaged transition.
2873     FrameWithCookie<DynamicHelperFrame> frame(pTransitionBlock, frameFlags);
2874     DynamicHelperFrame * pFrame = &frame;
2875
2876     pFrame->Push(CURRENT_THREAD);
2877
2878     INSTALL_MANAGED_EXCEPTION_DISPATCHER;
2879     INSTALL_UNWIND_AND_CONTINUE_HANDLER;
2880
2881 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
2882     // Decode indirection cell from callsite if it is not present
2883     if (pCell == NULL)
2884     {
2885         // Assume that the callsite is call [xxxxxxxx]
2886         PCODE retAddr = pFrame->GetReturnAddress();
2887 #ifdef _TARGET_X86_
             // x86: absolute indirection address is the 4 bytes before the
             // return address.
2888         pCell = *(((TADDR **)retAddr) - 1);
2889 #else
             // amd64: RIP-relative 32-bit displacement before the return
             // address, relative to the return address itself.
2890         pCell = (TADDR *)(*(((INT32 *)retAddr) - 1) + retAddr);
2891 #endif
2892     }
2893 #endif
2894     _ASSERTE(pCell != NULL);
2895
2896     TypeHandle th;
2897     MethodDesc * pMD = NULL;
2898     FieldDesc * pFD = NULL;
2899     CORCOMPILE_FIXUP_BLOB_KIND kind = ENCODE_NONE;
2900
2901     {
             // Fixup work may take locks / trigger loading; do it in
             // preemptive mode.
2902         GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD);
2903
2904         pHelper = DynamicHelperFixup(pTransitionBlock, pCell, sectionIndex, pModule, &kind, &th, &pMD, &pFD);
2905     }
2906
         // No optimized helper was produced (or a debug build forced the
         // fallback): perform the decoded operation inline this one time.
2907     if (pHelper == NULL)
2908     {
2909         TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock);
2910
2911         switch (kind)
2912         {
2913         case ENCODE_ISINSTANCEOF_HELPER:
2914         case ENCODE_CHKCAST_HELPER:
2915             {
2916                 BOOL throwInvalidCast = (kind == ENCODE_CHKCAST_HELPER);
2917                 if (*(Object **)pArgument == NULL || ObjIsInstanceOf(*(Object **)pArgument, th, throwInvalidCast))
2918                 {
2919                     result = (SIZE_T)(*(Object **)pArgument);
2920                 }
2921                 else
2922                 {
                         // chkcast would have thrown above; only isinst can
                         // reach here, and it yields null.
2923                     _ASSERTE (!throwInvalidCast);
2924                     result = NULL;
2925                 }
2926             }
2927             break;
2928         case ENCODE_STATIC_BASE_NONGC_HELPER:
2929             result = (SIZE_T)th.AsMethodTable()->GetNonGCStaticsBasePointer();
2930             break;
2931         case ENCODE_STATIC_BASE_GC_HELPER:
2932             result = (SIZE_T)th.AsMethodTable()->GetGCStaticsBasePointer();
2933             break;
2934         case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER:
2935             ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable());
2936             result = (SIZE_T)th.AsMethodTable()->GetNonGCThreadStaticsBasePointer();
2937             break;
2938         case ENCODE_THREAD_STATIC_BASE_GC_HELPER:
2939             ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable());
2940             result = (SIZE_T)th.AsMethodTable()->GetGCThreadStaticsBasePointer();
2941             break;
2942         case ENCODE_CCTOR_TRIGGER:
             // The class constructor was already run by DynamicHelperFixup
             // (CheckRunClassInitThrowing); nothing to compute here.
2943             break;
2944         case ENCODE_FIELD_ADDRESS:
2945             result = (SIZE_T)pFD->GetCurrentStaticAddress();
2946             break;
2947         case ENCODE_VIRTUAL_ENTRY:
2948         // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN:
2949         // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN:
2950         // case ENCODE_VIRTUAL_ENTRY_SLOT:
2951             {
2952                 OBJECTREF objRef = ObjectToOBJECTREF(*(Object **)pArgument);
2953
2954                 GCPROTECT_BEGIN(objRef);
2955
2956                 if (objRef == NULL)
2957                     COMPlusThrow(kNullReferenceException);
2958
2959                 // Duplicated logic from JIT_VirtualFunctionPointer_Framed
2960                 if (!pMD->IsVtableMethod())
2961                 {
2962                     result = pMD->GetMultiCallableAddrOfCode();
2963                 }
2964                 else
2965                 {
2966                     result = pMD->GetMultiCallableAddrOfVirtualizedCode(&objRef, th);
2967                 }
2968
2969                 GCPROTECT_END();
2970             }
2971             break;
2972         default:
2973             UNREACHABLE();
2974         }
2975     }
2976
2977     UNINSTALL_UNWIND_AND_CONTINUE_HANDLER;
2978     UNINSTALL_MANAGED_EXCEPTION_DISPATCHER;
2979
2980     pFrame->Pop(CURRENT_THREAD);
2981
         // Hand the inline result back to the assembly thunk through the
         // transition block's saved argument registers; a NULL return tells
         // the thunk to use it instead of tail-calling a helper.
2982     if (pHelper == NULL)
2983         *(SIZE_T *)((TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters()) = result;
2984     return pHelper;
2985 }
2986
2987 #endif // FEATURE_READYTORUN
2988
2989 #endif // !DACCESS_COMPILE