1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
4 // File: methodtable.cpp
12 #include "classcompat.h"
17 #include "siginfo.hpp"
21 #include "dllimport.h"
23 #include "jitinterface.h"
26 #include "fieldmarshaler.h"
28 #include "gcheaputilities.h"
29 #include "dbginterface.h"
30 #include "comdelegate.h"
31 #include "eventtrace.h"
34 #include "eeprofinterfaces.h"
35 #include "dllimportcallback.h"
37 #include "methodimpl.h"
38 #include "guidfromname.h"
41 #include "comsynchronizable.h"
42 #include "customattribute.h"
43 #include "virtualcallstub.h"
44 #include "contractimpl.h"
46 #ifdef FEATURE_COMINTEROP
47 #include "comcallablewrapper.h"
48 #include "clrtocomcall.h"
49 #include "runtimecallablewrapper.h"
50 #endif // FEATURE_COMINTEROP
52 #include "typeequivalencehash.hpp"
55 #include "genericdict.h"
56 #include "typestring.h"
59 #include "castcache.h"
60 #include "dynamicinterfacecastable.h"
61 #include "frozenobjectheap.h"
63 #ifdef FEATURE_INTERPRETER
64 #include "interpreter.h"
65 #endif // FEATURE_INTERPRETER
67 #ifndef DACCESS_COMPILE
69 // Typedef for string comparison functions.
70 typedef int (__cdecl *UTF8StringCompareFuncPtr)(const char *, const char *);
72 MethodDataCache *MethodTable::s_pMethodDataCache = NULL;
73 BOOL MethodTable::s_fUseMethodDataCache = FALSE;
74 BOOL MethodTable::s_fUseParentMethodData = FALSE;
77 extern unsigned g_dupMethods;
80 #endif // !DACCESS_COMPILE
82 #ifndef DACCESS_COMPILE
83 //==========================================================================================
86 typedef MethodTable::MethodData MethodData;
88 public: // Ctor. Allocates cEntries entries. Throws.
89 static UINT32 GetObjectSize(UINT32 cEntries);
90 MethodDataCache(UINT32 cEntries);
92 MethodData *Find(MethodTable *pMT);
93 MethodData *Find(MethodTable *pMTDecl, MethodTable *pMTImpl);
94 void Insert(MethodData *pMData);
98 // This describes each entry in the cache.
101 MethodData *m_pMData;
105 MethodData *FindHelper(MethodTable *pMTDecl, MethodTable *pMTImpl, UINT32 idx);
107 inline UINT32 GetNextTimestamp()
108 { return ++m_iCurTimestamp; }
110 inline UINT32 NumEntries()
111 { LIMITED_METHOD_CONTRACT; return m_cEntries; }
113 inline void TouchEntry(UINT32 i)
114 { WRAPPER_NO_CONTRACT; m_iLastTouched = i; GetEntry(i)->m_iTimestamp = GetNextTimestamp(); }
116 inline UINT32 GetLastTouchedEntryIndex()
117 { WRAPPER_NO_CONTRACT; return m_iLastTouched; }
119 // The end of this object contains an array of Entry
120 inline Entry *GetEntryData()
121 { LIMITED_METHOD_CONTRACT; return (Entry *)(this + 1); }
123 inline Entry *GetEntry(UINT32 i)
124 { WRAPPER_NO_CONTRACT; return GetEntryData() + i; }
127 // This serializes access to the cache
130 // This allows aging of entries to decide which one to evict when
131 // inserting a new entry.
132 UINT32 m_iCurTimestamp;
134 // The number of entries in the cache
136 UINT32 m_iLastTouched;
139 UINT32 pad; // ensures that we are a multiple of 8 bytes
141 }; // class MethodDataCache
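// A minimal usage sketch of the cache (assuming a caller along the lines of
// MethodTable::GetMethodData; the builder name below is illustrative only):
//
//     MethodData *pData = s_pMethodDataCache->Find(pMTDecl, pMTImpl);  // fast path
//     if (pData == NULL)
//     {
//         pData = BuildMethodDataFor(pMTDecl, pMTImpl);                // hypothetical builder
//         s_pMethodDataCache->Insert(pData);                           // evicts the entry with the
//     }                                                                // smallest timestamp
//
// Insert() overwrites the entry with the oldest timestamp, so the cache behaves like a small
// LRU keyed on (decl, impl) MethodTable pairs.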
143 //==========================================================================================
144 UINT32 MethodDataCache::GetObjectSize(UINT32 cEntries)
146 LIMITED_METHOD_CONTRACT;
147 return sizeof(MethodDataCache) + (sizeof(Entry) * cEntries);
150 //==========================================================================================
151 MethodDataCache::MethodDataCache(UINT32 cEntries)
152 : m_lock(COOPERATIVE_OR_PREEMPTIVE, LOCK_TYPE_DEFAULT),
154 m_cEntries(cEntries),
158 ZeroMemory(GetEntryData(), cEntries * sizeof(Entry));
161 //==========================================================================================
162 MethodTable::MethodData *MethodDataCache::FindHelper(
163 MethodTable *pMTDecl, MethodTable *pMTImpl, UINT32 idx)
171 MethodData *pEntry = GetEntry(idx)->m_pMData;
172 if (pEntry != NULL) {
173 MethodTable *pMTDeclEntry = pEntry->GetDeclMethodTable();
174 MethodTable *pMTImplEntry = pEntry->GetImplMethodTable();
175 if (pMTDeclEntry == pMTDecl && pMTImplEntry == pMTImpl) {
178 else if (pMTDecl == pMTImpl) {
179 if (pMTDeclEntry == pMTDecl) {
180 return pEntry->GetDeclMethodData();
182 if (pMTImplEntry == pMTDecl) {
183 return pEntry->GetImplMethodData();
191 //==========================================================================================
192 MethodTable::MethodData *MethodDataCache::Find(MethodTable *pMTDecl, MethodTable *pMTImpl)
201 g_sdStats.m_cCacheLookups++;
204 SimpleReadLockHolder lh(&m_lock);
206 // Check the last touched entry.
207 MethodData *pEntry = FindHelper(pMTDecl, pMTImpl, GetLastTouchedEntryIndex());
209 // Now search the entire cache.
210 if (pEntry == NULL) {
211 for (UINT32 i = 0; i < NumEntries(); i++) {
212 pEntry = FindHelper(pMTDecl, pMTImpl, i);
213 if (pEntry != NULL) {
220 if (pEntry != NULL) {
226 // Failure to find the entry in the cache.
227 g_sdStats.m_cCacheMisses++;
234 //==========================================================================================
235 MethodTable::MethodData *MethodDataCache::Find(MethodTable *pMT)
238 return Find(pMT, pMT);
241 //==========================================================================================
242 void MethodDataCache::Insert(MethodData *pMData)
245 NOTHROW; // for now, because it does not yet resize.
250 SimpleWriteLockHolder hLock(&m_lock);
252 UINT32 iMin = UINT32_MAX;
253 UINT32 idxMin = UINT32_MAX;
254 for (UINT32 i = 0; i < NumEntries(); i++) {
255 if (GetEntry(i)->m_iTimestamp < iMin) {
257 iMin = GetEntry(i)->m_iTimestamp;
260 Entry *pEntry = GetEntry(idxMin);
261 if (pEntry->m_pMData != NULL) {
262 pEntry->m_pMData->Release();
265 pEntry->m_pMData = pMData;
266 pEntry->m_iTimestamp = GetNextTimestamp();
269 //==========================================================================================
270 void MethodDataCache::Clear()
273 NOTHROW; // for now, because it does not yet resize.
278 // Taking the lock here is just a precaution. Really, the runtime
279 // should be suspended because this is called while unloading an
280 // AppDomain at the SysSuspendEE stage. But, if someone calls it
281 // outside of that context, we should be extra cautious.
282 SimpleWriteLockHolder lh(&m_lock);
284 for (UINT32 i = 0; i < NumEntries(); i++) {
285 Entry *pEntry = GetEntry(i);
286 if (pEntry->m_pMData != NULL) {
287 pEntry->m_pMData->Release();
290 ZeroMemory(GetEntryData(), NumEntries() * sizeof(Entry));
292 } // MethodDataCache::Clear
294 #endif // !DACCESS_COMPILE
297 //==========================================================================================
299 // Initialize the offsets of multipurpose slots at compile time using template metaprogramming
303 struct CountBitsAtCompileTime
305 enum { value = (N & 1) + CountBitsAtCompileTime<(N >> 1)>::value };
309 struct CountBitsAtCompileTime<0>
314 // "mask" is the mask of used slots.
316 struct MethodTable::MultipurposeSlotOffset
318 // This is the raw index of the slot, assigned on a first-come, first-served basis
319 enum { raw = CountBitsAtCompileTime<mask>::value };
321 // This is the actual index of the slot. It is equal to the raw index except for the case
322 // where the first fixed slot is not used but the second one is. The first fixed
323 // slot has to be assigned instead of the second one in that case. This assumes that
324 // there are exactly two fixed slots.
325 enum { index = (((mask & 3) == 2) && (raw == 1)) ? 0 : raw };
328 enum { slotOffset = (index == 0) ? offsetof(MethodTable, m_pMultipurposeSlot1) :
329 (index == 1) ? offsetof(MethodTable, m_pMultipurposeSlot2) :
330 (sizeof(MethodTable) + index * sizeof(TADDR) - 2 * sizeof(TADDR)) };
332 // Size of the MethodTable including overflow slots. It is used to compute the start offset of optional members.
333 enum { totalSize = (slotOffset >= sizeof(MethodTable)) ? (slotOffset + sizeof(TADDR)) : sizeof(MethodTable) };
337 // These macros recursively expand to create 2^N values for the offset arrays
339 #define MULTIPURPOSE_SLOT_OFFSET_1(mask) MULTIPURPOSE_SLOT_OFFSET (mask) MULTIPURPOSE_SLOT_OFFSET (mask | 0x01)
340 #define MULTIPURPOSE_SLOT_OFFSET_2(mask) MULTIPURPOSE_SLOT_OFFSET_1(mask) MULTIPURPOSE_SLOT_OFFSET_1(mask | 0x02)
341 #define MULTIPURPOSE_SLOT_OFFSET_3(mask) MULTIPURPOSE_SLOT_OFFSET_2(mask) MULTIPURPOSE_SLOT_OFFSET_2(mask | 0x04)
342 #define MULTIPURPOSE_SLOT_OFFSET_4(mask) MULTIPURPOSE_SLOT_OFFSET_3(mask) MULTIPURPOSE_SLOT_OFFSET_3(mask | 0x08)
343 #define MULTIPURPOSE_SLOT_OFFSET_5(mask) MULTIPURPOSE_SLOT_OFFSET_4(mask) MULTIPURPOSE_SLOT_OFFSET_4(mask | 0x10)
345 #define MULTIPURPOSE_SLOT_OFFSET(mask) MultipurposeSlotOffset<mask>::slotOffset,
346 const BYTE MethodTable::c_DispatchMapSlotOffsets[] = {
347 MULTIPURPOSE_SLOT_OFFSET_2(0)
349 const BYTE MethodTable::c_NonVirtualSlotsOffsets[] = {
350 MULTIPURPOSE_SLOT_OFFSET_3(0)
352 const BYTE MethodTable::c_ModuleOverrideOffsets[] = {
353 MULTIPURPOSE_SLOT_OFFSET_4(0)
355 #undef MULTIPURPOSE_SLOT_OFFSET
357 #define MULTIPURPOSE_SLOT_OFFSET(mask) MultipurposeSlotOffset<mask>::totalSize,
358 const BYTE MethodTable::c_OptionalMembersStartOffsets[] = {
359 MULTIPURPOSE_SLOT_OFFSET_5(0)
361 #undef MULTIPURPOSE_SLOT_OFFSET
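// A worked example of the computation above (illustrative; it assumes exactly the two fixed
// slots m_pMultipurposeSlot1/m_pMultipurposeSlot2 described in MultipurposeSlotOffset):
//
//   mask = 0x0: raw = 0, index = 0 -> slotOffset = offsetof(MethodTable, m_pMultipurposeSlot1)
//   mask = 0x1: raw = 1, index = 1 -> slotOffset = offsetof(MethodTable, m_pMultipurposeSlot2)
//   mask = 0x2: raw = 1, but (mask & 3) == 2, so index = 0 -> the unused first fixed slot is
//               assigned instead of the second one
//   mask = 0x3: raw = 2, index = 2 -> slotOffset = sizeof(MethodTable), i.e. the slot overflows
//               into the area appended after the fixed MethodTable layout, and totalSize grows
//               past sizeof(MethodTable) to make room for it
//
// Each MULTIPURPOSE_SLOT_OFFSET_n(0) expansion above emits 2^n such values, so the tables are
// indexed directly by the mask of used slots.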
364 //==========================================================================================
365 // Optimization intended for MethodTable::GetModule, MethodTable::GetDispatchMap and MethodTable::GetNonVirtualSlotsPtr
367 #include <optsmallperfcritical.h>
369 PTR_Module MethodTable::GetModule()
371 LIMITED_METHOD_DAC_CONTRACT;
373 // Fast path for non-generic non-array case
374 if ((m_dwFlags & (enum_flag_HasComponentSize | enum_flag_GenericsMask)) == 0)
375 return GetLoaderModule();
377 MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable();
378 if (!pMTForModule->HasModuleOverride())
379 return pMTForModule->GetLoaderModule();
381 TADDR pSlot = pMTForModule->GetMultipurposeSlotPtr(enum_flag_HasModuleOverride, c_ModuleOverrideOffsets);
382 return *dac_cast<DPTR(PTR_Module)>(pSlot);
385 //==========================================================================================
386 PTR_DispatchMap MethodTable::GetDispatchMap()
388 LIMITED_METHOD_DAC_CONTRACT;
390 MethodTable * pMT = this;
392 if (!pMT->HasDispatchMapSlot())
394 pMT = pMT->GetCanonicalMethodTable();
395 if (!pMT->HasDispatchMapSlot())
399 TADDR pSlot = pMT->GetMultipurposeSlotPtr(enum_flag_HasDispatchMapSlot, c_DispatchMapSlotOffsets);
400 return *dac_cast<DPTR(PTR_DispatchMap)>(pSlot);
403 //==========================================================================================
404 TADDR MethodTable::GetNonVirtualSlotsPtr()
406 LIMITED_METHOD_DAC_CONTRACT;
408 _ASSERTE(GetFlag(enum_flag_HasNonVirtualSlots));
409 return GetMultipurposeSlotPtr(enum_flag_HasNonVirtualSlots, c_NonVirtualSlotsOffsets);
412 #include <optdefault.h>
415 //==========================================================================================
416 PTR_Module MethodTable::GetModuleIfLoaded()
431 #ifndef DACCESS_COMPILE
432 //==========================================================================================
433 void MethodTable::SetModule(Module * pModule)
435 LIMITED_METHOD_CONTRACT;
437 if (HasModuleOverride())
439 *GetModuleOverridePtr() = pModule;
442 _ASSERTE(GetModule() == pModule);
444 #endif // DACCESS_COMPILE
446 //==========================================================================================
447 BOOL MethodTable::ValidateWithPossibleAV()
449 CANNOT_HAVE_CONTRACT;
452 // MethodTables have the canonicalization property below.
453 // i.e. canonicalize, and canonicalize again, and check that the results are
454 // the same. This is a property that holds for every single valid object in
455 // the system, but which should hold for very few other addresses.
457 // For non-generic classes, we can rely on comparing
458 // object->methodtable->class->methodtable
460 // object->methodtable
462 // However, for generic instantiation this does not work. There we must
465 // object->methodtable->class->methodtable->class
467 // object->methodtable->class
469 // Of course, that's not necessarily enough to verify that the method
470 // table and class are absolutely valid - we rely on type soundness
471 // for that. We need to do more sanity checking to
472 // make sure that our pointer here is in fact a valid object.
473 PTR_EEClass pEEClass = this->GetClassWithPossibleAV();
474 return ((pEEClass && (this == pEEClass->GetMethodTableWithPossibleAV())) ||
475 ((HasInstantiation() || IsArray()) &&
476 (pEEClass && (pEEClass->GetMethodTableWithPossibleAV()->GetClassWithPossibleAV() == pEEClass))));
479 #ifndef DACCESS_COMPILE
481 //==========================================================================================
482 BOOL MethodTable::IsClassInited()
486 if (IsClassPreInited())
489 if (IsSharedByGenericInstantiations())
492 DomainLocalModule *pLocalModule = GetDomainLocalModule();
494 _ASSERTE(pLocalModule != NULL);
496 return pLocalModule->IsClassInitialized(this);
499 //==========================================================================================
500 BOOL MethodTable::IsInitError()
504 DomainLocalModule *pLocalModule = GetDomainLocalModule();
505 _ASSERTE(pLocalModule != NULL);
507 return pLocalModule->IsClassInitError(this);
510 //==========================================================================================
511 // mark the class as having its .cctor run
512 void MethodTable::SetClassInited()
515 _ASSERTE(!IsClassPreInited());
516 GetDomainLocalModule()->SetClassInitialized(this);
519 //==========================================================================================
520 void MethodTable::SetClassInitError()
523 GetDomainLocalModule()->SetClassInitError(this);
526 //==========================================================================================
527 // mark the class as having been restored.
528 void MethodTable::SetIsRestored()
537 PRECONDITION(!IsFullyLoaded());
539 InterlockedAnd((LONG*)&GetWriteableDataForWrite()->m_dwFlags, ~MethodTableWriteableData::enum_flag_Unrestored);
541 #ifndef DACCESS_COMPILE
542 if (ETW_PROVIDER_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER))
544 ETW::MethodLog::MethodTableRestored(this);
549 //==========================================================================================
550 // mark as COM object type (System.__ComObject and types deriving from it)
551 void MethodTable::SetComObjectType()
553 LIMITED_METHOD_CONTRACT;
554 SetFlag(enum_flag_ComObject);
557 #ifdef FEATURE_ICASTABLE
558 void MethodTable::SetICastable()
560 LIMITED_METHOD_CONTRACT;
561 SetFlag(enum_flag_ICastable);
565 BOOL MethodTable::IsICastable()
567 LIMITED_METHOD_DAC_CONTRACT;
568 #ifdef FEATURE_ICASTABLE
569 return GetFlag(enum_flag_ICastable);
575 void MethodTable::SetIDynamicInterfaceCastable()
577 LIMITED_METHOD_CONTRACT;
578 SetFlag(enum_flag_IDynamicInterfaceCastable);
581 BOOL MethodTable::IsIDynamicInterfaceCastable()
583 LIMITED_METHOD_DAC_CONTRACT;
584 return GetFlag(enum_flag_IDynamicInterfaceCastable);
587 void MethodTable::SetIsTrackedReferenceWithFinalizer()
589 LIMITED_METHOD_CONTRACT;
590 SetFlag(enum_flag_IsTrackedReferenceWithFinalizer);
593 #endif // !DACCESS_COMPILE
595 BOOL MethodTable::IsTrackedReferenceWithFinalizer()
597 LIMITED_METHOD_DAC_CONTRACT;
598 return GetFlag(enum_flag_IsTrackedReferenceWithFinalizer);
601 //==========================================================================================
602 WORD MethodTable::GetNumMethods()
604 LIMITED_METHOD_DAC_CONTRACT;
605 return GetClass()->GetNumMethods();
608 //==========================================================================================
609 PTR_BaseDomain MethodTable::GetDomain()
611 LIMITED_METHOD_DAC_CONTRACT;
612 return dac_cast<PTR_BaseDomain>(AppDomain::GetCurrentDomain());
615 //==========================================================================================
616 BOOL MethodTable::HasSameTypeDefAs(MethodTable *pMT)
618 LIMITED_METHOD_DAC_CONTRACT;
623 // optimize for the negative case where we expect RID mismatch
624 if (GetTypeDefRid() != pMT->GetTypeDefRid())
627 if (GetCanonicalMethodTable() == pMT->GetCanonicalMethodTable())
630 return (GetModule() == pMT->GetModule());
633 #ifndef DACCESS_COMPILE
635 //==========================================================================================
636 PTR_MethodTable InterfaceInfo_t::GetApproxMethodTable(Module * pContainingModule)
645 MethodTable * pItfMT = GetMethodTable();
646 ClassLoader::EnsureLoaded(TypeHandle(pItfMT), CLASS_LOAD_APPROXPARENTS);
650 //==========================================================================================
651 // get the method desc given the interface method desc
652 /* static */ MethodDesc *MethodTable::GetMethodDescForInterfaceMethodAndServer(
653 TypeHandle ownerType, MethodDesc *pItfMD, OBJECTREF *pServer)
655 CONTRACT(MethodDesc*)
660 PRECONDITION(CheckPointer(pItfMD));
661 PRECONDITION(pItfMD->IsInterface());
662 PRECONDITION(!ownerType.IsNull());
663 PRECONDITION(ownerType.GetMethodTable()->HasSameTypeDefAs(pItfMD->GetMethodTable()));
664 POSTCONDITION(CheckPointer(RETVAL));
667 VALIDATEOBJECTREF(*pServer);
670 MethodTable * pItfMT = ownerType.GetMethodTable();
671 PREFIX_ASSUME(pItfMT != NULL);
674 MethodTable *pServerMT = (*pServer)->GetMethodTable();
675 PREFIX_ASSUME(pServerMT != NULL);
677 #ifdef FEATURE_ICASTABLE
678 // In the case of ICastable, instead of trying to find the method implementation in the real object type,
679 // we call GetMethodDescForInterfaceMethod() again with whatever type it returns.
680 // This allows objects that implement ICastable to mimic the behavior of other types.
681 if (pServerMT->IsICastable() &&
682 !pItfMD->HasMethodInstantiation() &&
683 !TypeHandle(pServerMT).CanCastTo(ownerType)) // we need to make sure object doesn't implement this interface in a natural way
685 GCStress<cfg_any>::MaybeTrigger();
687 // Make call to ICastableHelpers.GetImplType(obj, interfaceTypeObj)
688 PREPARE_NONVIRTUAL_CALLSITE(METHOD__ICASTABLEHELPERS__GETIMPLTYPE);
690 OBJECTREF ownerManagedType = ownerType.GetManagedClassObject(); //GC triggers
692 DECLARE_ARGHOLDER_ARRAY(args, 2);
693 args[ARGNUM_0] = OBJECTREF_TO_ARGHOLDER(*pServer);
694 args[ARGNUM_1] = OBJECTREF_TO_ARGHOLDER(ownerManagedType);
696 OBJECTREF impTypeObj = NULL;
697 CALL_MANAGED_METHOD_RETREF(impTypeObj, OBJECTREF, args);
699 INDEBUG(ownerManagedType = NULL); //ownerManagedType wasn't protected during the call
700 if (impTypeObj == NULL) // GetImplType returns default(RuntimeTypeHandle)
702 COMPlusThrow(kEntryPointNotFoundException);
705 ReflectClassBaseObject* resultTypeObj = ((ReflectClassBaseObject*)OBJECTREFToObject(impTypeObj));
706 TypeHandle resultTypeHnd = resultTypeObj->GetType();
707 MethodTable *pResultMT = resultTypeHnd.GetMethodTable();
709 RETURN(pResultMT->GetMethodDescForInterfaceMethod(ownerType, pItfMD, TRUE /* throwOnConflict */));
713 // For IDynamicInterfaceCastable, instead of trying to find the method implementation in the real object type,
714 // we call GetInterfaceImplementation on the object and then call GetMethodDescForInterfaceMethod
715 // with whatever type it returns.
716 if (pServerMT->IsIDynamicInterfaceCastable()
717 && !TypeHandle(pServerMT).CanCastTo(ownerType)) // we need to make sure object doesn't implement this interface in a natural way
719 TypeHandle implTypeHandle;
720 OBJECTREF obj = *pServer;
722 GCPROTECT_BEGIN(obj);
723 OBJECTREF implTypeRef = DynamicInterfaceCastable::GetInterfaceImplementation(&obj, ownerType);
724 _ASSERTE(implTypeRef != NULL);
726 ReflectClassBaseObject *implTypeObj = ((ReflectClassBaseObject *)OBJECTREFToObject(implTypeRef));
727 implTypeHandle = implTypeObj->GetType();
730 RETURN(implTypeHandle.GetMethodTable()->GetMethodDescForInterfaceMethod(ownerType, pItfMD, TRUE /* throwOnConflict */));
733 #ifdef FEATURE_COMINTEROP
734 if (pServerMT->IsComObjectType() && !pItfMD->HasMethodInstantiation())
736 // interop needs an exact MethodDesc
737 pItfMD = MethodDesc::FindOrCreateAssociatedMethodDesc(
739 ownerType.GetMethodTable(),
740 FALSE, // forceBoxedEntryPoint
741 Instantiation(), // methodInst
742 FALSE, // allowInstParam
743 TRUE); // forceRemotableMethod
745 RETURN(pServerMT->GetMethodDescForComInterfaceMethod(pItfMD, false));
747 #endif // FEATURE_COMINTEROP
749 // Handle pure COM+ types.
750 RETURN (pServerMT->GetMethodDescForInterfaceMethod(ownerType, pItfMD, TRUE /* throwOnConflict */));
753 #ifdef FEATURE_COMINTEROP
754 //==========================================================================================
755 // get the method desc given the interface method desc on a COM implemented server
756 // (if fNullOk is set then NULL is an allowable return value)
757 MethodDesc *MethodTable::GetMethodDescForComInterfaceMethod(MethodDesc *pItfMD, bool fNullOk)
759 CONTRACT(MethodDesc*)
764 PRECONDITION(CheckPointer(pItfMD));
765 PRECONDITION(pItfMD->IsInterface());
766 PRECONDITION(IsComObjectType());
767 POSTCONDITION(fNullOk || CheckPointer(RETVAL));
771 MethodTable * pItfMT = pItfMD->GetMethodTable();
772 PREFIX_ASSUME(pItfMT != NULL);
774 // First handle the __ComObject class, which doesn't have a Dynamic Interface Map.
775 if (!HasDynamicInterfaceMap())
781 // Now we handle the more complex extensible RCWs. The first thing to do is check
782 // whether the static definition of the extensible RCW specifies that the class
783 // implements the interface.
784 DWORD slot = (DWORD) -1;
786 // Calling GetTarget here instead of FindDispatchImpl gives us caching functionality to increase speed.
787 PCODE tgt = VirtualCallStubManager::GetTarget(
788 pItfMT->GetLoaderAllocator()->GetDispatchToken(pItfMT->GetTypeID(), pItfMD->GetSlot()), this, TRUE /* throwOnConflict */);
792 RETURN(MethodTable::GetMethodDescForSlotAddress(tgt));
795 // The interface is not in the static class definition so we need to look at the
796 // dynamic interfaces.
797 else if (FindDynamicallyAddedInterface(pItfMT))
799 // This interface was added to the class dynamically, so it is implemented
800 // by the COM object. We treat these dynamically added interfaces the same
801 // way we treat COM objects, that is, by using the interface vtable.
810 #endif // FEATURE_COMINTEROP
813 //---------------------------------------------------------------------------------------
815 MethodTable* CreateMinimalMethodTable(Module* pContainingModule,
816 LoaderHeap* pCreationHeap,
817 AllocMemTracker* pamTracker)
824 INJECT_FAULT(COMPlusThrowOM());
828 EEClass* pClass = EEClass::CreateMinimalClass(pCreationHeap, pamTracker);
830 LOG((LF_BCL, LL_INFO100, "Level2 - Creating MethodTable {0x%p}...\n", pClass));
832 MethodTable* pMT = (MethodTable *)(void *)pamTracker->Track(pCreationHeap->AllocMem(S_SIZE_T(sizeof(MethodTable))));
834 // Note: Memory allocated on loader heap is zero filled
835 // memset(pMT, 0, sizeof(MethodTable));
837 // Allocate the private data block ("private" during runtime in the ngen'ed case).
838 BYTE* pMTWriteableData = (BYTE *)
839 pamTracker->Track(pCreationHeap->AllocMem(S_SIZE_T(sizeof(MethodTableWriteableData))));
840 pMT->SetWriteableData((PTR_MethodTableWriteableData)pMTWriteableData);
843 // Set up the EEClass
845 pClass->SetMethodTable(pMT); // in the EEClass set the pointer to this MethodTable
846 pClass->SetAttrClass(tdPublic | tdSealed);
849 // Set up the MethodTable
851 // Does not need parent. Note that MethodTable for COR_GLOBAL_PARENT_TOKEN does not have parent either,
852 // so the system has to be wired for dealing with no parent anyway.
853 pMT->SetParentMethodTable(NULL);
854 pMT->SetClass(pClass);
855 pMT->SetLoaderModule(pContainingModule);
856 pMT->SetLoaderAllocator(pContainingModule->GetLoaderAllocator());
857 pMT->SetInternalCorElementType(ELEMENT_TYPE_CLASS);
858 pMT->SetBaseSize(OBJECT_BASESIZE);
861 pClass->SetDebugClassName("dynamicClass");
862 pMT->SetDebugClassName("dynamicClass");
865 LOG((LF_BCL, LL_INFO10, "Level1 - MethodTable created {0x%p}\n", pClass));
871 #ifdef FEATURE_COMINTEROP
872 //==========================================================================================
873 OBJECTREF MethodTable::GetObjCreateDelegate()
882 _ASSERT(!IsInterface());
884 return ObjectFromHandle(GetOHDelegate());
889 //==========================================================================================
890 void MethodTable::SetObjCreateDelegate(OBJECTREF orDelegate)
896 THROWS; // From CreateHandle
901 StoreObjectInHandle(GetOHDelegate(), orDelegate);
903 SetOHDelegate (GetAppDomain()->CreateHandle(orDelegate));
905 #endif // FEATURE_COMINTEROP
908 //==========================================================================================
909 void MethodTable::SetInterfaceMap(WORD wNumInterfaces, InterfaceInfo_t* iMap)
911 LIMITED_METHOD_CONTRACT;
912 if (wNumInterfaces == 0)
914 _ASSERTE(!HasInterfaceMap());
918 m_wNumInterfaces = wNumInterfaces;
920 CONSISTENCY_CHECK(IS_ALIGNED(iMap, sizeof(void*)));
921 m_pInterfaceMap = iMap;
924 //==========================================================================================
925 // Called after GetExtraInterfaceInfoSize above to set up a new MethodTable with the additional memory to track
926 // extra interface info. If there is a non-zero number of interfaces implemented on this class but
927 // GetExtraInterfaceInfoSize() returned zero, this call must still be made (with a NULL argument).
928 void MethodTable::InitializeExtraInterfaceInfo(PVOID pInfo)
930 STANDARD_VM_CONTRACT;
932 // Check that memory was allocated or not allocated in the right scenarios.
933 _ASSERTE(((pInfo == NULL) && (GetExtraInterfaceInfoSize(GetNumInterfaces()) == 0)) ||
934 ((pInfo != NULL) && (GetExtraInterfaceInfoSize(GetNumInterfaces()) != 0)));
936 // This call is a no-op if we don't require extra interface info (in which case a buffer should never have
938 if (!HasExtraInterfaceInfo())
940 _ASSERTE(pInfo == NULL);
944 // Get pointer to optional slot that holds either a small inlined bitmap of flags or the pointer to a
946 PTR_TADDR pInfoSlot = GetExtraInterfaceInfoPtr();
948 // In either case, data inlined or held in an external buffer, the correct thing to do is to write pInfo
949 // to the slot. In the inlined case we wish to set all flags to their default value (zero, false) and
950 // writing NULL does that. Otherwise we simply want to dump the buffer pointer directly into the slot (no
951 // need for a discriminator bit, we can always infer which format we're using based on the interface
953 *pInfoSlot = (TADDR)pInfo;
955 // There shouldn't be any need for further initialization in the buffered case since loader heap
956 // allocation zeroes data.
959 for (DWORD i = 0; i < GetExtraInterfaceInfoSize(GetNumInterfaces()); i++)
960 _ASSERTE(*((BYTE*)pInfo + i) == 0);
964 // Define a macro that generates a mask for a given bit in a TADDR correctly on either 32 or 64 bit platforms.
966 #define SELECT_TADDR_BIT(_index) (1ULL << (_index))
968 #define SELECT_TADDR_BIT(_index) (1U << (_index))
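// An illustrative example of how the bitmap below is addressed (assuming a 64-bit TADDR): for
// interface index 70, idxTaddr = 70 / 64 = 1 and idxInTaddr = 70 % 64 = 6, so the flag for that
// interface is bit SELECT_TADDR_BIT(6) of pBitmap[1]. With at most kInlinedInterfaceInfoThreshold
// interfaces, the whole bitmap fits in the optional slot itself and SELECT_TADDR_BIT(index) is
// applied to the slot value directly.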
971 //==========================================================================================
972 // For the given interface in the map (specified via map index) mark the interface as declared explicitly on
973 // this class. This is not legal for dynamically added interfaces (as used by RCWs).
974 void MethodTable::SetInterfaceDeclaredOnClass(DWORD index)
976 STANDARD_VM_CONTRACT;
978 _ASSERTE(HasExtraInterfaceInfo());
979 _ASSERTE(index < GetNumInterfaces());
981 // Get address of optional slot for extra info.
982 PTR_TADDR pInfoSlot = GetExtraInterfaceInfoPtr();
984 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshold)
986 // Bitmap of flags is stored inline in the optional slot.
987 *pInfoSlot |= SELECT_TADDR_BIT(index);
991 // Slot points to a buffer containing a larger bitmap.
992 TADDR *pBitmap = (PTR_TADDR)*pInfoSlot;
994 DWORD idxTaddr = index / (sizeof(TADDR) * 8); // Select TADDR in array that covers the target bit
995 DWORD idxInTaddr = index % (sizeof(TADDR) * 8);
996 TADDR bitmask = SELECT_TADDR_BIT(idxInTaddr);
998 pBitmap[idxTaddr] |= bitmask;
999 _ASSERTE((pBitmap[idxTaddr] & bitmask) == bitmask);
1003 //==========================================================================================
1004 // For the given interface return true if the interface was declared explicitly on this class.
1005 bool MethodTable::IsInterfaceDeclaredOnClass(DWORD index)
1007 STANDARD_VM_CONTRACT;
1009 _ASSERTE(HasExtraInterfaceInfo());
1011 // Dynamic interfaces are always marked as not DeclaredOnClass (I don't know why but this is how the code
1012 // was originally authored).
1013 if (index >= GetNumInterfaces())
1015 #ifdef FEATURE_COMINTEROP
1016 _ASSERTE(HasDynamicInterfaceMap());
1017 #endif // FEATURE_COMINTEROP
1021 // Get data from the optional extra info slot.
1022 TADDR taddrInfo = *GetExtraInterfaceInfoPtr();
1024 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshold)
1026 // Bitmap of flags is stored directly in the value.
1027 return (taddrInfo & SELECT_TADDR_BIT(index)) != 0;
1031 // Slot points to a buffer containing a larger bitmap.
1032 TADDR *pBitmap = (PTR_TADDR)taddrInfo;
1034 DWORD idxTaddr = index / (sizeof(TADDR) * 8); // Select TADDR in array that covers the target bit
1035 DWORD idxInTaddr = index % (sizeof(TADDR) * 8);
1036 TADDR bitmask = SELECT_TADDR_BIT(idxInTaddr);
1038 return (pBitmap[idxTaddr] & bitmask) != 0;
1042 #ifdef FEATURE_COMINTEROP
1044 //==========================================================================================
1045 PTR_InterfaceInfo MethodTable::GetDynamicallyAddedInterfaceMap()
1047 LIMITED_METHOD_DAC_CONTRACT;
1048 PRECONDITION(HasDynamicInterfaceMap());
1050 return GetInterfaceMap() + GetNumInterfaces();
1053 //==========================================================================================
1054 unsigned MethodTable::GetNumDynamicallyAddedInterfaces()
1056 LIMITED_METHOD_DAC_CONTRACT;
1057 PRECONDITION(HasDynamicInterfaceMap());
1059 PTR_InterfaceInfo pInterfaces = GetInterfaceMap();
1060 PREFIX_ASSUME(pInterfaces != NULL);
1061 return (unsigned)*(dac_cast<PTR_SIZE_T>(pInterfaces) - 1);
1064 //==========================================================================================
1065 BOOL MethodTable::FindDynamicallyAddedInterface(MethodTable *pInterface)
1067 LIMITED_METHOD_CONTRACT;
1069 _ASSERTE(IsRestored());
1070 _ASSERTE(HasDynamicInterfaceMap()); // This should never be called for a type that is not an extensible RCW.
1072 unsigned cDynInterfaces = GetNumDynamicallyAddedInterfaces();
1073 InterfaceInfo_t *pDynItfMap = GetDynamicallyAddedInterfaceMap();
1075 for (unsigned i = 0; i < cDynInterfaces; i++)
1077 if (pDynItfMap[i].GetMethodTable() == pInterface)
1084 //==========================================================================================
1085 void MethodTable::AddDynamicInterface(MethodTable *pItfMT)
1092 PRECONDITION(IsRestored());
1093 PRECONDITION(HasDynamicInterfaceMap()); // This should never be called for a type that is not an extensible RCW.
1097 unsigned NumDynAddedInterfaces = GetNumDynamicallyAddedInterfaces();
1098 unsigned TotalNumInterfaces = GetNumInterfaces() + NumDynAddedInterfaces;
1100 InterfaceInfo_t *pNewItfMap = NULL;
1101 S_SIZE_T AllocSize = (S_SIZE_T(S_UINT32(TotalNumInterfaces) + S_UINT32(1)) * S_SIZE_T(sizeof(InterfaceInfo_t))) + S_SIZE_T(sizeof(DWORD_PTR));
1102 if (AllocSize.IsOverflow())
1103 ThrowHR(COR_E_OVERFLOW);
1105 // Allocate the new interface table, adding one entry for the new interface and one
1106 // more for the dummy slot before the start of the table.
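// The resulting layout is (illustrative sketch based on the code below):
//
//   [ DWORD_PTR : count of dynamically added interfaces ]
//   [ InterfaceInfo_t 0 .. GetNumInterfaces()-1 ]          <-- m_pInterfaceMap points at entry 0
//   [ InterfaceInfo_t : previously added dynamic interfaces ]
//   [ InterfaceInfo_t : the newly added interface ]
//
// GetNumDynamicallyAddedInterfaces() reads the count from the pointer-sized slot immediately
// preceding the map, which is why the allocation reserves one extra DWORD_PTR in front.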
1107 pNewItfMap = (InterfaceInfo_t*)(void*)GetLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(AllocSize);
1109 pNewItfMap = (InterfaceInfo_t*)(((BYTE *)pNewItfMap) + sizeof(DWORD_PTR));
1111 // Copy the old map into the new one.
1112 if (TotalNumInterfaces > 0) {
1113 InterfaceInfo_t *pInterfaceMap = GetInterfaceMap();
1114 PREFIX_ASSUME(pInterfaceMap != NULL);
1116 for (unsigned index = 0; index < TotalNumInterfaces; ++index)
1118 InterfaceInfo_t *pIntInfo = (InterfaceInfo_t *) (pNewItfMap + index);
1119 pIntInfo->SetMethodTable((pInterfaceMap + index)->GetMethodTable());
1123 // Add the new interface at the end of the map.
1124 pNewItfMap[TotalNumInterfaces].SetMethodTable(pItfMT);
1126 // Update the count of dynamically added interfaces.
1127 *(((DWORD_PTR *)pNewItfMap) - 1) = NumDynAddedInterfaces + 1;
1129 // Switch the old interface map with the new one.
1130 VolatileStore(&m_pInterfaceMap, pNewItfMap);
1132 // Log the fact that we leaked the interface vtable map.
1134 LOG((LF_INTEROP, LL_EVERYTHING,
1135 "Extensible RCW %s being cast to interface %s caused an interface vtable map leak",
1136 GetClass()->GetDebugClassName(), pItfMT->GetClass()->m_szDebugClassName));
1138 LOG((LF_INTEROP, LL_EVERYTHING,
1139 "Extensible RCW being cast to an interface caused an interface vtable map leak"));
1141 } // MethodTable::AddDynamicInterface
1143 #endif // FEATURE_COMINTEROP
1145 void MethodTable::SetupGenericsStaticsInfo(FieldDesc* pStaticFieldDescs)
1155 // No need to generate IDs for open types. Indeed, since we don't save them
1156 // in the NGEN image, it would be actively incorrect to do so. However,
1157 // we still leave the optional member in the MethodTable holding the value -1 for the ID.
1159 GenericsStaticsInfo *pInfo = GetGenericsStaticsInfo();
1160 if (!ContainsGenericVariables() && !IsSharedByGenericInstantiations())
1162 Module * pModuleForStatics = GetLoaderModule();
1164 pInfo->m_DynamicTypeID = pModuleForStatics->AllocateDynamicEntry(this);
1168 pInfo->m_DynamicTypeID = (SIZE_T)-1;
1171 pInfo->m_pFieldDescs = pStaticFieldDescs;
1174 #endif // !DACCESS_COMPILE
1176 //==========================================================================================
1177 // Calculate how many bytes of storage will be required to track additional information for interfaces. This
1178 // will be zero if there are no interfaces, and can also be zero for small numbers of interfaces;
1179 // callers should be ready to handle this.
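// As an illustrative example (assuming a 64-bit TADDR, i.e. sizeof(TADDR) * 8 == 64): any count
// at or below kInlinedInterfaceInfoThreshold needs no extra storage because the bitmap lives in
// the optional slot itself; a count of 65 (assuming that exceeds the threshold) rounds up to 128
// bits and therefore 16 bytes (two TADDRs), and so on in whole-TADDR increments.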
1180 /* static */ SIZE_T MethodTable::GetExtraInterfaceInfoSize(DWORD cInterfaces)
1182 LIMITED_METHOD_DAC_CONTRACT;
1184 // For small numbers of interfaces we can record the info in the TADDR of the optional member itself (use
1185 // the TADDR as a bitmap).
1186 if (cInterfaces <= kInlinedInterfaceInfoThreshold)
1189 // Otherwise we'll cause an array of TADDRs to be allocated (use TADDRs since the heap space allocated
1190 // will almost certainly need to be TADDR aligned anyway).
1191 return ALIGN_UP(cInterfaces, sizeof(TADDR) * 8) / 8;
1194 #ifdef DACCESS_COMPILE
1195 //==========================================================================================
1196 void MethodTable::EnumMemoryRegionsForExtraInterfaceInfo()
1200 // No extra data to enum if the number of interfaces is at or below the threshold -- there is either no data or
1201 // it all fits into the optional members inline.
1202 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshold)
1205 DacEnumMemoryRegion(*GetExtraInterfaceInfoPtr(), GetExtraInterfaceInfoSize(GetNumInterfaces()));
1207 #endif // DACCESS_COMPILE
1209 //==========================================================================================
1210 Module* MethodTable::GetModuleForStatics()
1212 WRAPPER_NO_CONTRACT;
1215 if (HasGenericsStaticsInfo())
1217 DWORD dwDynamicClassDomainID;
1218 return GetGenericsStaticsModuleAndID(&dwDynamicClassDomainID);
1222 return GetLoaderModule();
1226 //==========================================================================================
1227 DWORD MethodTable::GetModuleDynamicEntryID()
1229 WRAPPER_NO_CONTRACT;
1232 _ASSERTE(IsDynamicStatics() && "Only memory reflection emit types and generics can have a dynamic ID");
1234 if (HasGenericsStaticsInfo())
1236 DWORD dwDynamicClassDomainID;
1237 GetGenericsStaticsModuleAndID(&dwDynamicClassDomainID);
1238 return dwDynamicClassDomainID;
1242 return GetClass()->GetModuleDynamicID();
1246 #ifndef DACCESS_COMPILE
1248 #ifdef FEATURE_TYPEEQUIVALENCE
1249 //==========================================================================================
1250 // Equivalence based on Guid and TypeIdentifier attributes to support the "no-PIA" feature.
1251 BOOL MethodTable::IsEquivalentTo_Worker(MethodTable *pOtherMT COMMA_INDEBUG(TypeHandlePairList *pVisited))
1261 _ASSERTE(HasTypeEquivalence() && pOtherMT->HasTypeEquivalence());
1265 if (TypeHandlePairList::Exists(pVisited, TypeHandle(this), TypeHandle(pOtherMT)))
1267 _ASSERTE(!"We are in the process of comparing these types already. That should never happen!");
1270 TypeHandlePairList newVisited(TypeHandle(this), TypeHandle(pOtherMT), pVisited);
1274 if (HasInstantiation() != pOtherMT->HasInstantiation())
1279 if (!pOtherMT->IsArray() || GetRank() != pOtherMT->GetRank())
1282 if (IsMultiDimArray() != pOtherMT->IsMultiDimArray())
1284 // A multidimensional array is not equivalent to an SzArray.
1285 // This handles the case of a Rank 1 multidimensional array
1286 // compared to a normal (SZ) array.
1290 // arrays of structures have their own unshared MTs and will take this path
1291 return (GetArrayElementTypeHandle().IsEquivalentTo(pOtherMT->GetArrayElementTypeHandle() COMMA_INDEBUG(&newVisited)));
1294 return IsEquivalentTo_WorkerInner(pOtherMT COMMA_INDEBUG(&newVisited));
1297 //==========================================================================================
1298 // Type equivalence - SO intolerant part.
1299 BOOL MethodTable::IsEquivalentTo_WorkerInner(MethodTable *pOtherMT COMMA_INDEBUG(TypeHandlePairList *pVisited))
1306 LOADS_TYPE(CLASS_DEPENDENCIES_LOADED);
1310 TypeEquivalenceHashTable *typeHashTable = NULL;
1311 AppDomain *pDomain = GetAppDomain();
1312 if (pDomain != NULL)
1314 typeHashTable = pDomain->GetTypeEquivalenceCache();
1315 TypeEquivalenceHashTable::EquivalenceMatch match = typeHashTable->CheckEquivalence(TypeHandle(this), TypeHandle(pOtherMT));
1318 case TypeEquivalenceHashTable::Match:
1320 case TypeEquivalenceHashTable::NoMatch:
1322 case TypeEquivalenceHashTable::MatchUnknown:
1330 BOOL fEquivalent = FALSE;
1332 // Check if type is generic
1333 if (HasInstantiation())
1335 // Limit equivalence on generics only to interfaces
1336 if (!IsInterface() || !pOtherMT->IsInterface())
1338 fEquivalent = FALSE;
1339 goto EquivalenceCalculated;
1342 // check whether the instantiations are equivalent
1343 Instantiation inst1 = GetInstantiation();
1344 Instantiation inst2 = pOtherMT->GetInstantiation();
1346 // Verify generic argument count
1347 if (inst1.GetNumArgs() != inst2.GetNumArgs())
1349 fEquivalent = FALSE;
1350 goto EquivalenceCalculated;
1353 // Verify each generic argument type
1354 for (DWORD i = 0; i < inst1.GetNumArgs(); i++)
1356 if (!inst1[i].IsEquivalentTo(inst2[i] COMMA_INDEBUG(pVisited)))
1358 fEquivalent = FALSE;
1359 goto EquivalenceCalculated;
1363 if (GetTypeDefRid() == pOtherMT->GetTypeDefRid() && GetModule() == pOtherMT->GetModule())
1365 // it's OK to declare the MTs equivalent at this point; the cases we care
1366 // about are IList<IFoo> and IList<IBar> where IFoo and IBar are equivalent
1371 fEquivalent = FALSE;
1373 goto EquivalenceCalculated;
1378 if (!pOtherMT->IsArray() || GetRank() != pOtherMT->GetRank())
1380 fEquivalent = FALSE;
1381 goto EquivalenceCalculated;
1384 // arrays of structures have their own unshared MTs and will take this path
1385 TypeHandle elementType1 = GetArrayElementTypeHandle();
1386 TypeHandle elementType2 = pOtherMT->GetArrayElementTypeHandle();
1387 fEquivalent = elementType1.IsEquivalentTo(elementType2 COMMA_INDEBUG(pVisited));
1388 goto EquivalenceCalculated;
1391 fEquivalent = CompareTypeDefsForEquivalence(GetCl(), pOtherMT->GetCl(), GetModule(), pOtherMT->GetModule(), NULL);
1393 EquivalenceCalculated:
1394 // Record equivalence matches if a table exists
1395 if (typeHashTable != NULL)
1397 // Collectible type results will not get cached.
1398 if ((!Collectible() && !pOtherMT->Collectible()))
1400 auto match = fEquivalent ? TypeEquivalenceHashTable::Match : TypeEquivalenceHashTable::NoMatch;
1401 typeHashTable->RecordEquivalence(TypeHandle(this), TypeHandle(pOtherMT), match);
1407 #endif // FEATURE_TYPEEQUIVALENCE
1409 //==========================================================================================
1410 BOOL MethodTable::CanCastToInterface(MethodTable *pTargetMT, TypeHandlePairList *pVisited)
1418 PRECONDITION(CheckPointer(pTargetMT));
1419 PRECONDITION(pTargetMT->IsInterface());
1420 PRECONDITION(IsRestored());
1424 if (!pTargetMT->HasVariance())
1426 if (IsInterface() && IsEquivalentTo(pTargetMT))
1429 return ImplementsEquivalentInterface(pTargetMT);
1433 if (CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited))
1436 if (pTargetMT->IsSpecialMarkerTypeForGenericCasting())
1437 return FALSE; // The special marker types cannot be cast to (at this time they are the open generic types; they are, however, valid input to this method).
1439 InterfaceMapIterator it = IterateInterfaceMap();
1442 if (it.GetInterfaceApprox()->CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited, this))
1449 //==========================================================================================
1450 BOOL MethodTable::CanCastByVarianceToInterfaceOrDelegate(MethodTable *pTargetMT, TypeHandlePairList *pVisited, MethodTable* pMTInterfaceMapOwner /*= NULL*/)
1458 PRECONDITION(CheckPointer(pTargetMT));
1459 PRECONDITION(pTargetMT->HasVariance());
1460 PRECONDITION(pTargetMT->IsInterface() || pTargetMT->IsDelegate());
1461 PRECONDITION(IsRestored());
1465 // Shortcut when the types are identical
1466 if (this == pTargetMT)
1471 // Shortcut for generic approx type scenario
1472 if (pMTInterfaceMapOwner != NULL &&
1473 !pMTInterfaceMapOwner->ContainsGenericVariables() &&
1474 IsSpecialMarkerTypeForGenericCasting() &&
1475 GetTypeDefRid() == pTargetMT->GetTypeDefRid() &&
1476 GetModule() == pTargetMT->GetModule() &&
1477 pTargetMT->GetInstantiation().ContainsAllOneType(pMTInterfaceMapOwner))
1482 if (GetTypeDefRid() != pTargetMT->GetTypeDefRid() || GetModule() != pTargetMT->GetModule() ||
1483 TypeHandlePairList::Exists(pVisited, this, pTargetMT))
1488 EEClass* pClass = NULL;
1489 TypeHandlePairList pairList(this, pTargetMT, pVisited);
1490 BOOL returnValue = FALSE;
1493 pClass = pTargetMT->GetClass();
1494 Instantiation inst = GetInstantiation();
1495 Instantiation targetInst = pTargetMT->GetInstantiation();
1497 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
1499 TypeHandle thArg = inst[i];
1500 if (IsSpecialMarkerTypeForGenericCasting() && pMTInterfaceMapOwner && !pMTInterfaceMapOwner->ContainsGenericVariables())
1502 thArg = pMTInterfaceMapOwner;
1505 TypeHandle thTargetArg = targetInst[i];
1507 // If argument types are not equivalent, test them for compatibility
1508 // in accordance with the variance annotation
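// For example (illustrative): IEnumerable<string> can be cast to IEnumerable<object> because the
// type parameter is covariant and string is castable to object; Action<object> can be cast to
// Action<string> because its type parameter is contravariant. An invariant type parameter (as in
// IList<T>) requires the arguments to be equivalent.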
1509 if (!thArg.IsEquivalentTo(thTargetArg))
1511 switch (pClass->GetVarianceOfTypeParameter(i))
1514 if (!thArg.IsBoxedAndCanCastTo(thTargetArg, &pairList))
1518 case gpContravariant :
1519 if (!thTargetArg.IsBoxedAndCanCastTo(thArg, &pairList))
1527 _ASSERTE(!"Illegal variance annotation");
1541 //==========================================================================================
1542 BOOL MethodTable::CanCastToClass(MethodTable *pTargetMT, TypeHandlePairList *pVisited)
1550 PRECONDITION(CheckPointer(pTargetMT));
1551 PRECONDITION(!pTargetMT->IsArray());
1552 PRECONDITION(!pTargetMT->IsInterface());
1556 MethodTable *pMT = this;
1558 // If the target type has variant type parameters, we take a slower path
1559 if (pTargetMT->HasVariance())
1561 // At present, we support variance only on delegates and interfaces
1562 CONSISTENCY_CHECK(pTargetMT->IsDelegate());
1564 // First chase the inheritance hierarchy until we hit a class that differs only in its instantiation
1566 // Cheap check for equivalence
1567 if (pMT->IsEquivalentTo(pTargetMT))
1570 if (pMT->CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited))
1573 pMT = pMT->GetParentMethodTable();
1577 // If there are no variant type parameters, just chase the hierarchy
1581 if (pMT->IsEquivalentTo(pTargetMT))
1584 pMT = pMT->GetParentMethodTable();
1591 #include <optsmallperfcritical.h>
1593 //==========================================================================================
1594 BOOL MethodTable::CanCastTo(MethodTable* pTargetMT, TypeHandlePairList* pVisited)
1602 PRECONDITION(CheckPointer(pTargetMT));
1603 PRECONDITION(IsRestored());
1607 // We cannot cache T --> Nullable<T> here since the result is contextual.
1608 // Callers should have handled this already according to their rules.
1609 _ASSERTE(!Nullable::IsNullableForType(TypeHandle(pTargetMT), this));
1613 if (pTargetMT->IsArray())
1615 return ArrayIsInstanceOf(pTargetMT, pVisited);
1617 else if (pTargetMT->IsInterface() && pTargetMT->HasInstantiation())
1619 return ArraySupportsBizarreInterface(pTargetMT, pVisited);
1622 else if (pTargetMT->IsArray())
1624 CastCache::TryAddToCache(this, pTargetMT, false);
1628 BOOL result = pTargetMT->IsInterface() ?
1629 CanCastToInterface(pTargetMT, pVisited) :
1630 CanCastToClass(pTargetMT, pVisited);
1632 // We only consider type-based conversion rules here.
1633 // Therefore a negative result cannot rule out convertibility for ICastable, IDynamicInterfaceCastable, and COM objects
1634 if (result || !(pTargetMT->IsInterface() && (this->IsComObjectType() || this->IsICastable() || this->IsIDynamicInterfaceCastable())))
1636 CastCache::TryAddToCache(this, pTargetMT, (BOOL)result);
1642 //==========================================================================================
1643 BOOL MethodTable::ArraySupportsBizarreInterface(MethodTable * pInterfaceMT, TypeHandlePairList* pVisited)
1649 PRECONDITION(this->IsArray());
1650 PRECONDITION(pInterfaceMT->IsInterface());
1651 PRECONDITION(pInterfaceMT->HasInstantiation());
1654 // IList<T> & IReadOnlyList<T> are only supported for SZ_ARRAYS
1655 if (this->IsMultiDimArray() ||
1656 !IsImplicitInterfaceOfSZArray(pInterfaceMT))
1658 CastCache::TryAddToCache(this, pInterfaceMT, FALSE);
1662 BOOL result = TypeDesc::CanCastParam(this->GetArrayElementTypeHandle(), pInterfaceMT->GetInstantiation()[0], pVisited);
1664 CastCache::TryAddToCache(this, pInterfaceMT, (BOOL)result);
1668 BOOL MethodTable::ArrayIsInstanceOf(MethodTable* pTargetMT, TypeHandlePairList* pVisited)
1674 PRECONDITION(this->IsArray());
1675 PRECONDITION(pTargetMT->IsArray());
1678 // GetRank touches EEClass. Try to avoid it for SZArrays.
1679 if (pTargetMT->GetInternalCorElementType() == ELEMENT_TYPE_SZARRAY)
1681 if (this->IsMultiDimArray())
1683 CastCache::TryAddToCache(this, pTargetMT, FALSE);
1684 return TypeHandle::CannotCast;
1689 if (this->GetRank() != pTargetMT->GetRank())
1691 CastCache::TryAddToCache(this, pTargetMT, FALSE);
1692 return TypeHandle::CannotCast;
1695 _ASSERTE(this->GetRank() == pTargetMT->GetRank());
1697 TypeHandle elementTypeHandle = this->GetArrayElementTypeHandle();
1698 TypeHandle toElementTypeHandle = pTargetMT->GetArrayElementTypeHandle();
1700 BOOL result = (elementTypeHandle == toElementTypeHandle) ||
1701 TypeDesc::CanCastParam(elementTypeHandle, toElementTypeHandle, pVisited);
1703 CastCache::TryAddToCache(this, pTargetMT, (BOOL)result);
1707 #include <optdefault.h>
1710 MethodTable::IsExternallyVisible()
1722 return GetArrayElementTypeHandle().IsExternallyVisible();
1725 BOOL bIsVisible = IsTypeDefExternallyVisible(GetCl(), GetModule(), GetClass()->GetAttrClass());
1727 if (bIsVisible && HasInstantiation() && !IsGenericTypeDefinition())
1729 for (COUNT_T i = 0; i < GetNumGenericArgs(); i++)
1731 if (!GetInstantiation()[i].IsExternallyVisible())
1737 } // MethodTable::IsExternallyVisible
1739 BOOL MethodTable::IsAllGCPointers()
1741 if (this->ContainsPointers())
1743 // check for canonical GC encoding for all-pointer types
1744 CGCDesc* pDesc = CGCDesc::GetCGCDescFromMT(this);
1745 if (pDesc->GetNumSeries() != 1)
1748 int offsetToData = IsArray() ? ArrayBase::GetDataPtrOffset(this) : sizeof(size_t);
1749 CGCDescSeries* pSeries = pDesc->GetHighestSeries();
1750 return ((int)pSeries->GetSeriesOffset() == offsetToData) &&
1751 ((SSIZE_T)pSeries->GetSeriesSize() == -(SSIZE_T)(offsetToData + sizeof(size_t)));
1761 MethodTable::DebugDumpVtable(LPCUTF8 szClassName, BOOL fDebug)
1763 // Diagnostic functions shouldn't affect normal behavior
1772 const size_t cchBuff = MAX_CLASSNAME_LENGTH + 30;
1773 LPSTR buff = fDebug ? (LPSTR) qb.AllocNoThrow(cchBuff * sizeof(CHAR)) : NULL;
1775 if ((buff == NULL) && fDebug)
1777 OutputDebugStringUtf8("OOM when dumping VTable - falling back to logging");
1783 sprintf_s(buff, cchBuff, "Vtable (with interface dupes) for '%s':\n", szClassName);
1785 sprintf_s(&buff[strlen(buff)], cchBuff - strlen(buff) , " Total duplicate slots = %d\n", g_dupMethods);
1787 OutputDebugStringUtf8(buff);
1791 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1792 LOG((LF_ALWAYS, LL_ALWAYS, "Vtable (with interface dupes) for '%s':\n", szClassName));
1793 LOG((LF_ALWAYS, LL_ALWAYS, " Total duplicate slots = %d\n", g_dupMethods));
1799 MethodIterator it(this);
1800 for (; it.IsValid(); it.Next())
1802 MethodDesc *pMD = it.GetMethodDesc();
1803 LPCUTF8 pszName = pMD->GetName((USHORT) it.GetSlotNumber());
1804 DWORD dwAttrs = pMD->GetAttrs();
1808 DefineFullyQualifiedNameForClass();
1809 LPCUTF8 name = GetFullyQualifiedNameForClass(pMD->GetMethodTable());
1810 sprintf_s(buff, cchBuff,
1811 " slot %2d: %s::%s%s 0x%p (slot = %2d)\n",
1815 IsMdFinal(dwAttrs) ? " (final)" : "",
1816 (VOID *)pMD->GetMethodEntryPoint(),
1819 OutputDebugStringUtf8(buff);
1823 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1824 LOG((LF_ALWAYS, LL_ALWAYS,
1825 " slot %2d: %s::%s%s 0x%p (slot = %2d)\n",
1827 pMD->GetClass()->GetDebugClassName(),
1829 IsMdFinal(dwAttrs) ? " (final)" : "",
1830 (VOID *)pMD->GetMethodEntryPoint(),
1834 if (it.GetSlotNumber() == (DWORD)(GetNumMethods()-1))
1838 OutputDebugStringUtf8(" <-- vtable ends here\n");
1842 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1843 LOG((LF_ALWAYS, LL_ALWAYS, " <-- vtable ends here\n"));
1848 EX_CATCH_HRESULT(hr);
1852 OutputDebugStringUtf8("\n");
1856 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1857 LOG((LF_ALWAYS, LL_ALWAYS, "\n"));
1859 } // MethodTable::DebugDumpVtable
1862 MethodTable::Debug_DumpInterfaceMap(
1863 LPCSTR szInterfaceMapPrefix)
1865 // Diagnostic functions shouldn't affect normal behavior
1873 if (GetNumInterfaces() == 0)
1874 { // There are no interfaces, no point in printing interface map info
1878 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1879 LOG((LF_ALWAYS, LL_ALWAYS,
1880 "%s Interface Map for '%s':\n",
1881 szInterfaceMapPrefix,
1882 GetDebugClassName()));
1883 LOG((LF_ALWAYS, LL_ALWAYS,
1884 " Number of interfaces = %d\n",
1885 GetNumInterfaces()));
1890 InterfaceMapIterator it(this);
1893 MethodTable *pInterfaceMT = it.GetInterfaceApprox();
1895 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1896 LOG((LF_ALWAYS, LL_ALWAYS,
1897 " index %2d: %s 0x%p\n",
1899 pInterfaceMT->GetDebugClassName(),
1902 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1903 LOG((LF_ALWAYS, LL_ALWAYS, " <-- interface map ends here\n"));
1905 EX_CATCH_HRESULT(hr);
1907 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1908 LOG((LF_ALWAYS, LL_ALWAYS, "\n"));
1909 } // MethodTable::Debug_DumpInterfaceMap
1912 MethodTable::Debug_DumpDispatchMap()
1914 WRAPPER_NO_CONTRACT; // It's a dev helper, we don't care about contracts
1916 if (!HasDispatchMap())
1917 { // There is no dispatch map for this type, no point in printing the info
1921 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1922 LOG((LF_ALWAYS, LL_ALWAYS, "Dispatch Map for '%s':\n", GetDebugClassName()));
1924 InterfaceInfo_t * pInterfaceMap = GetInterfaceMap();
1925 DispatchMap::EncodedMapIterator it(this);
1927 while (it.IsValid())
1929 DispatchMapEntry *pEntry = it.Entry();
1931 UINT32 nInterfaceIndex = pEntry->GetTypeID().GetInterfaceNum();
1932 _ASSERTE(nInterfaceIndex < GetNumInterfaces());
1934 MethodTable * pInterface = pInterfaceMap[nInterfaceIndex].GetMethodTable();
1935 UINT32 nInterfaceSlotNumber = pEntry->GetSlotNumber();
1936 UINT32 nImplementationSlotNumber = pEntry->GetTargetSlotNumber();
1937 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1938 LOG((LF_ALWAYS, LL_ALWAYS,
1939 " Interface %d (%s) slot %d (%s) implemented in slot %d (%s)\n",
1941 pInterface->GetDebugClassName(),
1942 nInterfaceSlotNumber,
1943 pInterface->GetMethodDescForSlot(nInterfaceSlotNumber)->GetName(),
1944 nImplementationSlotNumber,
1945 GetMethodDescForSlot(nImplementationSlotNumber)->GetName()));
1949 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1950 LOG((LF_ALWAYS, LL_ALWAYS, " <-- Dispatch map ends here\n"));
1951 } // MethodTable::Debug_DumpDispatchMap
1955 //==========================================================================================
1956 NOINLINE BOOL MethodTable::ImplementsInterface(MethodTable *pInterface)
1958 WRAPPER_NO_CONTRACT;
1960 if (pInterface->IsSpecialMarkerTypeForGenericCasting())
1961 return FALSE; // The special marker types cannot be cast to (at this time they are the open generic types; they are, however, valid input to this method).
1963 return ImplementsInterfaceInline(pInterface);
1966 //==========================================================================================
1967 BOOL MethodTable::ImplementsEquivalentInterface(MethodTable *pInterface)
1973 PRECONDITION(pInterface->IsInterface()); // class we are looking up should be an interface
1977 if (pInterface->IsSpecialMarkerTypeForGenericCasting())
1978 return FALSE; // The special marker types cannot be cast to (at this time they are the open generic types; they are, however, valid input to this method).
1980 // look for exact match first (optimize for success)
1981 if (ImplementsInterfaceInline(pInterface))
1984 if (!pInterface->HasTypeEquivalence())
1987 DWORD numInterfaces = GetNumInterfaces();
1988 if (numInterfaces == 0)
1991 InterfaceInfo_t *pInfo = GetInterfaceMap();
1995 if (pInfo->GetMethodTable()->IsEquivalentTo(pInterface))
2000 while (--numInterfaces);
2005 //==========================================================================================
2006 MethodDesc *MethodTable::GetMethodDescForInterfaceMethod(MethodDesc *pInterfaceMD, BOOL throwOnConflict)
2012 PRECONDITION(!pInterfaceMD->HasClassOrMethodInstantiation());
2015 WRAPPER_NO_CONTRACT;
2017 return GetMethodDescForInterfaceMethod(TypeHandle(pInterfaceMD->GetMethodTable()), pInterfaceMD, throwOnConflict);
2020 //==========================================================================================
2021 MethodDesc *MethodTable::GetMethodDescForInterfaceMethod(TypeHandle ownerType, MethodDesc *pInterfaceMD, BOOL throwOnConflict)
2027 PRECONDITION(!ownerType.IsNull());
2028 PRECONDITION(ownerType.GetMethodTable()->IsInterface());
2029 PRECONDITION(ownerType.GetMethodTable()->HasSameTypeDefAs(pInterfaceMD->GetMethodTable()));
2030 PRECONDITION(IsArray() || ImplementsEquivalentInterface(ownerType.GetMethodTable()) || ownerType.GetMethodTable()->HasVariance());
2034 MethodDesc *pMD = NULL;
2036 MethodTable *pInterfaceMT = ownerType.AsMethodTable();
2038 PCODE pTgt = VirtualCallStubManager::GetTarget(
2039 pInterfaceMT->GetLoaderAllocator()->GetDispatchToken(pInterfaceMT->GetTypeID(), pInterfaceMD->GetSlot()),
2040 this, throwOnConflict);
2043 _ASSERTE(!throwOnConflict);
2046 pMD = MethodTable::GetMethodDescForSlotAddress(pTgt);
2049 MethodDesc *pDispSlotMD = FindDispatchSlotForInterfaceMD(ownerType, pInterfaceMD, throwOnConflict).GetMethodDesc();
2050 _ASSERTE(pDispSlotMD == pMD);
2053 pMD->CheckRestore();
2057 #endif // DACCESS_COMPILE
2059 const DWORD EnCFieldIndex = 0x10000000;
2061 //==========================================================================================
2062 PTR_FieldDesc MethodTable::GetFieldDescByIndex(DWORD fieldIndex)
2064 LIMITED_METHOD_CONTRACT;
2066 // Check if the field index is for an EnC field lookup.
2067 // See GetIndexForFieldDesc() for when this is applied and why.
2068 if ((fieldIndex & EnCFieldIndex) == EnCFieldIndex)
2070 DWORD rid = fieldIndex & ~EnCFieldIndex;
2071 LOG((LF_ENC, LL_INFO100, "MT:GFDBI: rid:0x%08x\n", rid));
2073 mdFieldDef tokenToFind = TokenFromRid(rid, mdtFieldDef);
2074 EncApproxFieldDescIterator fdIterator(
2076 ApproxFieldDescIterator::ALL_FIELDS,
2077 (EncApproxFieldDescIterator::FixUpEncFields | EncApproxFieldDescIterator::OnlyEncFields));
2078 PTR_FieldDesc pField;
2079 while ((pField = fdIterator.Next()) != NULL)
2081 mdFieldDef token = pField->GetMemberDef();
2082 if (tokenToFind == token)
2084 LOG((LF_ENC, LL_INFO100, "MT:GFDBI: Found pField:%p\n", pField));
2089 LOG((LF_ENC, LL_INFO100, "MT:GFDBI: Failed to find rid:0x%08x\n", rid));
2093 if (HasGenericsStaticsInfo() &&
2094 fieldIndex >= GetNumIntroducedInstanceFields())
2096 return GetGenericsStaticFieldDescs() + (fieldIndex - GetNumIntroducedInstanceFields());
2100 return GetClass()->GetFieldDescList() + fieldIndex;
2104 //==========================================================================================
2105 DWORD MethodTable::GetIndexForFieldDesc(FieldDesc *pField)
2107 LIMITED_METHOD_CONTRACT;
2109 // Fields added via EnC are not in a location where computing an index through
2110 // pointer arithmetic is possible. Instead we use the RID and a high
2111 // bit that is ECMA encodable (that is, < 0x1fffffff) and also doesn't
2112 // conflict with any other RID (that is, > 0x00ffffff).
2113 // See FieldDescSlot usage in the JIT interface.
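// Illustrative example: an EnC-added field with token 0x04000007 (mdtFieldDef, rid 7)
// yields the index 7 | EnCFieldIndex = 0x10000007. GetFieldDescByIndex() above detects
// the EnCFieldIndex bit, masks it off to recover rid 7, rebuilds the token via
// TokenFromRid(7, mdtFieldDef) and searches the EnC field iterator for that token.
// The combined value stays below 0x1fffffff, so it remains ECMA encodable as required.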
2114 if (pField->IsEnCNew())
2116 mdFieldDef tok = pField->GetMemberDef();
2117 DWORD rid = RidFromToken(tok);
2118 LOG((LF_ENC, LL_INFO100, "MT:GIFFD: pField:%p rid:0x%08x\n", pField, rid));
2119 return rid | EnCFieldIndex;
2122 if (pField->IsStatic() && HasGenericsStaticsInfo())
2124 FieldDesc *pStaticFields = GetGenericsStaticFieldDescs();
2126 return GetNumIntroducedInstanceFields() + DWORD(pField - pStaticFields);
2131 FieldDesc *pFields = GetClass()->GetFieldDescList();
2133 return DWORD(pField - pFields);
2137 //==========================================================================================
2139 #pragma optimize("t", on)
2141 // Compute whether the type can be considered to have had its
2142 // static initialization run without doing anything at all, i.e. whether we know
2143 // immediately that the type requires no work for initialization.
2145 // If a type used as a representative during JITting is PreInit then
2146 // any types that it may represent within a code-sharing
2147 // group are also PreInit. For example, if List<object> is PreInit then List<string>
2148 // and List<MyType> are also PreInit. This is because the dynamicStatics, staticRefHandles
2149 // and hasCCtor are all identical given a head type, and weakening the domainNeutrality
2150 // to DomainSpecific only makes more types PreInit.
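// For example, a type with only instance fields, no class constructor, no boxed
// regular statics and no dynamic statics is PreInit, while adding a .cctor or a
// static that needs boxed/dynamic storage makes the type require initialization
// work, as the checks below show.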
2151 BOOL MethodTable::IsClassPreInited()
2153 LIMITED_METHOD_CONTRACT;
2155 if (ContainsGenericVariables())
2158 if (HasClassConstructor())
2161 if (HasBoxedRegularStatics())
2164 if (IsDynamicStatics())
2170 #pragma optimize("", on)
2173 //========================================================================================
2177 // Does this type have fields that are implicitly defined through repetition and not explicitly defined in metadata?
2178 bool HasImpliedRepeatedFields(MethodTable* pMT, MethodTable* pFirstFieldValueType = nullptr)
2183 PRECONDITION(CheckPointer(pMT));
2187 // InlineArray types and fixed buffer types have implied repeated fields.
2188 // Checking if a type is an InlineArray type is cheap, so we'll do that first.
2189 if (pMT->GetClass()->IsInlineArray())
2194 DWORD numIntroducedFields = pMT->GetNumIntroducedInstanceFields();
2195 FieldDesc *pFieldStart = pMT->GetApproxFieldDescListRaw();
2196 CorElementType firstFieldElementType = pFieldStart->GetFieldType();
2198 // A fixed buffer type is always a value type that has exactly one value type field at offset 0
2199 // and whose size is an exact multiple of the size of the field.
2200 // It is possible that we catch a false positive with this check, but that chance is extremely slim
2201 // and the user can always change their structure to something more descriptive of what they want
2202 // instead of adding additional padding at the end of a one-field structure.
2203 // We do this check here to save looking up the FixedBufferAttribute when loading the field
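// Illustrative example: the compiler-generated buffer struct for a C# "fixed int data[8]"
// member has a single int field at offset 0 and a total size of 32 bytes; since
// 32 % sizeof(int) == 0, the check below treats it as 8 implied repeated int fields.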
2205 return numIntroducedFields == 1
2206 && ( CorTypeInfo::IsPrimitiveType_NoThrow(firstFieldElementType)
2207 || firstFieldElementType == ELEMENT_TYPE_VALUETYPE)
2208 && (pFieldStart->GetOffset() == 0)
2210 && (pMT->GetNumInstanceFieldBytes() % pFieldStart->GetSize(pFirstFieldValueType) == 0);
2214 #if defined(UNIX_AMD64_ABI_ITF)
2216 #if defined(_DEBUG) && defined(LOGGING)
2218 const char* GetSystemVClassificationTypeName(SystemVClassificationType t)
2222 case SystemVClassificationTypeUnknown: return "Unknown";
2223 case SystemVClassificationTypeStruct: return "Struct";
2224 case SystemVClassificationTypeNoClass: return "NoClass";
2225 case SystemVClassificationTypeMemory: return "Memory";
2226 case SystemVClassificationTypeInteger: return "Integer";
2227 case SystemVClassificationTypeIntegerReference: return "IntegerReference";
2228 case SystemVClassificationTypeIntegerByRef: return "IntegerByReference";
2229 case SystemVClassificationTypeSSE: return "SSE";
2230 default: return "ERROR";
2233 #endif // _DEBUG && LOGGING
2235 // Returns 'true' if the struct is passed in registers, 'false' otherwise.
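// Note: as the code below shows, when useNativeLayout is true the classification
// follows the native (marshalled) layout, which is the path used when classifying
// structs for P/Invoke; otherwise the managed field layout is classified.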
2236 bool MethodTable::ClassifyEightBytes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool useNativeLayout, MethodTable** pByValueClassCache)
2238 if (useNativeLayout)
2240 _ASSERTE(pByValueClassCache == NULL);
2241 return ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel, startOffsetOfStruct, GetNativeLayoutInfo());
2245 return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout, pByValueClassCache);
2249 // If we have a field classification already, but there is a union, we must merge the classification type of the field. Returns the
2250 // new, merged classification type.
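// Illustrative example: in an explicit-layout struct where a float and an int overlap
// at the same offset, the float is first recorded as SSE; when the int at the same
// offset is merged below, the result is Integer, because Integer overrides SSE.
// GC references are never allowed to overlap a different classification, which is
// what the asserts below encode.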
2252 static SystemVClassificationType ReClassifyField(SystemVClassificationType originalClassification, SystemVClassificationType newFieldClassification)
2254 _ASSERTE((newFieldClassification == SystemVClassificationTypeInteger) ||
2255 (newFieldClassification == SystemVClassificationTypeIntegerReference) ||
2256 (newFieldClassification == SystemVClassificationTypeIntegerByRef) ||
2257 (newFieldClassification == SystemVClassificationTypeSSE));
2259 switch (newFieldClassification)
2261 case SystemVClassificationTypeInteger:
2262 // Integer overrides everything; the resulting classification is Integer. Can't merge Integer and IntegerReference.
2263 _ASSERTE((originalClassification == SystemVClassificationTypeInteger) ||
2264 (originalClassification == SystemVClassificationTypeSSE));
2266 return SystemVClassificationTypeInteger;
2268 case SystemVClassificationTypeSSE:
2269 // If the old and new classifications are both SSE, then the merge is SSE, otherwise it will be integer. Can't merge SSE and IntegerReference.
2270 _ASSERTE((originalClassification == SystemVClassificationTypeInteger) ||
2271 (originalClassification == SystemVClassificationTypeSSE));
2273 if (originalClassification == SystemVClassificationTypeSSE)
2275 return SystemVClassificationTypeSSE;
2279 return SystemVClassificationTypeInteger;
2282 case SystemVClassificationTypeIntegerReference:
2283 // IntegerReference can only merge with IntegerReference.
2284 _ASSERTE(originalClassification == SystemVClassificationTypeIntegerReference);
2285 return SystemVClassificationTypeIntegerReference;
2287 case SystemVClassificationTypeIntegerByRef:
2288 // IntegerByReference can only merge with IntegerByReference.
2289 _ASSERTE(originalClassification == SystemVClassificationTypeIntegerByRef);
2290 return SystemVClassificationTypeIntegerByRef;
2293 _ASSERTE(false); // Unexpected type.
2294 return SystemVClassificationTypeUnknown;
2298 static MethodTable* ByValueClassCacheLookup(MethodTable** pByValueClassCache, unsigned index)
2300 LIMITED_METHOD_CONTRACT;
2301 if (pByValueClassCache == NULL)
2304 return pByValueClassCache[index];
2307 // Returns 'true' if the struct is passed in registers, 'false' otherwise.
2308 bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassingHelperPtr helperPtr,
2309 unsigned int nestingLevel,
2310 unsigned int startOffsetOfStruct,
2311 bool useNativeLayout,
2312 MethodTable** pByValueClassCache)
2314 STANDARD_VM_CONTRACT;
2316 DWORD numIntroducedFields = GetNumIntroducedInstanceFields();
2318 // It appears the VM gives a struct with no fields a size of 1.
2319 // Don't pass such a structure in registers.
2320 if (numIntroducedFields == 0)
2325 // The SIMD Intrinsic types are meant to be handled specially and should not be passed in registers as structs
2326 if (IsIntrinsicType())
2328 LPCUTF8 namespaceName;
2329 LPCUTF8 className = GetFullyQualifiedNameInfo(&namespaceName);
2331 if ((strcmp(className, "Vector512`1") == 0) || (strcmp(className, "Vector256`1") == 0) ||
2332 (strcmp(className, "Vector128`1") == 0) || (strcmp(className, "Vector64`1") == 0))
2334 assert(strcmp(namespaceName, "System.Runtime.Intrinsics") == 0);
2336 LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithManagedLayout: struct %s is a SIMD intrinsic type; will not be enregistered\n",
2337 nestingLevel * 5, "", this->GetDebugClassName()));
2342 if ((strcmp(className, "Vector`1") == 0) && (strcmp(namespaceName, "System.Numerics") == 0))
2344 LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithManagedLayout: struct %s is a SIMD intrinsic type; will not be enregistered\n",
2345 nestingLevel * 5, "", this->GetDebugClassName()));
2352 LOG((LF_JIT, LL_EVERYTHING, "%*s**** Classify %s (%p), startOffset %d, total struct size %d\n",
2353 nestingLevel * 5, "", this->GetDebugClassName(), this, startOffsetOfStruct, helperPtr->structSize));
2356 FieldDesc *pFieldStart = GetApproxFieldDescListRaw();
2358 bool hasImpliedRepeatedFields = HasImpliedRepeatedFields(this, ByValueClassCacheLookup(pByValueClassCache, 0));
2360 if (hasImpliedRepeatedFields)
2362 numIntroducedFields = GetNumInstanceFieldBytes() / pFieldStart->GetSize(ByValueClassCacheLookup(pByValueClassCache, 0));
2365 for (unsigned int fieldIndex = 0; fieldIndex < numIntroducedFields; fieldIndex++)
2369 unsigned int fieldIndexForCacheLookup = fieldIndex;
2371 if (hasImpliedRepeatedFields)
2373 pField = pFieldStart;
2374 fieldIndexForCacheLookup = 0;
2375 fieldOffset = fieldIndex * pField->GetSize(ByValueClassCacheLookup(pByValueClassCache, fieldIndexForCacheLookup));
2379 pField = &pFieldStart[fieldIndex];
2380 fieldOffset = pField->GetOffset();
2383 unsigned int normalizedFieldOffset = fieldOffset + startOffsetOfStruct;
2385 unsigned int fieldSize = pField->GetSize(ByValueClassCacheLookup(pByValueClassCache, fieldIndexForCacheLookup));
2386 _ASSERTE(fieldSize != (unsigned int)-1);
2388 // The field can't span past the end of the struct.
2389 if ((normalizedFieldOffset + fieldSize) > helperPtr->structSize)
2391 _ASSERTE(false && "Invalid struct size. The size of fields and overall size don't agree");
2395 CorElementType fieldType = pField->GetFieldType();
2396 SystemVClassificationType fieldClassificationType = CorInfoType2UnixAmd64Classification(fieldType);
2400 pField->GetName_NoThrow(&fieldName);
2402 if (fieldClassificationType == SystemVClassificationTypeStruct)
2405 if (pByValueClassCache != NULL)
2406 th = TypeHandle(pByValueClassCache[fieldIndexForCacheLookup]);
2408 th = pField->GetApproxFieldTypeHandleThrowing();
2409 _ASSERTE(!th.IsNull());
2410 MethodTable* pFieldMT = th.GetMethodTable();
2412 bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct;
2413 helperPtr->inEmbeddedStruct = true;
2415 bool structRet = false;
2416 // If classifying for marshaling/PInvoke and the aggregated struct has a native layout
2417 // use the native classification. If not, continue using the managed layout.
2418 if (useNativeLayout && pFieldMT->HasLayout())
2420 structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, pFieldMT->GetNativeLayoutInfo());
2424 structRet = pFieldMT->ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout, NULL);
2427 helperPtr->inEmbeddedStruct = inEmbeddedStructPrev;
2431 // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return "do not enregister".
2438 if ((normalizedFieldOffset % fieldSize) != 0)
2440 // The spec requires that fields of a struct passed in registers
2441 // be at their natural alignment.
2443 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d not at natural alignment; not enregistering struct\n",
2444 nestingLevel * 5, "", fieldIndex, fieldName, fieldOffset, normalizedFieldOffset, fieldSize));
2448 if ((int)normalizedFieldOffset <= helperPtr->largestFieldOffset)
2450 // Find the field corresponding to this offset and update the size if needed.
2451 // If the offset matches a previously encountered offset, update the classification and field size.
2453 for (i = helperPtr->currentUniqueOffsetField - 1; i >= 0; i--)
2455 if (helperPtr->fieldOffsets[i] == normalizedFieldOffset)
2457 if (fieldSize > helperPtr->fieldSizes[i])
2459 helperPtr->fieldSizes[i] = fieldSize;
2462 helperPtr->fieldClassifications[i] = ReClassifyField(helperPtr->fieldClassifications[i], fieldClassificationType);
2464 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d, union with uniqueOffsetField %d, field type classification %s, reclassified field to %s\n",
2465 nestingLevel * 5, "", fieldIndex, fieldName, fieldOffset, normalizedFieldOffset, fieldSize, i,
2466 GetSystemVClassificationTypeName(fieldClassificationType),
2467 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[i])));
2475 // The proper size of the union set of fields has been set above; continue to the next field.
2481 helperPtr->largestFieldOffset = (int)normalizedFieldOffset;
2484 // Set the data for a new field.
2486 // The new field classification must not have been initialized yet.
2487 _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass);
2489 // There are only a few field classifications that are allowed.
2490 _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) ||
2491 (fieldClassificationType == SystemVClassificationTypeIntegerReference) ||
2492 (fieldClassificationType == SystemVClassificationTypeIntegerByRef) ||
2493 (fieldClassificationType == SystemVClassificationTypeSSE));
2495 helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType;
2496 helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldSize;
2497 helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset;
2499 LOG((LF_JIT, LL_EVERYTHING, " %*s**** Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n",
2500 nestingLevel * 5, "", fieldIndex, fieldName, fieldOffset, normalizedFieldOffset, fieldSize, helperPtr->currentUniqueOffsetField,
2501 GetSystemVClassificationTypeName(fieldClassificationType),
2502 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField])));
2504 _ASSERTE(helperPtr->currentUniqueOffsetField < SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT);
2505 helperPtr->currentUniqueOffsetField++;
2506 } // end per-field for loop
2508 AssignClassifiedEightByteTypes(helperPtr, nestingLevel);
2513 // Returns 'true' if the struct is passed in registers, 'false' otherwise.
2514 bool MethodTable::ClassifyEightBytesWithNativeLayout(SystemVStructRegisterPassingHelperPtr helperPtr,
2515 unsigned int nestingLevel,
2516 unsigned int startOffsetOfStruct,
2517 EEClassNativeLayoutInfo const* pNativeLayoutInfo)
2519 STANDARD_VM_CONTRACT;
2521 #ifdef DACCESS_COMPILE
2522 // No register classification for this case.
2524 #else // DACCESS_COMPILE
2528 // If there is no native layout for this struct, use the managed layout instead.
2529 return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, true, NULL);
2532 const NativeFieldDescriptor *pNativeFieldDescs = pNativeLayoutInfo->GetNativeFieldDescriptors();
2533 UINT numIntroducedFields = pNativeLayoutInfo->GetNumFields();
2536 if (numIntroducedFields == 0)
2541 bool hasImpliedRepeatedFields = HasImpliedRepeatedFields(this);
2543 if (hasImpliedRepeatedFields)
2545 numIntroducedFields = pNativeLayoutInfo->GetSize() / pNativeFieldDescs->NativeSize();
2548 // The SIMD Intrinsic types are meant to be handled specially and should not be passed in registers as structs
2549 if (IsIntrinsicType())
2551 LPCUTF8 namespaceName;
2552 LPCUTF8 className = GetFullyQualifiedNameInfo(&namespaceName);
2554 if ((strcmp(className, "Vector512`1") == 0) || (strcmp(className, "Vector256`1") == 0) ||
2555 (strcmp(className, "Vector128`1") == 0) || (strcmp(className, "Vector64`1") == 0))
2557 assert(strcmp(namespaceName, "System.Runtime.Intrinsics") == 0);
2559 LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithNativeLayout: struct %s is a SIMD intrinsic type; will not be enregistered\n",
2560 nestingLevel * 5, "", this->GetDebugClassName()));
2565 if ((strcmp(className, "Vector`1") == 0) && (strcmp(namespaceName, "System.Numerics") == 0))
2567 LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithNativeLayout: struct %s is a SIMD intrinsic type; will not be enregistered\n",
2568 nestingLevel * 5, "", this->GetDebugClassName()));
2575 LOG((LF_JIT, LL_EVERYTHING, "%*s**** Classify for native struct %s (%p), startOffset %d, total struct size %d\n",
2576 nestingLevel * 5, "", this->GetDebugClassName(), this, startOffsetOfStruct, helperPtr->structSize));
2579 for (unsigned int fieldIndex = 0; fieldIndex < numIntroducedFields; fieldIndex++)
2581 const NativeFieldDescriptor* pNFD;
2582 if (hasImpliedRepeatedFields)
2584 // Reuse the first native field descriptor for all fields if this is a fixed buffer.
2585 pNFD = pNativeFieldDescs;
2589 pNFD = &pNativeFieldDescs[fieldIndex];
2592 FieldDesc *pField = pNFD->GetFieldDesc();
2593 CorElementType fieldType = pField->GetFieldType();
2595 // Invalid field type.
2596 if (fieldType == ELEMENT_TYPE_END)
2601 unsigned int fieldNativeSize = pNFD->NativeSize();
2602 DWORD fieldOffset = pNFD->GetExternalOffset();
2604 if (hasImpliedRepeatedFields)
2606 // Since we reuse the NativeFieldDescriptor for fixed buffers, we need to adjust the offset.
2607 fieldOffset += fieldIndex * fieldNativeSize;
2610 unsigned normalizedFieldOffset = fieldOffset + startOffsetOfStruct;
2613 _ASSERTE(fieldNativeSize != (unsigned int)-1);
2615 // The field can't span past the end of the struct.
2616 if ((normalizedFieldOffset + fieldNativeSize) > helperPtr->structSize)
2618 _ASSERTE(false && "Invalid native struct size. The size of fields and overall size don't agree");
2622 SystemVClassificationType fieldClassificationType = SystemVClassificationTypeUnknown;
2626 pField->GetName_NoThrow(&fieldName);
2629 NativeFieldCategory nfc = pNFD->GetCategory();
2631 if (nfc == NativeFieldCategory::NESTED)
2633 unsigned int numElements = pNFD->GetNumElements();
2634 unsigned int nestedElementOffset = normalizedFieldOffset;
2636 MethodTable* pFieldMT = pNFD->GetNestedNativeMethodTable();
2638 if (pFieldMT == nullptr)
2640 // If there is no method table that represents the native layout, then assume
2641 // that the type cannot be enregistered.
2645 const unsigned int nestedElementSize = pFieldMT->GetNativeSize();
2646 for (unsigned int i = 0; i < numElements; ++i, nestedElementOffset += nestedElementSize)
2648 bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct;
2649 helperPtr->inEmbeddedStruct = true;
2650 bool structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, nestedElementOffset, pFieldMT->GetNativeLayoutInfo());
2651 helperPtr->inEmbeddedStruct = inEmbeddedStructPrev;
2655 // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return "do not enregister".
2661 else if (nfc == NativeFieldCategory::FLOAT)
2663 fieldClassificationType = SystemVClassificationTypeSSE;
2665 else if (nfc == NativeFieldCategory::INTEGER)
2667 fieldClassificationType = SystemVClassificationTypeInteger;
2669 else if (nfc == NativeFieldCategory::ILLEGAL)
2675 UNREACHABLE_MSG("Invalid native field subcategory.");
2678 if ((normalizedFieldOffset % pNFD->AlignmentRequirement()) != 0)
2680 // The spec requires that fields of a struct passed in registers
2681 // be at their natural alignment.
2683 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Native Field %d %s: offset %d (normalized %d), required alignment %d not at natural alignment; not enregistering struct\n",
2684 nestingLevel * 5, "", fieldIndex, fieldName, fieldOffset, normalizedFieldOffset, pNFD->AlignmentRequirement()));
2688 if ((int)normalizedFieldOffset <= helperPtr->largestFieldOffset)
2690 // Find the field corresponding to this offset and update the size if needed.
2691 // If the offset matches a previously encountered offset, update the classification and field size.
2692 // We do not need to worry about this change incorrectly updating an eightbyte
2693 // by updating a field that spans multiple eightbytes, since the only field that does so is a fixed array,
2694 // and a fixed array is represented by an array object in managed code, which nothing can share an offset with.
2696 for (i = helperPtr->currentUniqueOffsetField - 1; i >= 0; i--)
2698 if (helperPtr->fieldOffsets[i] == normalizedFieldOffset)
2700 if (fieldNativeSize > helperPtr->fieldSizes[i])
2702 helperPtr->fieldSizes[i] = fieldNativeSize;
2705 helperPtr->fieldClassifications[i] = ReClassifyField(helperPtr->fieldClassifications[i], fieldClassificationType);
2707 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Native Field %d %s: offset %d (normalized %d), native size %d, union with uniqueOffsetField %d, field type classification %s, reclassified field to %s\n",
2708 nestingLevel * 5, "", fieldIndex, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize, i,
2709 GetSystemVClassificationTypeName(fieldClassificationType),
2710 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[i])));
2718 // The proper size of the union set of fields has been set above; continue to the next field.
2724 helperPtr->largestFieldOffset = (int)normalizedFieldOffset;
2727 // Set the data for a new field.
2729 // The new field classification must not have been initialized yet.
2730 _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass);
2732 // There are only a few field classifications that are allowed.
2733 _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) ||
2734 (fieldClassificationType == SystemVClassificationTypeIntegerReference) ||
2735 (fieldClassificationType == SystemVClassificationTypeIntegerByRef) ||
2736 (fieldClassificationType == SystemVClassificationTypeSSE));
2738 helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType;
2739 helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldNativeSize;
2740 helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset;
2742 LOG((LF_JIT, LL_EVERYTHING, " %*s**** Native Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n",
2743 nestingLevel * 5, "", fieldIndex, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize, helperPtr->currentUniqueOffsetField,
2744 GetSystemVClassificationTypeName(fieldClassificationType),
2745 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField])));
2747 _ASSERTE(helperPtr->currentUniqueOffsetField < SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT);
2748 helperPtr->currentUniqueOffsetField++;
2749 } // end per-field for loop
2751 AssignClassifiedEightByteTypes(helperPtr, nestingLevel);
2754 #endif // DACCESS_COMPILE
2757 // Assigns the classification types to the array with eightbyte types.
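// Worked example (illustrative): for a 16-byte managed struct { object o; double d; }
// the per-field pass records "o" at offset 0, size 8, IntegerReference and "d" at
// offset 8, size 8, SSE. The loop below then produces two eightbytes: eightbyte 0
// classified IntegerReference (passed in an integer register and reported to the GC)
// and eightbyte 1 classified SSE (passed in a floating point register).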
2758 void MethodTable::AssignClassifiedEightByteTypes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel) const
2760 static const size_t CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS = CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS * SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES;
2761 static_assert_no_msg(CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS == SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT);
2763 if (!helperPtr->inEmbeddedStruct)
2765 _ASSERTE(nestingLevel == 0);
2767 int largestFieldOffset = helperPtr->largestFieldOffset;
2768 _ASSERTE(largestFieldOffset != -1);
2770 // We're at the top level of the recursion, and we're done looking at the fields.
2771 // Now sort the fields by offset and set the output data.
2773 int sortedFieldOrder[CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS];
2774 for (unsigned i = 0; i < CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS; i++)
2776 sortedFieldOrder[i] = -1;
2779 unsigned numFields = helperPtr->currentUniqueOffsetField;
2780 for (unsigned i = 0; i < numFields; i++)
2782 _ASSERTE(helperPtr->fieldOffsets[i] < CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS);
2783 _ASSERTE(sortedFieldOrder[helperPtr->fieldOffsets[i]] == -1); // we haven't seen this field offset yet.
2784 sortedFieldOrder[helperPtr->fieldOffsets[i]] = i;
2787 // Calculate the eightbytes and their types.
2789 int lastFieldOrdinal = sortedFieldOrder[largestFieldOffset];
2790 unsigned int offsetAfterLastFieldByte = largestFieldOffset + helperPtr->fieldSizes[lastFieldOrdinal];
2791 SystemVClassificationType lastFieldClassification = helperPtr->fieldClassifications[lastFieldOrdinal];
2793 unsigned int usedEightBytes = 0;
2794 unsigned int accumulatedSizeForEightBytes = 0;
2795 bool foundFieldInEightByte = false;
2796 for (unsigned int offset = 0; offset < helperPtr->structSize; offset++)
2798 SystemVClassificationType fieldClassificationType;
2799 unsigned int fieldSize = 0;
2801 int ordinal = sortedFieldOrder[offset];
2804 if (offset < accumulatedSizeForEightBytes)
2806 // We're within a field and there is not an overlapping field that starts here.
2807 // There's no work we need to do, so go to the next loop iteration.
2811 // If there is no field that starts at this offset and we are not within another field,
2812 // treat its contents as padding.
2813 // Any padding that follows the last field receives the same classification as the
2814 // last field; padding between fields receives the NO_CLASS classification as per
2815 // the SysV ABI spec.
2817 fieldClassificationType = offset < offsetAfterLastFieldByte ? SystemVClassificationTypeNoClass : lastFieldClassification;
2821 foundFieldInEightByte = true;
2822 fieldSize = helperPtr->fieldSizes[ordinal];
2823 _ASSERTE(fieldSize > 0);
2825 fieldClassificationType = helperPtr->fieldClassifications[ordinal];
2826 _ASSERTE(fieldClassificationType != SystemVClassificationTypeMemory && fieldClassificationType != SystemVClassificationTypeUnknown);
2829 unsigned int fieldStartEightByte = offset / SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES;
2830 unsigned int fieldEndEightByte = (offset + fieldSize - 1) / SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES;
2832 _ASSERTE(fieldEndEightByte < CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS);
2834 usedEightBytes = Max(usedEightBytes, fieldEndEightByte + 1);
2836 for (unsigned int currentFieldEightByte = fieldStartEightByte; currentFieldEightByte <= fieldEndEightByte; currentFieldEightByte++)
2838 if (helperPtr->eightByteClassifications[currentFieldEightByte] == fieldClassificationType)
2840 // Do nothing. The eight-byte already has this classification.
2842 else if (helperPtr->eightByteClassifications[currentFieldEightByte] == SystemVClassificationTypeNoClass)
2844 helperPtr->eightByteClassifications[currentFieldEightByte] = fieldClassificationType;
2846 else if ((helperPtr->eightByteClassifications[currentFieldEightByte] == SystemVClassificationTypeInteger) ||
2847 (fieldClassificationType == SystemVClassificationTypeInteger))
2849 _ASSERTE((fieldClassificationType != SystemVClassificationTypeIntegerReference) &&
2850 (fieldClassificationType != SystemVClassificationTypeIntegerByRef));
2852 helperPtr->eightByteClassifications[currentFieldEightByte] = SystemVClassificationTypeInteger;
2854 else if ((helperPtr->eightByteClassifications[currentFieldEightByte] == SystemVClassificationTypeIntegerReference) ||
2855 (fieldClassificationType == SystemVClassificationTypeIntegerReference))
2857 helperPtr->eightByteClassifications[currentFieldEightByte] = SystemVClassificationTypeIntegerReference;
2859 else if ((helperPtr->eightByteClassifications[currentFieldEightByte] == SystemVClassificationTypeIntegerByRef) ||
2860 (fieldClassificationType == SystemVClassificationTypeIntegerByRef))
2862 helperPtr->eightByteClassifications[currentFieldEightByte] = SystemVClassificationTypeIntegerByRef;
2866 helperPtr->eightByteClassifications[currentFieldEightByte] = SystemVClassificationTypeSSE;
2870 if ((offset + 1) % SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES == 0) // If we just finished checking the last byte of an eightbyte
2872 if (!foundFieldInEightByte)
2874 // If we didn't find a field in an eight-byte (i.e. there are no explicit offsets that start a field in this eightbyte)
2875 // then the classification of this eightbyte might be NoClass. We can't hand a classification of NoClass to the JIT
2876 // so set the class to Integer (as though the struct has a char[8] padding) if the class is NoClass.
2877 if (helperPtr->eightByteClassifications[offset / SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES] == SystemVClassificationTypeNoClass)
2879 helperPtr->eightByteClassifications[offset / SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES] = SystemVClassificationTypeInteger;
2883 foundFieldInEightByte = false;
2886 accumulatedSizeForEightBytes = Max(accumulatedSizeForEightBytes, offset + fieldSize);
2889 for (unsigned int currentEightByte = 0; currentEightByte < usedEightBytes; currentEightByte++)
2891 unsigned int eightByteSize = accumulatedSizeForEightBytes < (SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES * (currentEightByte + 1))
2892 ? accumulatedSizeForEightBytes % SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES
2893 : SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES;
2895 // Save data for this eightbyte.
2896 helperPtr->eightByteSizes[currentEightByte] = eightByteSize;
2897 helperPtr->eightByteOffsets[currentEightByte] = currentEightByte * SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES;
2900 helperPtr->eightByteCount = usedEightBytes;
2902 _ASSERTE(helperPtr->eightByteCount <= CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS);
2905 LOG((LF_JIT, LL_EVERYTHING, " ----\n"));
2906 LOG((LF_JIT, LL_EVERYTHING, " **** Number EightBytes: %d\n", helperPtr->eightByteCount));
2907 for (unsigned i = 0; i < helperPtr->eightByteCount; i++)
2909 _ASSERTE(helperPtr->eightByteClassifications[i] != SystemVClassificationTypeNoClass);
2910 LOG((LF_JIT, LL_EVERYTHING, " **** eightByte %d -- classType: %s, eightByteOffset: %d, eightByteSize: %d\n",
2911 i, GetSystemVClassificationTypeName(helperPtr->eightByteClassifications[i]), helperPtr->eightByteOffsets[i], helperPtr->eightByteSizes[i]));
2917 #endif // defined(UNIX_AMD64_ABI_ITF)
2919 #if defined(TARGET_LOONGARCH64)
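// Note: as used by the register-passing flag computation below, this helper reports
// whether the struct, looking through single nested value-type fields, ultimately
// contains exactly one field; only such nested structs can contribute a lone
// float/double field to the flags.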
2921 bool MethodTable::IsLoongArch64OnlyOneField(MethodTable * pMT)
2925 bool useNativeLayout = false;
2927 MethodTable* pMethodTable = nullptr;
2929 if (!th.IsTypeDesc())
2931 pMethodTable = th.AsMethodTable();
2932 if (pMethodTable->HasLayout())
2934 useNativeLayout = true;
2936 else if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
2938 DWORD numIntroducedFields = pMethodTable->GetNumIntroducedInstanceFields();
2940 if (numIntroducedFields == 1)
2942 FieldDesc *pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
2944 CorElementType fieldType = pFieldStart[0].GetFieldType();
2946 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
2950 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
2952 pMethodTable = pFieldStart->GetApproxFieldTypeHandleThrowing().GetMethodTable();
2953 if (pMethodTable->GetNumIntroducedInstanceFields() == 1)
2955 ret = IsLoongArch64OnlyOneField(pMethodTable);
2964 _ASSERTE(th.IsNativeValueType());
2966 useNativeLayout = true;
2967 pMethodTable = th.AsNativeValueType();
2969 _ASSERTE(pMethodTable != nullptr);
2971 if (useNativeLayout)
2973 if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
2975 DWORD numIntroducedFields = pMethodTable->GetNativeLayoutInfo()->GetNumFields();
2976 FieldDesc *pFieldStart = nullptr;
2978 if (numIntroducedFields == 1)
2980 pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
2982 CorElementType fieldType = pFieldStart->GetFieldType();
2984 // InlineArray types and fixed buffer types have implied repeated fields.
2985 // Checking if a type is an InlineArray type is cheap, so we'll do that first.
2986 bool hasImpliedRepeatedFields = HasImpliedRepeatedFields(pMethodTable);
2988 if (hasImpliedRepeatedFields)
2990 numIntroducedFields = pMethodTable->GetNumInstanceFieldBytes() / pFieldStart->GetSize();
2991 if (numIntroducedFields != 1)
2997 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3001 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3003 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
3004 NativeFieldCategory nfc = pNativeFieldDescs->GetCategory();
3005 if (nfc == NativeFieldCategory::NESTED)
3007 pMethodTable = pNativeFieldDescs->GetNestedNativeMethodTable();
3008 ret = IsLoongArch64OnlyOneField(pMethodTable);
3010 else if (nfc != NativeFieldCategory::ILLEGAL)
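// Note: as the code below shows, for structs of at most 16 bytes and at most two
// fields the returned flags describe which fields are floating point and which are
// 8 bytes wide: STRUCT_FLOAT_FIELD_ONLY_ONE for a single float/double,
// STRUCT_FLOAT_FIELD_ONLY_TWO / STRUCT_FIELD_TWO_DOUBLES for two floats / two doubles,
// STRUCT_FLOAT_FIELD_FIRST / STRUCT_FLOAT_FIELD_SECOND when only one of the two fields
// is floating point, combined with STRUCT_FIRST_FIELD_SIZE_IS8 / STRUCT_SECOND_FIELD_SIZE_IS8
// for 8-byte fields. STRUCT_NO_FLOAT_FIELD means no floating point registers are used.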
3027 int MethodTable::GetLoongArch64PassStructInRegisterFlags(CORINFO_CLASS_HANDLE cls)
3031 bool useNativeLayout = false;
3032 int size = STRUCT_NO_FLOAT_FIELD;
3033 MethodTable* pMethodTable = nullptr;
3035 if (!th.IsTypeDesc())
3037 pMethodTable = th.AsMethodTable();
3038 if (pMethodTable->HasLayout())
3040 useNativeLayout = true;
3042 else if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
3044 DWORD numIntroducedFields = pMethodTable->GetNumIntroducedInstanceFields();
3046 if (numIntroducedFields == 1)
3048 FieldDesc *pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3050 CorElementType fieldType = pFieldStart[0].GetFieldType();
3052 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3054 if (fieldType == ELEMENT_TYPE_R4)
3056 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3058 else if (fieldType == ELEMENT_TYPE_R8)
3060 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3063 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3065 pMethodTable = pFieldStart->GetApproxFieldTypeHandleThrowing().GetMethodTable();
3066 size = GetLoongArch64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3069 else if (numIntroducedFields == 2)
3071 FieldDesc *pFieldSecond;
3072 FieldDesc *pFieldFirst = pMethodTable->GetApproxFieldDescListRaw();
3073 if (pFieldFirst->GetOffset() == 0)
3075 pFieldSecond = pFieldFirst + 1;
3079 pFieldSecond = pFieldFirst;
3080 pFieldFirst = pFieldFirst + 1;
3082 assert(pFieldFirst->GetOffset() == 0);
3084 if (pFieldFirst->GetSize() > 8)
3089 CorElementType fieldType = pFieldFirst[0].GetFieldType();
3090 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3092 if (fieldType == ELEMENT_TYPE_R4)
3094 size = STRUCT_FLOAT_FIELD_FIRST;
3096 else if (fieldType == ELEMENT_TYPE_R8)
3098 size = STRUCT_FIRST_FIELD_DOUBLE;
3100 else if (pFieldFirst[0].GetSize() == 8)
3102 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3106 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3108 pMethodTable = pFieldFirst->GetApproxFieldTypeHandleThrowing().GetMethodTable();
3109 if (IsLoongArch64OnlyOneField(pMethodTable))
3111 size = GetLoongArch64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3112 if ((size & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
3114 size = pFieldFirst[0].GetSize() == 8 ? STRUCT_FIRST_FIELD_DOUBLE : STRUCT_FLOAT_FIELD_FIRST;
3116 else if (size == STRUCT_NO_FLOAT_FIELD)
3118 size = pFieldFirst[0].GetSize() == 8 ? STRUCT_FIRST_FIELD_SIZE_IS8: 0;
3122 size = STRUCT_NO_FLOAT_FIELD;
3128 size = STRUCT_NO_FLOAT_FIELD;
3132 else if (pFieldFirst[0].GetSize() == 8)
3134 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3137 fieldType = pFieldSecond[0].GetFieldType();
3138 if (pFieldSecond[0].GetSize() > 8)
3140 size = STRUCT_NO_FLOAT_FIELD;
3143 else if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3145 if (fieldType == ELEMENT_TYPE_R4)
3147 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3149 else if (fieldType == ELEMENT_TYPE_R8)
3151 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3153 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3155 size = STRUCT_NO_FLOAT_FIELD;
3157 else if (pFieldSecond[0].GetSize() == 8)
3159 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3162 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3164 pMethodTable = pFieldSecond[0].GetApproxFieldTypeHandleThrowing().GetMethodTable();
3165 if (IsLoongArch64OnlyOneField(pMethodTable))
3167 int size2 = GetLoongArch64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3168 if ((size2 & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
3170 if (pFieldSecond[0].GetSize() == 8)
3172 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3176 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3179 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3181 size = STRUCT_NO_FLOAT_FIELD;
3183 else if (size2 == STRUCT_NO_FLOAT_FIELD)
3185 size |= pFieldSecond[0].GetSize() == 8 ? STRUCT_SECOND_FIELD_SIZE_IS8 : 0;
3189 size = STRUCT_NO_FLOAT_FIELD;
3194 size = STRUCT_NO_FLOAT_FIELD;
3197 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3199 size = STRUCT_NO_FLOAT_FIELD;
3201 else if (pFieldSecond[0].GetSize() == 8)
3203 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3212 _ASSERTE(th.IsNativeValueType());
3214 useNativeLayout = true;
3215 pMethodTable = th.AsNativeValueType();
3217 _ASSERTE(pMethodTable != nullptr);
3219 if (useNativeLayout)
3221 if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
3223 DWORD numIntroducedFields = pMethodTable->GetNativeLayoutInfo()->GetNumFields();
3224 FieldDesc *pFieldStart = nullptr;
3226 if (numIntroducedFields == 1)
3228 pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3230 CorElementType fieldType = pFieldStart->GetFieldType();
3232 // InlineArray types and fixed buffer types have implied repeated fields.
3233 // Checking if a type is an InlineArray type is cheap, so we'll do that first.
3234 bool hasImpliedRepeatedFields = HasImpliedRepeatedFields(pMethodTable);
3236 if (hasImpliedRepeatedFields)
3238 numIntroducedFields = pMethodTable->GetNumInstanceFieldBytes() / pFieldStart->GetSize();
3239 if (numIntroducedFields > 2)
3244 if (fieldType == ELEMENT_TYPE_R4)
3246 if (numIntroducedFields == 1)
3248 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3250 else if (numIntroducedFields == 2)
3252 size = STRUCT_FLOAT_FIELD_ONLY_TWO;
3256 else if (fieldType == ELEMENT_TYPE_R8)
3258 if (numIntroducedFields == 1)
3260 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3262 else if (numIntroducedFields == 2)
3264 size = STRUCT_FIELD_TWO_DOUBLES;
3270 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3272 if (fieldType == ELEMENT_TYPE_R4)
3274 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3276 else if (fieldType == ELEMENT_TYPE_R8)
3278 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3281 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3283 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
3284 NativeFieldCategory nfc = pNativeFieldDescs->GetCategory();
3285 if (nfc == NativeFieldCategory::NESTED)
3287 pMethodTable = pNativeFieldDescs->GetNestedNativeMethodTable();
3288 size = GetLoongArch64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3291 else if (nfc == NativeFieldCategory::FLOAT)
3293 if (pFieldStart->GetSize() == 4)
3295 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3297 else if (pFieldStart->GetSize() == 8)
3299 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3304 else if (numIntroducedFields == 2)
3306 pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3308 if (pFieldStart->GetSize() > 8)
3313 if (pFieldStart->GetOffset() || !pFieldStart[1].GetOffset() || (pFieldStart[0].GetSize() > pFieldStart[1].GetOffset()))
3318 CorElementType fieldType = pFieldStart[0].GetFieldType();
3319 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3321 if (fieldType == ELEMENT_TYPE_R4)
3323 size = STRUCT_FLOAT_FIELD_FIRST;
3325 else if (fieldType == ELEMENT_TYPE_R8)
3327 size = STRUCT_FIRST_FIELD_DOUBLE;
3329 else if (pFieldStart[0].GetSize() == 8)
3331 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3334 fieldType = pFieldStart[1].GetFieldType();
3335 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3337 if (fieldType == ELEMENT_TYPE_R4)
3339 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3341 else if (fieldType == ELEMENT_TYPE_R8)
3343 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3345 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3347 size = STRUCT_NO_FLOAT_FIELD;
3349 else if (pFieldStart[1].GetSize() == 8)
3351 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3356 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3358 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
3360 NativeFieldCategory nfc = pNativeFieldDescs->GetCategory();
3362 if (nfc == NativeFieldCategory::NESTED)
3364 if (pNativeFieldDescs->GetNumElements() != 1)
3366 size = STRUCT_NO_FLOAT_FIELD;
3370 MethodTable* pMethodTable2 = pNativeFieldDescs->GetNestedNativeMethodTable();
3372 if (!IsLoongArch64OnlyOneField(pMethodTable2))
3374 size = STRUCT_NO_FLOAT_FIELD;
3378 size = GetLoongArch64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable2);
3379 if ((size & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
3381 if (pFieldStart->GetSize() == 8)
3383 size = STRUCT_FIRST_FIELD_DOUBLE;
3387 size = STRUCT_FLOAT_FIELD_FIRST;
3390 else if (pFieldStart->GetSize() == 8)
3392 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3396 size = STRUCT_NO_FLOAT_FIELD;
3400 else if (nfc == NativeFieldCategory::FLOAT)
3402 if (pFieldStart[0].GetSize() == 4)
3404 size = STRUCT_FLOAT_FIELD_FIRST;
3406 else if (pFieldStart[0].GetSize() == 8)
3408 _ASSERTE((pMethodTable->GetNativeSize() == 8) || (pMethodTable->GetNativeSize() == 16));
3409 size = STRUCT_FIRST_FIELD_DOUBLE;
3412 else if (pFieldStart[0].GetSize() == 8)
3414 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3417 else if (fieldType == ELEMENT_TYPE_CLASS)
3419 size = STRUCT_NO_FLOAT_FIELD;
3422 else if (pFieldStart[0].GetSize() == 8)
3424 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3427 fieldType = pFieldStart[1].GetFieldType();
3428 if (pFieldStart[1].GetSize() > 8)
3430 size = STRUCT_NO_FLOAT_FIELD;
3433 else if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3435 if (fieldType == ELEMENT_TYPE_R4)
3437 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3439 else if (fieldType == ELEMENT_TYPE_R8)
3441 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3443 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3445 size = STRUCT_NO_FLOAT_FIELD;
3447 else if (pFieldStart[1].GetSize() == 8)
3449 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3452 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3454 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
3455 NativeFieldCategory nfc = pNativeFieldDescs[1].GetCategory();
3457 if (nfc == NativeFieldCategory::NESTED)
3459 if (pNativeFieldDescs[1].GetNumElements() != 1)
3461 size = STRUCT_NO_FLOAT_FIELD;
3465 MethodTable* pMethodTable2 = pNativeFieldDescs[1].GetNestedNativeMethodTable();
3467 if (!IsLoongArch64OnlyOneField(pMethodTable2))
3469 size = STRUCT_NO_FLOAT_FIELD;
3473 if ((GetLoongArch64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable2) & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
3475 if (pFieldStart[1].GetSize() == 4)
3477 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3479 else if (pFieldStart[1].GetSize() == 8)
3481 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3484 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3486 size = STRUCT_NO_FLOAT_FIELD;
3488 else if (pFieldStart[1].GetSize() == 8)
3490 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3493 else if (nfc == NativeFieldCategory::FLOAT)
3495 if (pFieldStart[1].GetSize() == 4)
3497 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3499 else if (pFieldStart[1].GetSize() == 8)
3501 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3504 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3506 size = STRUCT_NO_FLOAT_FIELD;
3508 else if (pFieldStart[1].GetSize() == 8)
3510 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3513 else if (fieldType == ELEMENT_TYPE_CLASS)
3515 size = STRUCT_NO_FLOAT_FIELD;
3518 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3520 size = STRUCT_NO_FLOAT_FIELD;
3522 else if (pFieldStart[1].GetSize() == 8)
3524 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3535 #if defined(TARGET_RISCV64)
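// Note: this mirrors the LoongArch64 helper above; it reports whether the struct,
// looking through single nested value-type fields, ultimately contains exactly one field.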
3537 bool MethodTable::IsRiscV64OnlyOneField(MethodTable * pMT)
3541 bool useNativeLayout = false;
3543 MethodTable* pMethodTable = nullptr;
3545 if (!th.IsTypeDesc())
3547 pMethodTable = th.AsMethodTable();
3548 if (pMethodTable->HasLayout())
3550 useNativeLayout = true;
3552 else if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
3554 DWORD numIntroducedFields = pMethodTable->GetNumIntroducedInstanceFields();
3556 if (numIntroducedFields == 1)
3558 FieldDesc *pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3560 CorElementType fieldType = pFieldStart[0].GetFieldType();
3562 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3566 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3568 pMethodTable = pFieldStart->GetApproxFieldTypeHandleThrowing().GetMethodTable();
3569 if (pMethodTable->GetNumIntroducedInstanceFields() == 1)
3571 ret = IsRiscV64OnlyOneField(pMethodTable);
3580 _ASSERTE(th.IsNativeValueType());
3582 useNativeLayout = true;
3583 pMethodTable = th.AsNativeValueType();
3585 _ASSERTE(pMethodTable != nullptr);
3587 if (useNativeLayout)
3589 if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
3591 DWORD numIntroducedFields = pMethodTable->GetNativeLayoutInfo()->GetNumFields();
3592 FieldDesc *pFieldStart = nullptr;
3594 if (numIntroducedFields == 1)
3596 pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3598 CorElementType fieldType = pFieldStart->GetFieldType();
3600 // InlineArray types and fixed buffer types have implied repeated fields.
3601 // Checking if a type is an InlineArray type is cheap, so we'll do that first.
3602 bool hasImpliedRepeatedFields = HasImpliedRepeatedFields(pMethodTable);
3604 if (hasImpliedRepeatedFields)
3606 numIntroducedFields = pMethodTable->GetNumInstanceFieldBytes() / pFieldStart->GetSize();
3607 if (numIntroducedFields != 1)
3613 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3617 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3619 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
3620 NativeFieldCategory nfc = pNativeFieldDescs->GetCategory();
3621 if (nfc == NativeFieldCategory::NESTED)
3623 pMethodTable = pNativeFieldDescs->GetNestedNativeMethodTable();
3624 ret = IsRiscV64OnlyOneField(pMethodTable);
3626 else if (nfc != NativeFieldCategory::ILLEGAL)
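// Note: the flag encoding computed below follows the same scheme as
// GetLoongArch64PassStructInRegisterFlags above (which of up to two fields are
// floating point and which are 8 bytes wide), applied to the RISC-V 64-bit
// calling convention.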
3643 int MethodTable::GetRiscV64PassStructInRegisterFlags(CORINFO_CLASS_HANDLE cls)
3647 bool useNativeLayout = false;
3648 int size = STRUCT_NO_FLOAT_FIELD;
3649 MethodTable* pMethodTable = nullptr;
3651 if (!th.IsTypeDesc())
3653 pMethodTable = th.AsMethodTable();
3654 if (pMethodTable->HasLayout())
3656 useNativeLayout = true;
3658 else if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
3660 DWORD numIntroducedFields = pMethodTable->GetNumIntroducedInstanceFields();
3662 if (numIntroducedFields == 1)
3664 FieldDesc *pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3666 CorElementType fieldType = pFieldStart[0].GetFieldType();
3668 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3670 if (fieldType == ELEMENT_TYPE_R4)
3672 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3674 else if (fieldType == ELEMENT_TYPE_R8)
3676 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3679 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3681 pMethodTable = pFieldStart->GetApproxFieldTypeHandleThrowing().GetMethodTable();
3682 size = GetRiscV64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3685 else if (numIntroducedFields == 2)
3687 FieldDesc *pFieldSecond;
3688 FieldDesc *pFieldFirst = pMethodTable->GetApproxFieldDescListRaw();
3689 if (pFieldFirst->GetOffset() == 0)
3691 pFieldSecond = pFieldFirst + 1;
3695 pFieldSecond = pFieldFirst;
3696 pFieldFirst = pFieldFirst + 1;
3698 assert(pFieldFirst->GetOffset() == 0);
3700 if (pFieldFirst->GetSize() > 8)
3705 CorElementType fieldType = pFieldFirst[0].GetFieldType();
3706 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3708 if (fieldType == ELEMENT_TYPE_R4)
3710 size = STRUCT_FLOAT_FIELD_FIRST;
3712 else if (fieldType == ELEMENT_TYPE_R8)
3714 size = STRUCT_FIRST_FIELD_DOUBLE;
3716 else if (pFieldFirst[0].GetSize() == 8)
3718 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3722 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3724 pMethodTable = pFieldFirst->GetApproxFieldTypeHandleThrowing().GetMethodTable();
3725 if (IsRiscV64OnlyOneField(pMethodTable))
3727 size = GetRiscV64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3728 if ((size & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
3730 size = pFieldFirst[0].GetSize() == 8 ? STRUCT_FIRST_FIELD_DOUBLE : STRUCT_FLOAT_FIELD_FIRST;
3732 else if (size == STRUCT_NO_FLOAT_FIELD)
3734 size = pFieldFirst[0].GetSize() == 8 ? STRUCT_FIRST_FIELD_SIZE_IS8: 0;
3738 size = STRUCT_NO_FLOAT_FIELD;
3744 size = STRUCT_NO_FLOAT_FIELD;
3748 else if (pFieldFirst[0].GetSize() == 8)
3750 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3753 fieldType = pFieldSecond[0].GetFieldType();
3754 if (pFieldSecond[0].GetSize() > 8)
3756 size = STRUCT_NO_FLOAT_FIELD;
3759 else if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3761 if (fieldType == ELEMENT_TYPE_R4)
3763 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3765 else if (fieldType == ELEMENT_TYPE_R8)
3767 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3769 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3771 size = STRUCT_NO_FLOAT_FIELD;
3773 else if (pFieldSecond[0].GetSize() == 8)
3775 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3778 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3780 pMethodTable = pFieldSecond[0].GetApproxFieldTypeHandleThrowing().GetMethodTable();
3781 if (IsRiscV64OnlyOneField(pMethodTable))
3783 int size2 = GetRiscV64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3784 if ((size2 & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
3786 if (pFieldSecond[0].GetSize() == 8)
3788 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3792 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3795 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3797 size = STRUCT_NO_FLOAT_FIELD;
3799 else if (size2 == STRUCT_NO_FLOAT_FIELD)
3801 size |= pFieldSecond[0].GetSize() == 8 ? STRUCT_SECOND_FIELD_SIZE_IS8 : 0;
3805 size = STRUCT_NO_FLOAT_FIELD;
3810 size = STRUCT_NO_FLOAT_FIELD;
3813 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3815 size = STRUCT_NO_FLOAT_FIELD;
3817 else if (pFieldSecond[0].GetSize() == 8)
3819 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3828 _ASSERTE(th.IsNativeValueType());
3830 useNativeLayout = true;
3831 pMethodTable = th.AsNativeValueType();
3833 _ASSERTE(pMethodTable != nullptr);
3835 if (useNativeLayout)
3837 if (th.GetSize() <= 16 /*MAX_PASS_MULTIREG_BYTES*/)
3839 DWORD numIntroducedFields = pMethodTable->GetNativeLayoutInfo()->GetNumFields();
3840 FieldDesc *pFieldStart = nullptr;
3842 if (numIntroducedFields == 1)
3844 pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3846 CorElementType fieldType = pFieldStart->GetFieldType();
3848 // InlineArray types and fixed buffer types have implied repeated fields.
3849 // Checking if a type is an InlineArray type is cheap, so we'll do that first.
3850 bool hasImpliedRepeatedFields = HasImpliedRepeatedFields(pMethodTable);
3852 if (hasImpliedRepeatedFields)
3854 numIntroducedFields = pMethodTable->GetNumInstanceFieldBytes() / pFieldStart->GetSize();
3855 if (numIntroducedFields > 2)
3860 if (fieldType == ELEMENT_TYPE_R4)
3862 if (numIntroducedFields == 1)
3864 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3866 else if (numIntroducedFields == 2)
3868 size = STRUCT_FLOAT_FIELD_ONLY_TWO;
3872 else if (fieldType == ELEMENT_TYPE_R8)
3874 if (numIntroducedFields == 1)
3876 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3878 else if (numIntroducedFields == 2)
3880 size = STRUCT_FIELD_TWO_DOUBLES;
3886 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3888 if (fieldType == ELEMENT_TYPE_R4)
3890 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3892 else if (fieldType == ELEMENT_TYPE_R8)
3894 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3897 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3899 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
3900 NativeFieldCategory nfc = pNativeFieldDescs->GetCategory();
3901 if (nfc == NativeFieldCategory::NESTED)
3903 pMethodTable = pNativeFieldDescs->GetNestedNativeMethodTable();
3904 size = GetRiscV64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable);
3907 else if (nfc == NativeFieldCategory::FLOAT)
3909 if (pFieldStart->GetSize() == 4)
3911 size = STRUCT_FLOAT_FIELD_ONLY_ONE;
3913 else if (pFieldStart->GetSize() == 8)
3915 size = STRUCT_FLOAT_FIELD_ONLY_ONE | STRUCT_FIRST_FIELD_SIZE_IS8;
3920 else if (numIntroducedFields == 2)
3922 pFieldStart = pMethodTable->GetApproxFieldDescListRaw();
3924 if (pFieldStart->GetSize() > 8)
3929 if (pFieldStart->GetOffset() || !pFieldStart[1].GetOffset() || (pFieldStart[0].GetSize() > pFieldStart[1].GetOffset()))
3934 CorElementType fieldType = pFieldStart[0].GetFieldType();
3935 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3937 if (fieldType == ELEMENT_TYPE_R4)
3939 size = STRUCT_FLOAT_FIELD_FIRST;
3941 else if (fieldType == ELEMENT_TYPE_R8)
3943 size = STRUCT_FIRST_FIELD_DOUBLE;
3945 else if (pFieldStart[0].GetSize() == 8)
3947 size = STRUCT_FIRST_FIELD_SIZE_IS8;
3950 fieldType = pFieldStart[1].GetFieldType();
3951 if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
3953 if (fieldType == ELEMENT_TYPE_R4)
3955 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
3957 else if (fieldType == ELEMENT_TYPE_R8)
3959 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
3961 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
3963 size = STRUCT_NO_FLOAT_FIELD;
3965 else if (pFieldStart[1].GetSize() == 8)
3967 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
3972 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
3974 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
3976 NativeFieldCategory nfc = pNativeFieldDescs->GetCategory();
3978 if (nfc == NativeFieldCategory::NESTED)
3980 if (pNativeFieldDescs->GetNumElements() != 1)
3982 size = STRUCT_NO_FLOAT_FIELD;
3986 MethodTable* pMethodTable2 = pNativeFieldDescs->GetNestedNativeMethodTable();
3988 if (!IsRiscV64OnlyOneField(pMethodTable2))
3990 size = STRUCT_NO_FLOAT_FIELD;
3994 size = GetRiscV64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable2);
3995 if ((size & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
3997 if (pFieldStart->GetSize() == 8)
3999 size = STRUCT_FIRST_FIELD_DOUBLE;
4003 size = STRUCT_FLOAT_FIELD_FIRST;
4006 else if (pFieldStart->GetSize() == 8)
4008 size = STRUCT_FIRST_FIELD_SIZE_IS8;
4012 size = STRUCT_NO_FLOAT_FIELD;
4016 else if (nfc == NativeFieldCategory::FLOAT)
4018 if (pFieldStart[0].GetSize() == 4)
4020 size = STRUCT_FLOAT_FIELD_FIRST;
4022 else if (pFieldStart[0].GetSize() == 8)
4024 _ASSERTE((pMethodTable->GetNativeSize() == 8) || (pMethodTable->GetNativeSize() == 16));
4025 size = STRUCT_FIRST_FIELD_DOUBLE;
4028 else if (pFieldStart[0].GetSize() == 8)
4030 size = STRUCT_FIRST_FIELD_SIZE_IS8;
4033 else if (pFieldStart[0].GetSize() == 8)
4035 size = STRUCT_FIRST_FIELD_SIZE_IS8;
4038 fieldType = pFieldStart[1].GetFieldType();
4039 if (pFieldStart[1].GetSize() > 8)
4041 size = STRUCT_NO_FLOAT_FIELD;
4044 else if (CorTypeInfo::IsPrimitiveType_NoThrow(fieldType))
4046 if (fieldType == ELEMENT_TYPE_R4)
4048 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
4050 else if (fieldType == ELEMENT_TYPE_R8)
4052 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
4054 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
4056 size = STRUCT_NO_FLOAT_FIELD;
4058 else if (pFieldStart[1].GetSize() == 8)
4060 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
4063 // Pass in two integer registers in the `struct { int a; int b; float/double c; }` cases
4064 if ((size | STRUCT_FIRST_FIELD_SIZE_IS8 | STRUCT_FLOAT_FIELD_SECOND) == size)
4066 size = STRUCT_NO_FLOAT_FIELD;
4069 else if (fieldType == ELEMENT_TYPE_VALUETYPE)
4071 const NativeFieldDescriptor *pNativeFieldDescs = pMethodTable->GetNativeLayoutInfo()->GetNativeFieldDescriptors();
4072 NativeFieldCategory nfc = pNativeFieldDescs[1].GetCategory();
4074 if (nfc == NativeFieldCategory::NESTED)
4076 if (pNativeFieldDescs[1].GetNumElements() != 1)
4078 size = STRUCT_NO_FLOAT_FIELD;
4082 MethodTable* pMethodTable2 = pNativeFieldDescs[1].GetNestedNativeMethodTable();
4084 if (!IsRiscV64OnlyOneField(pMethodTable2))
4086 size = STRUCT_NO_FLOAT_FIELD;
4090 if ((GetRiscV64PassStructInRegisterFlags((CORINFO_CLASS_HANDLE)pMethodTable2) & STRUCT_FLOAT_FIELD_ONLY_ONE) != 0)
4092 if (pFieldStart[1].GetSize() == 4)
4094 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
4096 else if (pFieldStart[1].GetSize() == 8)
4098 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
4101 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
4103 size = STRUCT_NO_FLOAT_FIELD;
4105 else if (pFieldStart[1].GetSize() == 8)
4107 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
4110 else if (nfc == NativeFieldCategory::FLOAT)
4112 if (pFieldStart[1].GetSize() == 4)
4114 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND) : (size | STRUCT_FLOAT_FIELD_SECOND);
4116 else if (pFieldStart[1].GetSize() == 8)
4118 size = size & STRUCT_FLOAT_FIELD_FIRST ? (size ^ STRUCT_MERGE_FIRST_SECOND_8) : (size | STRUCT_SECOND_FIELD_DOUBLE);
4121 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
4123 size = STRUCT_NO_FLOAT_FIELD;
4125 else if (pFieldStart[1].GetSize() == 8)
4127 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
4130 else if ((size & STRUCT_FLOAT_FIELD_FIRST) == 0)
4132 size = STRUCT_NO_FLOAT_FIELD;
4134 else if (pFieldStart[1].GetSize() == 8)
4136 size |= STRUCT_SECOND_FIELD_SIZE_IS8;
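// For orientation (descriptive comment only; the exact flag compositions are as the names
// suggest, not re-derived here): the returned STRUCT_* flags tell the JIT whether the first
// and/or second field of a small struct can travel in RISC-V floating-point argument
// registers under the hardware floating-point calling convention. Roughly:
//
//   struct { float f; }              -> STRUCT_FLOAT_FIELD_ONLY_ONE
//   struct { double d1; double d2; } -> STRUCT_FIELD_TWO_DOUBLES
//   struct { float f; int i; }       -> STRUCT_FLOAT_FIELD_FIRST   (second field uses an integer register)
//   struct { int i; double d; }      -> STRUCT_SECOND_FIELD_DOUBLE (first field uses an integer register)
//   three or more fields             -> STRUCT_NO_FLOAT_FIELD      (no FP registers used)
//
// The *_SIZE_IS8 bits merely record that the corresponding field is 8 bytes wide.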
4147 #if !defined(DACCESS_COMPILE)
4148 //==========================================================================================
4149 void MethodTable::AllocateRegularStaticBoxes()
4155 PRECONDITION(!ContainsGenericVariables());
4156 PRECONDITION(HasBoxedRegularStatics());
4161 LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Instantiating static handles for %s\n", GetDebugClassName()));
4165 PTR_BYTE pStaticBase = GetGCStaticsBasePointer();
4167 GCPROTECT_BEGININTERIOR(pStaticBase);
4170 FieldDesc *pField = HasGenericsStaticsInfo() ?
4171 GetGenericsStaticFieldDescs() : (GetApproxFieldDescListRaw() + GetNumIntroducedInstanceFields());
4172 FieldDesc *pFieldEnd = pField + GetNumStaticFields();
4174 while (pField < pFieldEnd)
4176 _ASSERTE(pField->IsStatic());
4178 if (!pField->IsSpecialStatic() && pField->IsByValue())
4180 AllocateRegularStaticBox(pField, (Object**)(pStaticBase + pField->GetOffset()));
4189 void MethodTable::AllocateRegularStaticBox(FieldDesc* pField, Object** boxedStaticHandle)
4199 _ASSERT(pField->IsStatic() && !pField->IsSpecialStatic() && pField->IsByValue());
4201 // Static fields are not pinned in collectible types so we need to protect the address
4202 GCPROTECT_BEGININTERIOR(boxedStaticHandle);
4203 if (VolatileLoad(boxedStaticHandle) == nullptr)
4205 // Grab field's type handle before we enter lock
4206 MethodTable* pFieldMT = pField->GetFieldTypeHandleThrowing().GetMethodTable();
4207 bool hasFixedAddr = HasFixedAddressVTStatics();
4209 // Taking a lock since we might come here from multiple threads/places
4210 CrstHolder crst(GetAppDomain()->GetStaticBoxInitLock());
4212 // double-checked locking
4213 if (VolatileLoad(boxedStaticHandle) == nullptr)
4215 LOG((LF_CLASSLOADER, LL_INFO10000, "\tInstantiating static of type %s\n", pFieldMT->GetDebugClassName()));
4216 const bool canBeFrozen = !pFieldMT->ContainsPointers() && !Collectible();
4217 OBJECTREF obj = AllocateStaticBox(pFieldMT, hasFixedAddr, NULL, canBeFrozen);
4218 SetObjectReference((OBJECTREF*)(boxedStaticHandle), obj);
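// The code above is the classic double-checked locking shape. A minimal sketch of the
// pattern, using hypothetical names (g_pBox, g_lock) rather than the real fields:
//
//   if (VolatileLoad(&g_pBox) == nullptr)          // cheap, lock-free first check
//   {
//       CrstHolder lock(&g_lock);                  // serialize would-be initializers
//       if (VolatileLoad(&g_pBox) == nullptr)      // re-check now that we hold the lock
//           SetObjectReference(&g_pBox, AllocateStaticBox(...));  // publish exactly once
//   }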
4224 //==========================================================================================
4225 OBJECTREF MethodTable::AllocateStaticBox(MethodTable* pFieldMT, BOOL fPinned, OBJECTHANDLE* pHandle, bool canBeFrozen)
4235 _ASSERTE(pFieldMT->IsValueType());
4237 // Activate any dependent modules if necessary
4238 pFieldMT->EnsureInstanceActive();
4240 OBJECTREF obj = NULL;
4243 // If we don't plan to collect this handle, we may try to allocate the box on the frozen object heap (FOH)
4244 _ASSERT(!pFieldMT->ContainsPointers());
4245 _ASSERT(pHandle == nullptr);
4246 FrozenObjectHeapManager* foh = SystemDomain::GetFrozenObjectHeapManager();
4247 obj = ObjectToOBJECTREF(foh->TryAllocateObject(pFieldMT, pFieldMT->GetBaseSize()));
4248 // obj can be null if the struct is huge (>64kb)
4255 obj = AllocateObject(pFieldMT);
4257 // Pin the object if necessary
4260 LOG((LF_CLASSLOADER, LL_INFO10000, "\tSTATICS:Pinning static (VT fixed address attribute) of type %s\n", pFieldMT->GetDebugClassName()));
4261 OBJECTHANDLE oh = GetAppDomain()->CreatePinningHandle(obj);
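// Note (descriptive): boxes that reach this path were not placed on the frozen object heap
// (the field type contains GC pointers, the owning type is collectible, or the FOH allocation
// failed for a very large struct), so an ordinary GC-heap object is used and, for types with
// fixed-address value type statics, kept at a stable address via a pinning handle.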
4278 //==========================================================================================
4279 BOOL MethodTable::RunClassInitEx(OBJECTREF *pThrowable)
4286 PRECONDITION(IsFullyLoaded());
4287 PRECONDITION(IsProtectedByGCFrame(pThrowable));
4291 // A somewhat unusual function: it can both return a throwable and throw.
4292 // The difference is, we throw on restartable operations and just return throwable
4293 // on exceptions fatal for the .cctor
4294 // (Of course in the latter case the caller is supposed to throw pThrowable)
4295 // Doing the opposite ( i.e. throwing on fatal and returning on nonfatal)
4296 // would be more intuitive but it's more convenient the way it is
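// A rough usage sketch (mirroring DoRunClassInitThrowing below); the failure path stores the
// returned throwable against the type and only then rethrows it:
//
//   GCPROTECT_BEGIN(gc);
//   if (!pMT->RunClassInitEx(&gc.pInnerException))
//   {
//       // .cctor failed fatally: record gc.pInnerException for later attempts,
//       // mark the class-init error, then COMPlusThrow the saved throwable.
//   }
//   GCPROTECT_END();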
4300 // During the <clinit>, this thread must not be asynchronously
4301 // stopped or interrupted. That would leave the class unavailable
4302 // and is therefore a security hole. We don't have to worry about
4303 // multithreading, since we only manipulate the current thread's count.
4304 ThreadPreventAsyncHolder preventAsync;
4306 // If the static initializer throws an exception that it doesn't catch, it has failed
4309 // Activate our module if necessary
4310 EnsureInstanceActive();
4312 STRESS_LOG1(LF_CLASSLOADER, LL_INFO1000, "RunClassInit: Calling class constructor for type %pT\n", this);
4314 MethodTable * pCanonMT = GetCanonicalMethodTable();
4316 // Call the code method without touching MethodDesc if possible
4317 PCODE pCctorCode = pCanonMT->GetSlot(pCanonMT->GetClassConstructorSlot());
4319 if (pCanonMT->IsSharedByGenericInstantiations())
4321 PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(pCctorCode);
4322 DECLARE_ARGHOLDER_ARRAY(args, 1);
4323 args[ARGNUM_0] = PTR_TO_ARGHOLDER(this);
4324 CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE;
4325 CALL_MANAGED_METHOD_NORET(args);
4329 PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(pCctorCode);
4330 DECLARE_ARGHOLDER_ARRAY(args, 0);
4331 CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE;
4332 CALL_MANAGED_METHOD_NORET(args);
4335 STRESS_LOG1(LF_CLASSLOADER, LL_INFO100000, "RunClassInit: Returned Successfully from class constructor for type %pT\n", this);
4341 // Exception set by parent
4342 // <TODO>@TODO: We should make this an ExceptionInInitializerError if the exception thrown is not
4343 // a subclass of Error</TODO>
4344 *pThrowable = GET_THROWABLE();
4345 _ASSERTE(fRet == FALSE);
4347 EX_END_CATCH(SwallowAllExceptions)
4352 //==========================================================================================
4353 void MethodTable::DoRunClassInitThrowing()
4365 // This is a fairly aggressive policy. Merely asking that the class be initialized is grounds for kicking you out.
4366 // Alternatively, we could simply NOP out the class initialization. Since the aggressive policy is also the more secure
4367 // policy, keep this unless it proves intractable to remove all premature classinits in the system.
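// Rough outline of the protocol below (descriptive only):
//   1. Take the appdomain's global class-init ListLock; return if the class is already
//      initialized, or rethrow the saved init exception if a previous attempt failed.
//   2. Find or create the per-type ListLockEntry and release the global lock.
//   3. Acquire the entry lock deadlock-aware; if this thread wins and nobody has run the
//      .cctor yet, allocate any boxed statics and run the .cctor via RunClassInitEx.
//   4. On failure, stash the exception in a loader handle, set the init error and throw;
//      on success, record S_OK and mark the type as initialized.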
4370 Thread* pThread = GetThread();
4372 AppDomain *pDomain = GetAppDomain();
4374 HRESULT hrResult = E_FAIL;
4375 const char *description;
4376 STRESS_LOG2(LF_CLASSLOADER, LL_INFO100000, "DoRunClassInit: Request to init %pT in appdomain %p\n", this, pDomain);
4379 // Take the global lock
4382 ListLock *_pLock = pDomain->GetClassInitLock();
4384 ListLockHolder pInitLock(_pLock);
4387 if (IsClassInited())
4391 // Handle cases where the .cctor has already tried to run but failed.
4397 // Some error occurred trying to init this class
4398 ListLockEntry* pEntry= (ListLockEntry *) _pLock->Find(this);
4399 _ASSERTE(pEntry!=NULL);
4400 _ASSERTE(pEntry->m_pLoaderAllocator == GetLoaderAllocator());
4402 // If this isn't a TypeInitializationException, then its creation failed
4403 // somehow previously, so we should make one last attempt to create it. If
4404 // that fails, just throw the exception that was originally thrown.
4405 // Primarily, this deals with the problem that the exception is a
4406 // ThreadAbortException, because this must be executing on a different
4407 // thread. If in fact this thread is also aborting, then rethrowing the
4408 // other thread's exception will not do any worse.
4410 // If we need to create the type init exception object, we'll need to
4411 // GC protect these, so might as well create the structure now.
4413 OBJECTREF pInitException;
4414 OBJECTREF pNewInitException;
4415 OBJECTREF pThrowable;
4418 gc.pInitException = pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException);
4419 gc.pNewInitException = NULL;
4420 gc.pThrowable = NULL;
4422 GCPROTECT_BEGIN(gc);
4424 // We need to release this lock because CreateTypeInitializationExceptionObject and fetching the TypeLoad exception can cause
4425 // managed code to re-enter this codepath, causing a locking order violation.
4426 pInitLock.Release();
4428 if (CoreLibBinder::GetException(kTypeInitializationException) != gc.pInitException->GetMethodTable())
4430 DefineFullyQualifiedNameForClassWOnStack();
4431 LPCWSTR wszName = GetFullyQualifiedNameForClassW(this);
4433 CreateTypeInitializationExceptionObject(wszName, &gc.pInitException, &gc.pNewInitException, &gc.pThrowable);
4435 LOADERHANDLE hOrigInitException = pEntry->m_hInitException;
4436 if (!CLRException::IsPreallocatedExceptionObject(pEntry->m_pLoaderAllocator->GetHandleValue(hOrigInitException)))
4438 // Now put the new init exception in the handle. If another thread beat us (because we released the
4439 // lock above), then we'll just let the extra init exception object get collected later.
4440 pEntry->m_pLoaderAllocator->CompareExchangeValueInHandle(pEntry->m_hInitException, gc.pNewInitException, gc.pInitException);
4442 // if the stored exception is a preallocated one we cannot store the new Exception object in it.
4443 // we'll attempt to create a new handle for the new TypeInitializationException object
4444 LOADERHANDLE hNewInitException = NULL;
4445 // CreateHandle can throw due to OOM. We need to catch this so that we make sure to set the
4446 // init error. Whatever exception was thrown will be rethrown below, so no worries.
4448 hNewInitException = pEntry->m_pLoaderAllocator->AllocateHandle(gc.pNewInitException);
4450 // If we failed to create the handle we'll just leave the originally alloc'd one in place.
4451 } EX_END_CATCH(SwallowAllExceptions);
4453 // if two threads are racing to set m_hInitException, clear the handle created by the loser
4454 if (hNewInitException != NULL &&
4455 InterlockedCompareExchangeT((&pEntry->m_hInitException), hNewInitException, hOrigInitException) != hOrigInitException)
4457 pEntry->m_pLoaderAllocator->FreeHandle(hNewInitException);
4462 gc.pThrowable = gc.pInitException;
4467 // Throw the saved exception. Since we may be rethrowing a previously cached exception, must clear the stack trace first.
4468 // Rethrowing a previously cached exception is distasteful but is required for appcompat with Everett.
4470 // (The IsException() is probably more appropriate as an assert but as this isn't a heavily tested code path,
4471 // I prefer to be defensive here.)
4472 if (IsException(gc.pThrowable->GetMethodTable()))
4474 ((EXCEPTIONREF)(gc.pThrowable))->ClearStackTraceForThrow();
4477 COMPlusThrow(gc.pThrowable);
4480 description = ".cctor lock";
4482 description = GetDebugClassName();
4487 //nontrivial holder, might take a lock in destructor
4488 ListLockEntryHolder pEntry(ListLockEntry::Find(pInitLock, this, description));
4490 ListLockEntryLockHolder pLock(pEntry, FALSE);
4492 // We have a list entry, we can release the global lock now
4493 pInitLock.Release();
4495 if (pLock.DeadlockAwareAcquire())
4497 if (pEntry->m_hrResultCode == S_FALSE)
4499 if (HasBoxedRegularStatics())
4501 // First, instantiate any objects needed for value type statics
4502 AllocateRegularStaticBoxes();
4505 // Nobody has run the .cctor yet
4506 if (HasClassConstructor())
4509 OBJECTREF pInnerException;
4510 OBJECTREF pInitException;
4511 OBJECTREF pThrowable;
4513 gc.pInnerException = NULL;
4514 gc.pInitException = NULL;
4515 gc.pThrowable = NULL;
4516 GCPROTECT_BEGIN(gc);
4518 if (!RunClassInitEx(&gc.pInnerException))
4520 // The .cctor failed and we want to store the exception that resulted
4521 // in the entry. Increment the ref count to keep the entry alive for
4522 // subsequent attempts to run the .cctor.
4524 // For collectible types, register the entry for cleanup.
4525 if (GetLoaderAllocator()->IsCollectible())
4527 GetLoaderAllocator()->RegisterFailedTypeInitForCleanup(pEntry);
4530 _ASSERTE(g_pThreadAbortExceptionClass == CoreLibBinder::GetException(kThreadAbortException));
4532 if(gc.pInnerException->GetMethodTable() == g_pThreadAbortExceptionClass)
4534 gc.pThrowable = gc.pInnerException;
4535 gc.pInitException = gc.pInnerException;
4536 gc.pInnerException = NULL;
4540 DefineFullyQualifiedNameForClassWOnStack();
4541 LPCWSTR wszName = GetFullyQualifiedNameForClassW(this);
4543 // Note that this may not succeed due to problems creating the exception
4544 // object. On failure, it will first try to
4545 CreateTypeInitializationExceptionObject(
4546 wszName, &gc.pInnerException, &gc.pInitException, &gc.pThrowable);
4549 pEntry->m_pLoaderAllocator = GetLoaderAllocator();
4551 // CreateHandle can throw due to OOM. We need to catch this so that we make sure to set the
4552 // init error. Whatever exception was thrown will be rethrown below, so no worries.
4554 // Save the exception object, and return to caller as well.
4555 pEntry->m_hInitException = pEntry->m_pLoaderAllocator->AllocateHandle(gc.pInitException);
4557 // If we failed to create the handle (due to OOM), we'll just store the preallocated OOM
4558 // handle here instead.
4559 pEntry->m_hInitException = pEntry->m_pLoaderAllocator->AllocateHandle(CLRException::GetPreallocatedOutOfMemoryException());
4560 } EX_END_CATCH(SwallowAllExceptions);
4562 pEntry->m_hrResultCode = E_FAIL;
4563 SetClassInitError();
4565 COMPlusThrow(gc.pThrowable);
4571 pEntry->m_hrResultCode = S_OK;
4573 // Set the initialization flags in the DLS and on domain-specific types.
4574 // Note we also set the flag for dynamic statics, which use the DynamicStatics part
4575 // of the DLS irrespective of whether the type is domain neutral or not.
4581 // Use previous result
4583 hrResult = pEntry->m_hrResultCode;
4584 if(FAILED(hrResult))
4586 // An exception may have occurred in the cctor. DoRunClassInit() should return FALSE in that case.
4588 _ASSERTE(pEntry->m_hInitException);
4589 _ASSERTE(pEntry->m_pLoaderAllocator == GetLoaderAllocator());
4590 _ASSERTE(IsInitError());
4592 // Throw the saved exception. Since we are rethrowing a previously cached exception, we must clear the stack trace first.
4593 // Rethrowing a previously cached exception is distasteful but is required for appcompat with Everett.
4595 // (The IsException() is probably more appropriate as an assert but as this isn't a heavily tested code path,
4596 // I prefer to be defensive here.)
4597 if (IsException(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException)->GetMethodTable()))
4599 ((EXCEPTIONREF)(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException)))->ClearStackTraceForThrow();
4601 COMPlusThrow(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException));
4608 // Notify any entries waiting on the current entry and wait for the required entries.
4611 // We need to take the global lock before we play with the list of entries.
4613 STRESS_LOG2(LF_CLASSLOADER, LL_INFO100000, "DoRunClassInit: returning SUCCESS for init %pT in appdomain %p\n", this, pDomain);
4614 // No need to set pThrowable; in case of error it will already have been set.
4619 //==========================================================================================
4620 void MethodTable::CheckRunClassInitThrowing()
4626 INJECT_FAULT(COMPlusThrowOM());
4627 PRECONDITION(IsFullyLoaded());
4631 { // Debug-only code causes SO violation, so add exception.
4632 CONSISTENCY_CHECK(CheckActivated());
4635 // To find GC hole easier...
4638 if (IsClassPreInited())
4641 // Don't initialize shared generic instantiations (e.g. MyClass<__Canon>)
4642 if (IsSharedByGenericInstantiations())
4645 DomainLocalModule *pLocalModule = GetDomainLocalModule();
4646 _ASSERTE(pLocalModule);
4648 DWORD iClassIndex = GetClassIndex();
4650 // Check to see if we have already run the .cctor for this class.
4651 if (!pLocalModule->IsClassAllocated(this, iClassIndex))
4652 pLocalModule->PopulateClass(this);
4654 if (!pLocalModule->IsClassInitialized(this, iClassIndex))
4655 DoRunClassInitThrowing();
4658 //==========================================================================================
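// Background (descriptive): a "precise init" .cctor belongs to a type that is not marked
// beforefieldinit, so its class constructor must have run by the time the first instance is
// created; beforefieldinit .cctors, by contrast, may be triggered lazily at any point before
// the first static field access. That is why instance-allocation paths call this method for
// the whole parent chain.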
4659 void MethodTable::CheckRunClassInitAsIfConstructingThrowing()
4668 if (HasPreciseInitCctors())
4670 MethodTable *pMTCur = this;
4671 while (pMTCur != NULL)
4673 if (!pMTCur->GetClass()->IsBeforeFieldInit())
4674 pMTCur->CheckRunClassInitThrowing();
4676 pMTCur = pMTCur->GetParentMethodTable();
4681 //==========================================================================================
4682 OBJECTREF MethodTable::Allocate()
4692 CONSISTENCY_CHECK(IsFullyLoaded());
4694 EnsureInstanceActive();
4696 if (HasPreciseInitCctors())
4698 CheckRunClassInitAsIfConstructingThrowing();
4701 return AllocateObject(this);
4704 //==========================================================================================
4705 // box 'data' creating a new object and return it. This routine understands the special
4706 // handling needed for Nullable values.
4707 // see code:Nullable#NullableVerification
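// For reference, the observable behavior (illustrative values):
//
//   Nullable<int> none;        // HasValue == false
//   Nullable<int> some = 42;   // HasValue == true
//
//   boxing 'none' -> null reference (no object is allocated)
//   boxing 'some' -> a boxed int holding 42; the result is never a boxed Nullable<int>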
4709 OBJECTREF MethodTable::Box(void* data)
4716 PRECONDITION(IsValueType());
4722 GCPROTECT_BEGININTERIOR (data);
4726 // We should never box a type that contains stack pointers.
4727 COMPlusThrow(kInvalidOperationException, W("InvalidOperation_TypeCannotBeBoxed"));
4730 ref = FastBox(&data);
4735 OBJECTREF MethodTable::FastBox(void** data)
4742 PRECONDITION(IsValueType());
4746 // See code:Nullable#NullableArchitecture for more
4748 return Nullable::Box(*data, this);
4750 OBJECTREF ref = Allocate();
4751 CopyValueClass(ref->UnBox(), *data, this);
4755 #if TARGET_X86 || TARGET_AMD64
4756 //==========================================================================================
4757 static void FastCallFinalize(Object *obj, PCODE funcPtr, BOOL fCriticalCall)
4759 STATIC_CONTRACT_THROWS;
4760 STATIC_CONTRACT_GC_TRIGGERS;
4761 STATIC_CONTRACT_MODE_COOPERATIVE;
4763 BEGIN_CALL_TO_MANAGEDEX(fCriticalCall ? EEToManagedCriticalCall : EEToManagedDefault);
4765 #if defined(TARGET_X86)
4771 INDEBUG(nop) // Mark the fact that we can call managed code
4776 FastCallFinalizeWorker(obj, funcPtr);
4778 #endif // TARGET_X86
4780 END_CALL_TO_MANAGED();
4783 #endif // TARGET_X86 || TARGET_AMD64
4785 void CallFinalizerOnThreadObject(Object *obj)
4787 STATIC_CONTRACT_MODE_COOPERATIVE;
4789 THREADBASEREF refThis = (THREADBASEREF)ObjectToOBJECTREF(obj);
4790 Thread* thread = refThis->GetInternal();
4792 // Prevent multiple calls to Finalize
4793 // Objects can be resurrected after being finalized. However, there is no
4794 // race condition here. We always check whether an exposed thread object is
4795 // still attached to the internal Thread object, before proceeding.
4798 refThis->ResetStartHelper();
4800 // During process shutdown, we finalize even reachable objects. But if we break
4801 // the link between the System.Thread and the internal Thread object, the runtime
4802 // may not work correctly. In particular, we won't be able to transition between
4803 // contexts and domains to finalize other objects. Since the runtime doesn't
4804 // require that Threads finalize during shutdown, we need to disable this. If
4805 // we wait until phase 2 of shutdown finalization (when the EE is suspended and
4806 // will never resume) then we can simply skip the side effects of Thread finalization.
4808 if ((g_fEEShutDown & ShutDown_Finalize2) == 0)
4810 if (GetThreadNULLOk() != thread)
4812 refThis->ClearInternal();
4815 thread->SetThreadState(Thread::TS_Finalized);
4816 Thread::SetCleanupNeededForFinalizedThread();
4821 //==========================================================================================
4822 // From the GC finalizer thread, invoke the Finalize() method on an object.
4823 void MethodTable::CallFinalizer(Object *obj)
4830 PRECONDITION(obj->GetMethodTable()->HasFinalizer());
4834 MethodTable *pMT = obj->GetMethodTable();
4836 // Check for precise init class constructors that have failed, if any have failed, then we didn't run the
4837 // constructor for the object, and running the finalizer for the object would violate the CLI spec by running
4838 // instance code without having successfully run the precise-init class constructor.
4839 if (pMT->HasPreciseInitCctors())
4841 MethodTable *pMTCur = pMT;
4844 if ((!pMTCur->GetClass()->IsBeforeFieldInit()) && pMTCur->IsInitError())
4846 // Precise init Type Initializer for type failed... do not run finalizer
4850 pMTCur = pMTCur->GetParentMethodTable();
4852 while (pMTCur != NULL);
4855 if (pMT == g_pThreadClass)
4857 // Finalizing a Thread object requires the ThreadStoreLock. It is expensive if
4858 // we keep taking the ThreadStoreLock. This is very bad if we have a high retirement
4859 // rate of Thread objects.
4860 // To avoid taking ThreadStoreLock multiple times, we mark Thread with TS_Finalized
4861 // and clean up a batch of them when we take ThreadStoreLock next time.
4863 // To avoid possible hierarchy requirement between critical finalizers, we call cleanup
4865 CallFinalizerOnThreadObject(obj);
4870 // Determine if the object has a critical or normal finalizer.
4871 BOOL fCriticalFinalizer = pMT->HasCriticalFinalizer();
4873 // There's no reason to actually set up a frame here. If we crawl out of the
4874 // Finalize() method on this thread, we will see FRAME_TOP which indicates
4875 // that the crawl should terminate. This is analogous to how KickOffThread()
4876 // starts new threads in the runtime.
4877 PCODE funcPtr = pMT->GetRestoredSlot(g_pObjectFinalizerMD->GetSlot());
4880 if (fCriticalFinalizer)
4882 STRESS_LOG1(LF_GCALLOC, LL_INFO100, "Finalizing CriticalFinalizer %pM\n",
4887 #if defined(TARGET_X86) || defined(TARGET_AMD64)
4889 #ifdef DEBUGGING_SUPPORTED
4890 if (CORDebuggerTraceCall())
4891 g_pDebugInterface->TraceCall((const BYTE *) funcPtr);
4892 #endif // DEBUGGING_SUPPORTED
4894 FastCallFinalize(obj, funcPtr, fCriticalFinalizer);
4896 #else // defined(TARGET_X86) || defined(TARGET_AMD64)
4898 PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(funcPtr);
4900 DECLARE_ARGHOLDER_ARRAY(args, 1);
4902 args[ARGNUM_0] = PTR_TO_ARGHOLDER(obj);
4904 if (fCriticalFinalizer)
4909 CALL_MANAGED_METHOD_NORET(args);
4911 #endif // defined(TARGET_X86) || defined(TARGET_AMD64)
4914 if (fCriticalFinalizer)
4916 STRESS_LOG1(LF_GCALLOC, LL_INFO100, "Finalized CriticalFinalizer %pM without exception\n",
4922 //==========================================================================
4923 // If the MethodTable doesn't yet know the Exposed class that represents it via
4924 // Reflection, acquire that class now. Regardless, return it to the caller.
4925 //==========================================================================
4926 OBJECTREF MethodTable::GetManagedClassObject()
4928 CONTRACT(OBJECTREF) {
4933 INJECT_FAULT(COMPlusThrowOM());
4934 POSTCONDITION(GetWriteableData()->m_hExposedClassObject != 0);
4940 // Force a GC here because GetManagedClassObject could trigger GC nondeterministically
4941 GCStress<cfg_any, PulseGcTriggerPolicy>::MaybeTrigger();
4944 if (GetWriteableData()->m_hExposedClassObject == NULL)
4946 // Make sure that we have been restored
4948 TypeHandle(this).AllocateManagedClassObject(&GetWriteableDataForWrite()->m_hExposedClassObject);
4950 RETURN(GetManagedClassObjectIfExists());
4953 #endif //!DACCESS_COMPILE
4955 //==========================================================================================
4956 // This needs to stay consistent with AllocateNewMT() and MethodTable::Save()
4958 // <TODO> protect this via some asserts as we've had one hard-to-track-down
4959 // bug already </TODO>
4961 void MethodTable::GetSavedExtent(TADDR *pStart, TADDR *pEnd)
4972 if (ContainsPointers())
4973 start = dac_cast<TADDR>(this) - CGCDesc::GetCGCDescFromMT(this)->GetSize();
4975 start = dac_cast<TADDR>(this);
4977 TADDR end = dac_cast<TADDR>(this) + GetEndOffsetOfOptionalMembers();
4979 _ASSERTE(start && end && (start < end));
4984 //==========================================================================================
4985 void MethodTable::CheckRestore()
4989 if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS;
4990 if (FORBIDGC_LOADER_USE_ENABLED()) GC_NOTRIGGER; else GC_TRIGGERS;
4994 if (!IsFullyLoaded())
4996 ClassLoader::EnsureLoaded(this);
4997 _ASSERTE(IsFullyLoaded());
5001 #ifndef DACCESS_COMPILE
5003 BOOL SatisfiesClassConstraints(TypeHandle instanceTypeHnd, TypeHandle typicalTypeHnd,
5004 const InstantiationContext *pInstContext);
5006 static VOID DoAccessibilityCheck(MethodTable *pAskingMT, MethodTable *pTargetMT, UINT resIDWhy)
5015 AccessCheckContext accessContext(NULL, pAskingMT);
5017 if (!ClassLoader::CanAccessClass(&accessContext,
5018 pTargetMT, //the desired class
5019 pTargetMT->GetAssembly(), //the desired class's assembly
5020 *AccessCheckOptions::s_pNormalAccessChecks
5023 SString displayName;
5024 pAskingMT->GetAssembly()->GetDisplayName(displayName);
5027 // The error string is either E_ACCESSDENIED, which requires the type name of the target, or
5028 // a more normal TypeLoadException, which displays the requesting type.
5029 _ASSERTE((resIDWhy == (UINT)E_ACCESSDENIED) || (resIDWhy == (UINT)IDS_CLASSLOAD_INTERFACE_NO_ACCESS));
5030 TypeString::AppendType(targetName, TypeHandle((resIDWhy == (UINT)E_ACCESSDENIED) ? pTargetMT : pAskingMT));
5032 COMPlusThrow(kTypeLoadException, resIDWhy, targetName.GetUnicode(), displayName.GetUnicode());
5037 VOID DoAccessibilityCheckForConstraint(MethodTable *pAskingMT, TypeHandle thConstraint, UINT resIDWhy)
5046 if (thConstraint.IsArray())
5048 DoAccessibilityCheckForConstraint(pAskingMT, thConstraint.GetArrayElementTypeHandle(), resIDWhy);
5050 else if (thConstraint.IsTypeDesc())
5052 TypeDesc *pTypeDesc = thConstraint.AsTypeDesc();
5054 if (pTypeDesc->IsGenericVariable())
5056 // since the metadata represents a generic type param constraint as an index into
5057 // the declaring type's list of generic params, it is structurally impossible
5058 // to express a violation this way. So there's no check to be done here.
5061 if (pTypeDesc->HasTypeParam())
5063 DoAccessibilityCheckForConstraint(pAskingMT, pTypeDesc->GetTypeParam(), resIDWhy);
5067 COMPlusThrow(kTypeLoadException, E_ACCESSDENIED);
5073 DoAccessibilityCheck(pAskingMT, thConstraint.GetMethodTable(), resIDWhy);
5078 VOID DoAccessibilityCheckForConstraints(MethodTable *pAskingMT, TypeVarTypeDesc *pTyVar, UINT resIDWhy)
5087 DWORD numConstraints;
5088 TypeHandle *pthConstraints = pTyVar->GetCachedConstraints(&numConstraints);
5089 for (DWORD cidx = 0; cidx < numConstraints; cidx++)
5091 TypeHandle thConstraint = pthConstraints[cidx];
5093 DoAccessibilityCheckForConstraint(pAskingMT, thConstraint, resIDWhy);
5098 // Recursive worker that pumps the transitive closure of a type's dependencies to the specified target level.
5099 // Dependencies include:
5103 // - canonical type, for non-canonical instantiations
5104 // - typical type, for non-typical instantiations
5108 // pVisited - used to prevent endless recursion in the case of cyclic dependencies
5110 // level - target level to pump to - must be CLASS_DEPENDENCIES_LOADED or CLASS_LOADED
5112 // if CLASS_DEPENDENCIES_LOADED, all transitive dependencies are resolved to their
5115 // if CLASS_LOADED, all type-safety checks are done on the type and all its transitive
5116 // dependencies. Note that for the CLASS_LOADED case, some types may be left
5117 // on the pending list rather than pushed to CLASS_LOADED in the case of cyclic
5118 // dependencies - the root caller must handle this.
5120 // pfBailed - if we or one of our dependencies bails early due to cyclic dependencies, we
5121 // must set *pfBailed to TRUE. Otherwise, we must *leave it unchanged* (thus, the
5122 // boolean acts as a cumulative OR.)
5124 // pPending - if one of our dependencies bailed, the type cannot yet be promoted to CLASS_LOADED
5125 // as the dependencies will be checked later and may fail a security check then.
5126 // Instead, DoFullyLoad() will add the type to the pending list - the root caller
5127 // is responsible for promoting the type after the full transitive closure has been
5128 // walked. Note that it would be just as correct to always defer to the pending list -
5129 // however, that is a little less performant.
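// A condensed sketch of the bail/pending protocol described above, with hypothetical local
// names (the real root callers live in the class loader):
//
//   BOOL fBailed = FALSE;
//   DFLPendingList pending;
//   pMT->DoFullyLoad(NULL, CLASS_LOADED, &pending, &fBailed, NULL);
//   // fBailed is a cumulative OR: callees may set it to TRUE, never back to FALSE.
//   // Types parked on 'pending' are promoted to CLASS_LOADED by the root caller once
//   // the whole transitive closure has been walked.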
5133 // Closure of locals necessary for implementing CheckForEquivalenceAndFullyLoadType.
5134 // Used so that a single value-type walking algorithm can be reused for type-equivalence walking of the method's parameters.
5135 struct DoFullyLoadLocals
5137 DoFullyLoadLocals(DFLPendingList *pPendingParam, ClassLoadLevel levelParam, MethodTable *pMT, Generics::RecursionGraph *pVisited)
5138 : newVisited(pVisited, TypeHandle(pMT))
5139 , pPending(pPendingParam)
5142 #ifdef FEATURE_TYPEEQUIVALENCE
5143 , fHasEquivalentStructParameter(FALSE)
5146 LIMITED_METHOD_CONTRACT;
5149 Generics::RecursionGraph newVisited;
5150 DFLPendingList * const pPending;
5151 const ClassLoadLevel level;
5153 #ifdef FEATURE_TYPEEQUIVALENCE
5154 BOOL fHasEquivalentStructParameter;
5158 #if defined(FEATURE_TYPEEQUIVALENCE) && !defined(DACCESS_COMPILE)
5159 static void CheckForEquivalenceAndFullyLoadType(Module *pModule, mdToken token, Module *pDefModule, mdToken defToken, const SigParser *ptr, SigTypeContext *pTypeContext, void *pData)
5168 SigPointer sigPtr(*ptr);
5170 DoFullyLoadLocals *pLocals = (DoFullyLoadLocals *)pData;
5172 if (IsTypeDefEquivalent(defToken, pDefModule))
5174 TypeHandle th = sigPtr.GetTypeHandleThrowing(pModule, pTypeContext, ClassLoader::LoadTypes, (ClassLoadLevel)(pLocals->level - 1));
5175 CONSISTENCY_CHECK(!th.IsNull());
5177 th.DoFullyLoad(&pLocals->newVisited, pLocals->level, pLocals->pPending, &pLocals->fBailed, NULL);
5178 pLocals->fHasEquivalentStructParameter = TRUE;
5182 #endif // defined(FEATURE_TYPEEQUIVALENCE) && !defined(DACCESS_COMPILE)
5184 #endif //!DACCESS_COMPILE
5186 void MethodTable::DoFullyLoad(Generics::RecursionGraph * const pVisited, const ClassLoadLevel level, DFLPendingList * const pPending,
5187 BOOL * const pfBailed, const InstantiationContext * const pInstContext)
5189 STANDARD_VM_CONTRACT;
5191 _ASSERTE(level == CLASS_LOADED || level == CLASS_DEPENDENCIES_LOADED);
5192 _ASSERTE(pfBailed != NULL);
5193 _ASSERTE(!(level == CLASS_LOADED && pPending == NULL));
5196 #ifndef DACCESS_COMPILE
5198 if (Generics::RecursionGraph::HasSeenType(pVisited, TypeHandle(this)))
5204 if (GetLoadLevel() >= level)
5209 if (level == CLASS_LOADED)
5211 UINT numTH = pPending->Count();
5212 TypeHandle *pTypeHndPending = pPending->Table();
5213 for (UINT idxPending = 0; idxPending < numTH; idxPending++)
5215 if (pTypeHndPending[idxPending] == this)
5224 // First ensure that we're loaded to just below CLASS_DEPENDENCIES_LOADED
5225 ClassLoader::EnsureLoaded(this, (ClassLoadLevel) (level-1));
5227 CONSISTENCY_CHECK(IsRestored());
5228 CONSISTENCY_CHECK(!HasApproxParent());
5230 if ((level == CLASS_LOADED) && !IsSharedByGenericInstantiations())
5232 _ASSERTE(GetLoadLevel() >= CLASS_DEPENDENCIES_LOADED);
5233 ClassLoader::ValidateMethodsWithCovariantReturnTypes(this);
5238 Generics::RecursionGraph newVisited(pVisited, TypeHandle(this));
5240 // Fully load the element type
5241 GetArrayElementTypeHandle().DoFullyLoad(&newVisited, level, pPending, pfBailed, pInstContext);
5244 DoFullyLoadLocals locals(pPending, level, this, pVisited);
5246 bool fNeedsSanityChecks = true;
5248 #ifdef FEATURE_READYTORUN
5249 Module * pModule = GetModule();
5251 // No sanity checks for ready-to-run compiled images if possible
5252 if (pModule->IsSystem() || (pModule->IsReadyToRun() && pModule->GetReadyToRunInfo()->SkipTypeValidation()))
5253 fNeedsSanityChecks = false;
5256 bool fNeedAccessChecks = (level == CLASS_LOADED) &&
5257 fNeedsSanityChecks &&
5258 IsTypicalTypeDefinition();
5260 TypeHandle typicalTypeHnd;
5262 // Fully load the typical instantiation. Make sure that this is done before loading other dependencies
5263 // as the recursive generics detection algorithm needs to examine typical instantiations of the types
5265 if (!IsTypicalTypeDefinition())
5267 typicalTypeHnd = ClassLoader::LoadTypeDefThrowing(GetModule(), GetCl(),
5268 ClassLoader::ThrowIfNotFound, ClassLoader::PermitUninstDefOrRef, tdNoTypes,
5269 (ClassLoadLevel) (level - 1));
5270 CONSISTENCY_CHECK(!typicalTypeHnd.IsNull());
5271 typicalTypeHnd.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5273 else if (level == CLASS_DEPENDENCIES_LOADED && HasInstantiation())
5275 // This is a typical instantiation of a generic type. When attaining CLASS_DEPENDENCIES_LOADED, the
5276 // recursive inheritance graph (ECMA part.II Section 9.2) will be constructed and checked for "expanding
5277 // cycles" to detect infinite recursion, e.g. A<T> : B<A<A<T>>>.
5279 // The dependencies loaded by this method (parent type, implemented interfaces, generic arguments)
5280 // ensure that we will generate the finite instantiation closure as defined in ECMA. This load level
5281 // is not being attained under lock so it's not possible to use TypeVarTypeDesc to represent graph
5282 // nodes because multiple threads trying to fully load types from the closure at the same time would
5283 // interfere with each other. In addition, the graph is only used for loading and can be discarded
5284 // when the closure is fully loaded (the TypeVarTypeDescs need to stay).
5286 // The graph is represented by Generics::RecursionGraph instances organized in a linked list with
5287 // each of them holding part of the graph. They live on the stack and are cleaned up automatically
5288 // before returning from DoFullyLoad.
5290 if (locals.newVisited.CheckForIllegalRecursion())
5292 // An expanding cycle was detected, this type is part of a closure that is defined recursively.
5293 IMDInternalImport* pInternalImport = GetModule()->GetMDImport();
5294 GetModule()->GetAssembly()->ThrowTypeLoadException(pInternalImport, GetCl(), IDS_CLASSLOAD_GENERICTYPE_RECURSIVE);
5298 // Fully load the parent
5299 MethodTable *pParentMT = GetParentMethodTable();
5303 pParentMT->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5305 if (fNeedAccessChecks)
5307 if (!IsComObjectType()) //RCW's are special - they are manufactured by the runtime and derive from the non-public type System.__ComObject
5309 // A transparent type should not be allowed to derive from a critical type.
5310 // However since this has never been enforced before we have many classes that
5311 // violate this rule. Enforcing it now will be a breaking change.
5312 DoAccessibilityCheck(this, pParentMT, E_ACCESSDENIED);
5317 // Fully load the interfaces
5318 MethodTable::InterfaceMapIterator it = IterateInterfaceMap();
5321 MethodTable* pItf = it.GetInterfaceApprox();
5322 pItf->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5324 if (fNeedAccessChecks)
5326 if (IsInterfaceDeclaredOnClass(it.GetIndex())) // only test directly implemented interfaces (it's
5327 // legal for an inherited interface to be private.)
5329 // A transparent type should not be allowed to implement a critical interface.
5330 // However since this has never been enforced before we have many classes that
5331 // violate this rule. Enforcing it now will be a breaking change.
5332 DoAccessibilityCheck(this, pItf, IDS_CLASSLOAD_INTERFACE_NO_ACCESS);
5337 // Fully load the generic arguments
5338 Instantiation inst = GetInstantiation();
5339 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
5341 inst[i].DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5344 // Fully load the canonical methodtable
5345 if (!IsCanonicalMethodTable())
5347 GetCanonicalMethodTable()->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, NULL);
5350 if (fNeedsSanityChecks)
5352 // Fully load the exact field types for value type fields
5353 // Note that MethodTableBuilder::InitializeFieldDescs() loads the type of the
5354 // field only up to level CLASS_LOAD_APPROXPARENTS.
5355 FieldDesc *pField = GetApproxFieldDescListRaw();
5356 FieldDesc *pFieldEnd = pField + GetNumStaticFields() + GetNumIntroducedInstanceFields();
5358 while (pField < pFieldEnd)
5360 if (pField->GetFieldType() == ELEMENT_TYPE_VALUETYPE)
5362 TypeHandle th = pField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level - 1));
5363 CONSISTENCY_CHECK(!th.IsNull());
5365 th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5367 if (fNeedAccessChecks)
5369 DoAccessibilityCheck(this, th.GetMethodTable(), E_ACCESSDENIED);
5376 // Fully load the exact field types for generic value type fields
5377 if (HasGenericsStaticsInfo())
5379 FieldDesc *pGenStaticField = GetGenericsStaticFieldDescs();
5380 FieldDesc *pGenStaticFieldEnd = pGenStaticField + GetNumStaticFields();
5381 while (pGenStaticField < pGenStaticFieldEnd)
5383 if (pGenStaticField->GetFieldType() == ELEMENT_TYPE_VALUETYPE)
5385 TypeHandle th = pGenStaticField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level - 1));
5386 CONSISTENCY_CHECK(!th.IsNull());
5388 th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5390 // The accessibility check is not necessary for generic fields. The generic fields are copies
5391 // of the regular fields; the only difference is that they have the exact type.
5398 // Fully load exact parameter types for value type parameters opted into equivalence. This is required in case GC is
5399 // triggered during prestub. GC needs to know where references are on the stack and if the parameter (as read from
5400 // the method signature) is a structure, it relies on the loaded type to get the layout information from. For ordinary
5401 // structures we are guaranteed to have loaded the type before entering prestub - the caller must have loaded it.
5402 // However due to type equivalence, the caller may work with a different type than what's in the method signature.
5404 // We deal with this situation by eagerly loading types that may cause these problems, i.e. value types in signatures of
5405 // methods introduced by this type. To avoid the perf hit for scenarios without type equivalence, we only preload
5406 // structures that are marked as type equivalent. In the no-PIA world
5407 // these structures are called "local types" and are usually generated automatically by the compiler. Note that there
5408 // is related logic in code:CompareTypeDefsForEquivalence that declares two tokens corresponding to structures as
5409 // equivalent based on an extensive set of equivalency checks.
5411 #ifdef FEATURE_TYPEEQUIVALENCE
5412 if ((level == CLASS_LOADED)
5413 && (GetCl() != mdTypeDefNil)
5414 && !ContainsGenericVariables())
5416 MethodTable::IntroducedMethodIterator itMethods(this, FALSE);
5417 for (; itMethods.IsValid(); itMethods.Next())
5419 MethodDesc * pMD = itMethods.GetMethodDesc();
5420 if (!pMD->DoesNotHaveEquivalentValuetypeParameters() && pMD->IsVirtual())
5422 locals.fHasEquivalentStructParameter = FALSE;
5423 pMD->WalkValueTypeParameters(this, CheckForEquivalenceAndFullyLoadType, &locals);
5424 if (!locals.fHasEquivalentStructParameter)
5425 pMD->SetDoesNotHaveEquivalentValuetypeParameters();
5429 #endif //FEATURE_TYPEEQUIVALENCE
5431 // The rules for constraint cycles are same as rules for access checks
5432 if (fNeedAccessChecks)
5434 // Check for cyclical class constraints
5436 Instantiation formalParams = GetInstantiation();
5438 for (DWORD i = 0; i < formalParams.GetNumArgs(); i++)
5440 BOOL Bounded(TypeVarTypeDesc *tyvar, DWORD depth);
5442 TypeVarTypeDesc *pTyVar = formalParams[i].AsGenericVariable();
5443 pTyVar->LoadConstraints(CLASS_DEPENDENCIES_LOADED);
5444 if (!Bounded(pTyVar, formalParams.GetNumArgs()))
5446 COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_VAR_CONSTRAINTS);
5449 DoAccessibilityCheckForConstraints(this, pTyVar, E_ACCESSDENIED);
5453 // Check for cyclical method constraints
5455 if (GetCl() != mdTypeDefNil) // Make sure this is actually a metadata type!
5457 MethodTable::IntroducedMethodIterator itMethods(this, FALSE);
5458 for (; itMethods.IsValid(); itMethods.Next())
5460 MethodDesc * pMD = itMethods.GetMethodDesc();
5462 if (pMD->IsGenericMethodDefinition() && pMD->IsTypicalMethodDefinition())
5464 BOOL fHasCircularClassConstraints = TRUE;
5465 BOOL fHasCircularMethodConstraints = TRUE;
5467 pMD->LoadConstraintsForTypicalMethodDefinition(&fHasCircularClassConstraints, &fHasCircularMethodConstraints, CLASS_DEPENDENCIES_LOADED);
5469 if (fHasCircularClassConstraints)
5471 COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_VAR_CONSTRAINTS);
5473 if (fHasCircularMethodConstraints)
5475 COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_MVAR_CONSTRAINTS);
5486 if (LoggingOn(LF_CLASSLOADER, LL_INFO10000))
5489 TypeString::AppendTypeDebug(name, this);
5490 LOG((LF_CLASSLOADER, LL_INFO10000, "PHASEDLOAD: Completed full dependency load of type %s\n", name.GetUTF8()));
5496 case CLASS_DEPENDENCIES_LOADED:
5497 SetIsDependenciesLoaded();
5501 if (!IsTypicalTypeDefinition() &&
5502 !IsSharedByGenericInstantiations())
5504 TypeHandle thThis = TypeHandle(this);
5506 // If we got here, we are about to mark a generic instantiation as fully loaded. Before we do so,
5507 // check to see if it has constraints that aren't being satisfied.
5508 SatisfiesClassConstraints(thThis, typicalTypeHnd, pInstContext);
5512 // Validate implementation of virtual static methods on all implemented interfaces unless:
5513 // 1) The type resides in a module where sanity checks are disabled (such as System.Private.CoreLib, or an
5514 // R2R module with type checks disabled)
5515 // 2) There are no virtual static methods defined on any of the interfaces implemented by this type;
5516 // 3) The type is abstract in which case it's allowed to leave some virtual static methods unimplemented
5517 // akin to equivalent behavior of virtual instance method overriding in abstract classes;
5518 // 4) The type is not the typical type definition. (The typical type is always checked.)
5520 if (fNeedsSanityChecks &&
5521 IsTypicalTypeDefinition() &&
5524 if (HasVirtualStaticMethods())
5525 VerifyThatAllVirtualStaticMethodsAreImplemented();
5530 // We couldn't complete security checks on some dependency because it is already being processed by one of our callers.
5531 // Do not mark this class fully loaded yet. Put it on the pending list and it will be marked fully loaded when
5532 // everything unwinds.
5536 TypeHandle *pTHPending = pPending->AppendThrowing();
5537 *pTHPending = TypeHandle(this);
5541 // Finally, mark this method table as fully loaded
5547 _ASSERTE(!"Can't get here.");
5551 #endif //!DACCESS_COMPILE
5552 } //MethodTable::DoFullyLoad
5555 #ifndef DACCESS_COMPILE
5557 #ifdef FEATURE_COMINTEROP
5559 //==========================================================================================
5560 BOOL MethodTable::IsExtensibleRCW()
5562 WRAPPER_NO_CONTRACT;
5563 _ASSERTE(GetClass());
5564 return IsComObjectType() && !GetClass()->IsComImport();
5567 //==========================================================================================
5568 OBJECTHANDLE MethodTable::GetOHDelegate()
5570 WRAPPER_NO_CONTRACT;
5571 _ASSERTE(GetClass());
5572 return GetClass()->GetOHDelegate();
5575 //==========================================================================================
5576 void MethodTable::SetOHDelegate (OBJECTHANDLE _ohDelegate)
5578 LIMITED_METHOD_CONTRACT;
5579 _ASSERTE(GetClass());
5580 GetClass()->SetOHDelegate(_ohDelegate);
5583 //==========================================================================================
5584 // Helper to skip over COM class in the hierarchy
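// Per the asserts below, the hierarchy being walked is expected to look like:
//
//   <this type> -> [ComImport class] -> System.__ComObject -> System.MarshalByRefObject -> System.Object
//
// so when the immediate parent is a ComImport type, the helper returns
// System.MarshalByRefObject; otherwise it simply returns the immediate parent.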
5585 MethodTable* MethodTable::GetComPlusParentMethodTable()
5595 MethodTable* pParent = GetParentMethodTable();
5597 if (pParent && pParent->IsComImport())
5599 // Skip the single ComImport class we expect
5600 _ASSERTE(pParent->GetParentMethodTable() != NULL);
5601 pParent = pParent->GetParentMethodTable();
5602 _ASSERTE(!pParent->IsComImport());
5604 // Skip over System.__ComObject, expect System.MarshalByRefObject
5605 pParent=pParent->GetParentMethodTable();
5606 _ASSERTE(pParent != NULL);
5607 _ASSERTE(pParent->GetParentMethodTable() != NULL);
5608 _ASSERTE(pParent->GetParentMethodTable() == g_pObjectClass);
5614 #endif // FEATURE_COMINTEROP
5616 #endif // !DACCESS_COMPILE
5618 //==========================================================================================
5619 // Return a pointer to the dictionary for an instantiated type
5620 // Return NULL if not instantiated
5621 PTR_Dictionary MethodTable::GetDictionary()
5623 LIMITED_METHOD_DAC_CONTRACT;
5625 if (HasInstantiation())
5627 // The instantiation for this class is stored in the type slots table
5628 // *after* any inherited slots
5629 return GetPerInstInfo()[GetNumDicts()-1];
5637 //==========================================================================================
5638 // As above, but assert if an instantiated type is not restored
5639 Instantiation MethodTable::GetInstantiation()
5641 LIMITED_METHOD_CONTRACT;
5643 if (HasInstantiation())
5645 PTR_GenericsDictInfo pDictInfo = GetGenericsDictInfo();
5646 return Instantiation(GetPerInstInfo()[pDictInfo->m_wNumDicts-1]->GetInstantiation(), pDictInfo->m_wNumTyPars);
5650 return Instantiation();
5654 //==========================================================================================
5655 // Obtain instantiation from an instantiated type or a pointer to the
5656 // element type of an array
5657 Instantiation MethodTable::GetClassOrArrayInstantiation()
5659 LIMITED_METHOD_CONTRACT;
5662 return GetArrayInstantiation();
5665 return GetInstantiation();
5669 //==========================================================================================
5670 Instantiation MethodTable::GetArrayInstantiation()
5672 LIMITED_METHOD_CONTRACT;
5674 _ASSERTE(IsArray());
5675 return Instantiation((TypeHandle *)&m_ElementTypeHnd, 1);
5678 //==========================================================================================
5679 CorElementType MethodTable::GetInternalCorElementType()
5681 LIMITED_METHOD_CONTRACT;
5684 // This should not touch the EEClass, at least not in the
5685 // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE.
5688 switch (GetFlag(enum_flag_Category_ElementTypeMask))
5690 case enum_flag_Category_Array:
5691 ret = ELEMENT_TYPE_ARRAY;
5694 case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray:
5695 ret = ELEMENT_TYPE_SZARRAY;
5698 case enum_flag_Category_ValueType:
5699 ret = ELEMENT_TYPE_VALUETYPE;
5702 case enum_flag_Category_PrimitiveValueType:
5703 // This path should only be taken for the builtin CoreLib types
5704 // and primitive valuetypes
5705 ret = GetClass()->GetInternalCorElementType();
5706 _ASSERTE((ret != ELEMENT_TYPE_CLASS) &&
5707 (ret != ELEMENT_TYPE_VALUETYPE));
5711 ret = ELEMENT_TYPE_CLASS;
5715 // DAC may be targeting a dump; dumps do not guarantee you can retrieve the EEClass from
5716 // the MethodTable so this is not expected to work in a DAC build.
5717 #if defined(_DEBUG) && !defined(DACCESS_COMPILE)
5720 PTR_EEClass pClass = GetClass();
5721 if (ret != pClass->GetInternalCorElementType())
5723 _ASSERTE(!"Mismatched results in MethodTable::GetInternalCorElementType");
5726 #endif // defined(_DEBUG) && !defined(DACCESS_COMPILE)
5730 //==========================================================================================
5731 CorElementType MethodTable::GetVerifierCorElementType()
5733 LIMITED_METHOD_CONTRACT;
5736 // This should not touch the EEClass, at least not in the
5737 // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE.
5740 switch (GetFlag(enum_flag_Category_ElementTypeMask))
5742 case enum_flag_Category_Array:
5743 ret = ELEMENT_TYPE_ARRAY;
5746 case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray:
5747 ret = ELEMENT_TYPE_SZARRAY;
5750 case enum_flag_Category_ValueType:
5751 ret = ELEMENT_TYPE_VALUETYPE;
5754 case enum_flag_Category_PrimitiveValueType:
5756 // This is the only difference from MethodTable::GetInternalCorElementType()
5758 if (IsTruePrimitive() || IsEnum())
5759 ret = GetClass()->GetInternalCorElementType();
5761 ret = ELEMENT_TYPE_VALUETYPE;
5765 ret = ELEMENT_TYPE_CLASS;
5772 //==========================================================================================
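// For reference: the three element-type accessors (GetInternalCorElementType,
// GetVerifierCorElementType, GetSignatureCorElementType) differ only in the
// enum_flag_Category_PrimitiveValueType case:
//   - GetInternalCorElementType  always defers to the EEClass;
//   - GetVerifierCorElementType  defers only for true primitives and enums, otherwise
//                                reports ELEMENT_TYPE_VALUETYPE;
//   - GetSignatureCorElementType defers only for true primitives.
// Arrays, ordinary value types, and reference types are reported identically by all three.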
5773 CorElementType MethodTable::GetSignatureCorElementType()
5775 LIMITED_METHOD_CONTRACT;
5778 // This should not touch the EEClass, at least not in the
5779 // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE.
5782 switch (GetFlag(enum_flag_Category_ElementTypeMask))
5784 case enum_flag_Category_Array:
5785 ret = ELEMENT_TYPE_ARRAY;
5788 case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray:
5789 ret = ELEMENT_TYPE_SZARRAY;
5792 case enum_flag_Category_ValueType:
5793 ret = ELEMENT_TYPE_VALUETYPE;
5796 case enum_flag_Category_PrimitiveValueType:
5798 // This is the only difference from MethodTable::GetInternalCorElementType()
5800 if (IsTruePrimitive())
5801 ret = GetClass()->GetInternalCorElementType();
5803 ret = ELEMENT_TYPE_VALUETYPE;
5807 ret = ELEMENT_TYPE_CLASS;
5814 #ifndef DACCESS_COMPILE
5816 //==========================================================================================
5817 void MethodTable::SetInternalCorElementType (CorElementType _NormType)
5819 WRAPPER_NO_CONTRACT;
5823 case ELEMENT_TYPE_CLASS:
5824 _ASSERTE(!IsArray());
5827 case ELEMENT_TYPE_VALUETYPE:
5828 SetFlag(enum_flag_Category_ValueType);
5829 _ASSERTE(GetFlag(enum_flag_Category_Mask) == enum_flag_Category_ValueType);
5832 SetFlag(enum_flag_Category_PrimitiveValueType);
5833 _ASSERTE(GetFlag(enum_flag_Category_Mask) == enum_flag_Category_PrimitiveValueType);
5837 GetClass()->SetInternalCorElementType(_NormType);
5838 _ASSERTE(GetInternalCorElementType() == _NormType);
5841 #endif // !DACCESS_COMPILE
5843 #ifdef FEATURE_TYPEEQUIVALENCE
5844 #ifndef DACCESS_COMPILE
5846 WORD GetEquivalentMethodSlot(MethodTable * pOldMT, MethodTable * pNewMT, WORD wMTslot, BOOL *pfFound)
5855 WORD wVTslot = wMTslot;
5857 #ifdef FEATURE_COMINTEROP
5858 // Get the COM vtable slot corresponding to the given MT slot
5859 if (pOldMT->IsSparseForCOMInterop())
5860 wVTslot = pOldMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(wMTslot);
5862 // If the other MT is not sparse, we can return the COM slot directly
5863 if (!pNewMT->IsSparseForCOMInterop())
5865 if (wVTslot < pNewMT->GetNumVirtuals())
5871 // Otherwise we iterate over all virtuals in the other MT trying to find a match
5872 for (WORD wSlot = 0; wSlot < pNewMT->GetNumVirtuals(); wSlot++)
5874 if (wVTslot == pNewMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(wSlot))
5881 _ASSERTE(!*pfFound);
5885 // No COM means there is no sparse interface
5886 if (wVTslot < pNewMT->GetNumVirtuals())
5891 #endif // FEATURE_COMINTEROP
5893 #endif // #ifndef DACCESS_COMPILE
5894 #endif // #ifdef FEATURE_TYPEEQUIVALENCE
5896 //==========================================================================================
5898 MethodTable::FindEncodedMapDispatchEntry(
5901 DispatchMapEntry * pEntry)
5904 // NOTE: LookupDispatchMapType may or may not throw. Currently, it
5905 // should never throw because lazy interface restore is disabled.
5909 PRECONDITION(CheckPointer(pEntry));
5910 PRECONDITION(typeID != TYPE_ID_THIS_CLASS);
5913 CONSISTENCY_CHECK(HasDispatchMap());
5915 MethodTable * dispatchTokenType = GetThread()->GetDomain()->LookupType(typeID);
5917 // Search for an exact type match.
5919 DispatchMap::EncodedMapIterator it(this);
5920 for (; it.IsValid(); it.Next())
5922 DispatchMapEntry * pCurEntry = it.Entry();
5923 if (pCurEntry->GetSlotNumber() == slotNumber)
5925 if (DispatchMapTypeMatchesMethodTable(pCurEntry->GetTypeID(), dispatchTokenType))
5927 *pEntry = *pCurEntry;
5934 // Repeat the search if any variance is involved, allowing a CanCastTo match. (We do
5935 // this in a separate pass because we want to avoid touching the type
5936 // to see if it has variance or not)
5938 // NOTE: CERs are not guaranteed for interfaces with co- and contra-variance involved.
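// Illustrative example (not taken from the code): a type implementing IEnumerable<string>
// records a dispatch map entry for IEnumerable<string>. A call made through
// IEnumerable<object> finds no exact match in the first pass, but this second pass accepts
// the entry because IEnumerable<out T> is covariant and string is castable to object.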
5939 if (dispatchTokenType->HasVariance() || dispatchTokenType->HasTypeEquivalence())
5941 DispatchMap::EncodedMapIterator it(this);
5942 for (; it.IsValid(); it.Next())
5944 DispatchMapEntry * pCurEntry = it.Entry();
5945 if (pCurEntry->GetSlotNumber() == slotNumber)
5947 #ifndef DACCESS_COMPILE
5948 MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID());
5949 //@TODO: This is currently not guaranteed to work without throwing,
5950 //@TODO: even with lazy interface restore disabled.
5951 if (dispatchTokenType->HasVariance() &&
5952 pCurEntryType->CanCastByVarianceToInterfaceOrDelegate(dispatchTokenType, NULL))
5954 *pEntry = *pCurEntry;
5958 if (dispatchTokenType->HasInstantiation() && dispatchTokenType->HasTypeEquivalence())
5960 if (dispatchTokenType->IsEquivalentTo(pCurEntryType))
5962 *pEntry = *pCurEntry;
5966 #endif // !DACCESS_COMPILE
5968 #if !defined(DACCESS_COMPILE) && defined(FEATURE_TYPEEQUIVALENCE)
5969 if (this->HasTypeEquivalence() &&
5970 !dispatchTokenType->HasInstantiation() &&
5971 dispatchTokenType->HasTypeEquivalence() &&
5972 dispatchTokenType->GetClass()->IsEquivalentType())
5974 _ASSERTE(dispatchTokenType->IsInterface());
5975 MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID());
5977 if (pCurEntryType->IsEquivalentTo(dispatchTokenType))
5979 MethodDesc * pMD = dispatchTokenType->GetMethodDescForSlot(slotNumber);
5980 _ASSERTE(FitsIn<WORD>(slotNumber));
5981 BOOL fNewSlotFound = FALSE;
5982 DWORD newSlot = GetEquivalentMethodSlot(
5985 static_cast<WORD>(slotNumber),
5987 if (fNewSlotFound && (newSlot == pCurEntry->GetSlotNumber()))
5989 MethodDesc * pNewMD = pCurEntryType->GetMethodDescForSlot(newSlot);
5992 MetaSig msignew(pNewMD);
5994 if (MetaSig::CompareMethodSigs(msig, msignew, FALSE))
5996 *pEntry = *pCurEntry;
6006 } // MethodTable::FindEncodedMapDispatchEntry
6008 //==========================================================================================
6009 BOOL MethodTable::FindDispatchEntryForCurrentType(UINT32 typeID,
6011 DispatchMapEntry *pEntry)
6017 PRECONDITION(CheckPointer(pEntry));
6018 PRECONDITION(typeID != TYPE_ID_THIS_CLASS);
6023 if (HasDispatchMap())
6025 fRes = FindEncodedMapDispatchEntry(
6026 typeID, slotNumber, pEntry);
6032 //==========================================================================================
6033 BOOL MethodTable::FindDispatchEntry(UINT32 typeID,
6035 DispatchMapEntry *pEntry)
6042 POSTCONDITION(!RETVAL || pEntry->IsValid());
6043 PRECONDITION(typeID != TYPE_ID_THIS_CLASS);
6046 // Start at the current type and work up the inheritance chain
6047 MethodTable *pCurMT = this;
6048 UINT32 iCurInheritanceChainDelta = 0;
6049 while (pCurMT != NULL)
6051 if (pCurMT->FindDispatchEntryForCurrentType(
6052 typeID, slotNumber, pEntry))
6056 pCurMT = pCurMT->GetParentMethodTable();
6057 iCurInheritanceChainDelta++;
6062 #ifndef DACCESS_COMPILE
6064 void ThrowExceptionForAbstractOverride(
6065 MethodTable *pTargetClass,
6066 MethodTable *pInterfaceMT,
6067 MethodDesc *pInterfaceMD)
6069 LIMITED_METHOD_CONTRACT;
6071 SString assemblyName;
6073 pTargetClass->GetAssembly()->GetDisplayName(assemblyName);
6075 SString strInterfaceName;
6076 TypeString::AppendType(strInterfaceName, TypeHandle(pInterfaceMT));
6078 SString strMethodName;
6079 TypeString::AppendMethod(strMethodName, pInterfaceMD, pInterfaceMD->GetMethodInstantiation());
6081 SString strTargetClassName;
6082 TypeString::AppendType(strTargetClassName, pTargetClass);
6085 kEntryPointNotFoundException,
6086 IDS_CLASSLOAD_METHOD_NOT_IMPLEMENTED,
6093 #endif // !DACCESS_COMPILE
6095 //==========================================================================================
6097 // 1. Typed (interface) contract
6098 // a. To non-virtual implementation (NYI). Just
6099 // return the DispatchSlot as the implementation
6100 // b. Mapped virtually to virtual slot on 'this'. Need to
6101 // further resolve the new 'this' virtual slot.
6102 // 2. 'this' contract
6103 // a. To non-virtual implementation. Return the DispatchSlot
6104 // as the implementation.
6105 // b. Mapped virtually to another virtual slot. Need to further
6106 // resolve the new slot on 'this'.
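// Illustrative C# sketch (not from this file) of case 1b followed by case 2:
//
//   interface IFoo { void M(); }
//   class Base : IFoo { public virtual void M() { } }
//   class Derived : Base { public override void M() { } }
//
// Dispatching IFoo::M on a Derived instance first maps the interface contract to the
// virtual slot Base assigned to M (case 1b), then resolves that 'this' slot against the
// receiver's vtable, yielding Derived's override (case 2).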
6108 MethodTable::FindDispatchImpl(
6111 DispatchSlot * pImplSlot,
6112 BOOL throwOnConflict)
6119 PRECONDITION(CheckPointer(pImplSlot));
6120 POSTCONDITION(!RETVAL || !pImplSlot->IsNull() || IsComObjectType());
6123 LOG((LF_LOADER, LL_INFO10000, "SD: MT::FindDispatchImpl: searching %s.\n", GetClass()->GetDebugClassName()));
6125 ///////////////////////////////////
6126 // 1. Typed (interface) contract
6128 INDEBUG(MethodTable *dbg_pMTTok = NULL; dbg_pMTTok = this;)
6129 DispatchMapEntry declEntry;
6130 DispatchMapEntry implEntry;
6132 #ifndef DACCESS_COMPILE
6133 if (typeID != TYPE_ID_THIS_CLASS)
6135 INDEBUG(dbg_pMTTok = GetThread()->GetDomain()->LookupType(typeID));
6137 if (!FindDispatchEntry(typeID, slotNumber, &e))
6139 // Figure out the interface being called
6140 MethodTable *pIfcMT = GetThread()->GetDomain()->LookupType(typeID);
6142 // Figure out which method of the interface the caller requested.
6143 MethodDesc * pIfcMD = pIfcMT->GetMethodDescForSlot(slotNumber);
6145 // A call to an array thru IList<T> (or IEnumerable<T> or ICollection<T>) has to be handled specially.
6146 // These interfaces are "magic" (mostly due to working set concerns - they are created on demand internally
6147 // even though semantically, these are static interfaces.)
6149 // NOTE: CERs are not currently supported with generic array interfaces.
6152 // At this point, we know that we're trying to cast an array to an interface and that the normal static lookup failed.
6154 // FindDispatchImpl assumes that the cast is legal so we should be able to assume now that it is a valid
6155 // IList<T> call thru an array.
6157 // Get the MT of IList<T> or IReadOnlyList<T>
6160 // Quick sanity check
6161 if (!(pIfcMT->HasInstantiation()))
6163 _ASSERTE(!"Should not have gotten here. If you did, it's probably because multiple interface instantiation hasn't been checked in yet. This code only works on top of that.");
6167 // Get the type of T (as in IList<T>)
6168 TypeHandle theT = pIfcMT->GetInstantiation()[0];
6170 // Retrieve the corresponding method of SZArrayHelper. This is what will actually execute.
6171 // This method will be an instantiation of a generic method. I.e. if the caller requested
6172 // IList<T>.Meth(), he will actually be diverted to SZArrayHelper.Meth<T>().
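// Illustrative C# sketch (not from this file, names per the comment above):
//
//   int[] arr = new int[3];
//   IList<int> list = arr;      // arrays implement IList<T> "magically"
//   bool b = list.Contains(1);  // diverted to the matching SZArrayHelper.Contains<int>(...)
//
// i.e. there is no real vtable slot on the array type for IList<int> members; the call
// lands in the corresponding generic instantiation of an SZArrayHelper method.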
6173 MethodDesc * pActualImplementor = GetActualImplementationForArrayGenericIListOrIReadOnlyListMethod(pIfcMD, theT);
6175 // Now, construct a DispatchSlot to return in *pImplSlot
6176 DispatchSlot ds(pActualImplementor->GetMethodEntryPoint());
6178 if (pImplSlot != NULL)
6189 // See if we can find a default method from one of the implemented interfaces
6192 // Try exact match first
6193 MethodDesc *pDefaultMethod = NULL;
6195 FindDefaultInterfaceImplementationFlags flags = FindDefaultInterfaceImplementationFlags::InstantiateFoundMethodDesc;
6196 if (throwOnConflict)
6197 flags = flags | FindDefaultInterfaceImplementationFlags::ThrowOnConflict;
6199 BOOL foundDefaultInterfaceImplementation = FindDefaultInterfaceImplementation(
6200 pIfcMD, // the interface method being resolved
6201 pIfcMT, // the interface being resolved
6205 // If there's no exact match, try a variant match
6206 if (!foundDefaultInterfaceImplementation && pIfcMT->HasVariance())
6208 flags = flags | FindDefaultInterfaceImplementationFlags::AllowVariance;
6209 foundDefaultInterfaceImplementation = FindDefaultInterfaceImplementation(
6210 pIfcMD, // the interface method being resolved
6211 pIfcMT, // the interface being resolved
6216 if (foundDefaultInterfaceImplementation)
6219 // If the default implementation we found is abstract, we hit a reabstraction.
6221 // interface IFoo { void Frob() { ... } }
6222 // interface IBar { abstract void IFoo.Frob() }
6223 // class Foo : IBar { /* IFoo.Frob not implemented here */ }
6225 if (pDefaultMethod->IsAbstract())
6227 if (throwOnConflict)
6229 ThrowExceptionForAbstractOverride(this, pIfcMT, pIfcMD);
6234 // Now, construct a DispatchSlot to return in *pImplSlot
6235 DispatchSlot ds(pDefaultMethod->GetMethodEntryPoint());
6237 if (pImplSlot != NULL)
6247 // This contract is not implemented by this class or any parent class.
6252 /////////////////////////////////
6253 // 1.1. Update the typeID and slotNumber so that the full search can commence below
6254 typeID = TYPE_ID_THIS_CLASS;
6255 slotNumber = e.GetTargetSlotNumber();
6257 #endif // !DACCESS_COMPILE
6259 //////////////////////////////////
6260 // 2. 'this' contract
6262 // Just grab the target out of the vtable
6263 *pImplSlot = GetRestoredSlot(slotNumber);
6265 // Successfully determined the target for the given contract
6269 #ifndef DACCESS_COMPILE
6271 struct MatchCandidate
6277 void ThrowExceptionForConflictingOverride(
6278 MethodTable *pTargetClass,
6279 MethodTable *pInterfaceMT,
6280 MethodDesc *pInterfaceMD)
6282 LIMITED_METHOD_CONTRACT;
6284 SString assemblyName;
6286 pTargetClass->GetAssembly()->GetDisplayName(assemblyName);
6288 SString strInterfaceName;
6289 TypeString::AppendType(strInterfaceName, TypeHandle(pInterfaceMT));
6291 SString strMethodName;
6292 TypeString::AppendMethod(strMethodName, pInterfaceMD, pInterfaceMD->GetMethodInstantiation());
6294 SString strTargetClassName;
6295 TypeString::AppendType(strTargetClassName, pTargetClass);
6298 kAmbiguousImplementationException,
6299 IDS_CLASSLOAD_AMBIGUOUS_OVERRIDE,
6308 bool TryGetCandidateImplementation(
6310 MethodDesc *interfaceMD,
6311 MethodTable *interfaceMT,
6312 FindDefaultInterfaceImplementationFlags findDefaultImplementationFlags,
6313 MethodDesc **candidateMD,
6314 ClassLoadLevel level)
6316 bool allowVariance = (findDefaultImplementationFlags & FindDefaultInterfaceImplementationFlags::AllowVariance) != FindDefaultInterfaceImplementationFlags::None;
6317 bool instantiateMethodInstantiation = (findDefaultImplementationFlags & FindDefaultInterfaceImplementationFlags::InstantiateFoundMethodDesc) != FindDefaultInterfaceImplementationFlags::None;
6319 *candidateMD = NULL;
6321 MethodDesc *candidateMaybe = NULL;
6322 if (pMT == interfaceMT)
6324 if (!interfaceMD->IsAbstract())
6327 candidateMaybe = interfaceMD;
6330 else if (pMT->CanCastToInterface(interfaceMT))
6332 if (pMT->HasSameTypeDefAs(interfaceMT))
6334 if (allowVariance && !interfaceMD->IsAbstract())
6336 // Generic variance match - we'll instantiate pCurMD with the right type arguments later
6337 candidateMaybe = interfaceMD;
6340 else if (!interfaceMD->IsStatic())
6343 // A more specific interface - search for a methodimpl for an explicit override
6344 // Implicit overrides in default interface methods are not allowed
6346 MethodTable::MethodIterator methodIt(pMT);
6347 for (; methodIt.IsValid() && candidateMaybe == NULL; methodIt.Next())
6349 MethodDesc *pMD = methodIt.GetMethodDesc();
6350 int targetSlot = interfaceMD->GetSlot();
6352 if (pMD->IsMethodImpl())
6354 // We have a MethodImpl with slots - iterate over all the declarations it's implementing,
6355 // looking for the interface method we need.
6356 MethodImpl::Iterator it(pMD);
6357 for (; it.IsValid() && candidateMaybe == NULL; it.Next())
6359 MethodDesc *pDeclMD = it.GetMethodDesc();
6361 // Is this the right slot?
6362 if (pDeclMD->GetSlot() != targetSlot)
6365 // Is this the right interface?
6366 if (!pDeclMD->HasSameMethodDefAs(interfaceMD))
6369 if (interfaceMD->HasClassInstantiation())
6371 // pInterfaceMD will be in the canonical form, so we need to check the specific
6372 // instantiation against pInterfaceMT.
6374 // The parent of pDeclMD is unreliable for this purpose because it may or
6375 // may not be canonicalized. Let's go from the metadata.
6377 SigTypeContext typeContext = SigTypeContext(pMT);
6380 IfFailThrow(pMD->GetModule()->GetMDImport()->GetParentToken(it.GetToken(), &tkParent));
6382 MethodTable *pDeclMT = ClassLoader::LoadTypeDefOrRefOrSpecThrowing(
6385 &typeContext).AsMethodTable();
6387 // We do CanCastToInterface to also cover variance.
6388 // We already know this is a method on the same type definition as the (generic)
6389 // interface but we need to make sure the instantiations match.
6390 if ((allowVariance && pDeclMT->CanCastToInterface(interfaceMT))
6391 || pDeclMT == interfaceMT)
6394 candidateMaybe = pMD;
6399 // No generics involved. If the method definitions match, it's a match.
6400 candidateMaybe = pMD;
6408 // Static virtual methods don't record MethodImpl slots so they need special handling
6409 ResolveVirtualStaticMethodFlags resolveVirtualStaticMethodFlags = ResolveVirtualStaticMethodFlags::None;
6412 resolveVirtualStaticMethodFlags |= ResolveVirtualStaticMethodFlags::AllowVariantMatches;
6414 if (instantiateMethodInstantiation)
6416 resolveVirtualStaticMethodFlags |= ResolveVirtualStaticMethodFlags::InstantiateResultOverFinalMethodDesc;
6419 candidateMaybe = pMT->TryResolveVirtualStaticMethodOnThisType(
6422 resolveVirtualStaticMethodFlags,
6427 if (candidateMaybe == NULL)
6430 if (candidateMaybe->HasClassOrMethodInstantiation())
6432 // Instantiate the MethodDesc
6433 // We don't want the generic dictionary from this pointer - we need to pass the secret type argument
6434 // from instantiating stubs to resolve ambiguity
6435 candidateMaybe = MethodDesc::FindOrCreateAssociatedMethodDesc(
6438 FALSE, // forceBoxedEntryPoint
6439 candidateMaybe->HasMethodInstantiation() ?
6440 candidateMaybe->AsInstantiatedMethodDesc()->IMD_GetMethodInstantiation() :
6441 Instantiation(), // for methods that are themselves generic
6442 FALSE, // allowInstParam
6443 TRUE, // forceRemoteableMethod
6444 TRUE, // allowCreate
6449 *candidateMD = candidateMaybe;
6454 // Find the default interface implementation method for interface dispatch
6455 // It is either the interface method with a default interface method implementation,
6456 // or the most specific interface with an explicit methodimpl overriding the method
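// Illustrative C# sketch (not from this file) of the two cases:
//
//   interface IA { void M() { /* default body */ } }
//   interface IB : IA { void IA.M() { /* more specific override */ } }
//   class C1 : IA { }      // resolves IA.M to IA's own default body
//   class C2 : IB { }      // resolves IA.M to IB's methodimpl, the most specific interface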
6457 BOOL MethodTable::FindDefaultInterfaceImplementation(
6458 MethodDesc *pInterfaceMD,
6459 MethodTable *pInterfaceMT,
6460 MethodDesc **ppDefaultMethod,
6461 FindDefaultInterfaceImplementationFlags findDefaultImplementationFlags,
6462 ClassLoadLevel level
6470 PRECONDITION(CheckPointer(pInterfaceMD));
6471 PRECONDITION(CheckPointer(pInterfaceMT));
6472 PRECONDITION(CheckPointer(ppDefaultMethod));
6473 POSTCONDITION(!RETVAL || (*ppDefaultMethod) != nullptr);
6476 #ifdef FEATURE_DEFAULT_INTERFACES
6477 bool allowVariance = (findDefaultImplementationFlags & FindDefaultInterfaceImplementationFlags::AllowVariance) != FindDefaultInterfaceImplementationFlags::None;
6478 CQuickArray<MatchCandidate> candidates;
6479 unsigned candidatesCount = 0;
6481 // Check the current method table itself
6482 MethodDesc *candidateMaybe = NULL;
6483 if (IsInterface() && TryGetCandidateImplementation(this, pInterfaceMD, pInterfaceMT, findDefaultImplementationFlags, &candidateMaybe, level))
6485 _ASSERTE(candidateMaybe != NULL);
6487 candidates.AllocThrows(this->GetNumInterfaces() + 1);
6488 candidates[candidatesCount].pMT = this;
6489 candidates[candidatesCount].pMD = candidateMaybe;
6494 candidates.AllocThrows(this->GetNumInterfaces());
6498 // Walk the interfaces from the derived class up to the parent class.
6499 // We went with a straightforward implementation since in most cases the number of interfaces is small
6500 // and the results of interface dispatch are already cached. If there is significant usage of default
6501 // interface methods in highly complex interface hierarchies we can revisit this.
6503 MethodTable *pMT = this;
6507 MethodTable *pParentMT = pMT->GetParentMethodTable();
6508 unsigned dwParentInterfaces = 0;
6510 dwParentInterfaces = pParentMT->GetNumInterfaces();
6512 // Scan the current class only if it has more interfaces than its parent
6513 // (parent interfaces are laid out first in the interface map)
6514 if (pMT->GetNumInterfaces() > dwParentInterfaces)
6516 // Only iterate the interfaceimpls on current class
6517 MethodTable::InterfaceMapIterator it = pMT->IterateInterfaceMapFrom(dwParentInterfaces);
6518 while (!it.Finished())
6520 MethodTable *pCurMT = it.GetInterface(pMT, level);
6522 MethodDesc *pCurMD = NULL;
6523 if (TryGetCandidateImplementation(pCurMT, pInterfaceMD, pInterfaceMT, findDefaultImplementationFlags, &pCurMD, level))
6526 // Found a match. But is it a more specific match? (We want the most specific interfaces.)
6528 _ASSERTE(pCurMD != NULL);
6529 bool needToInsert = true;
6530 bool seenMoreSpecific = false;
6532 // We need to maintain the invariant that the candidates are always the most specific
6533 // in all paths scanned so far. There might be multiple incompatible candidates.
6534 for (unsigned i = 0; i < candidatesCount; ++i)
6536 MethodTable *pCandidateMT = candidates[i].pMT;
6537 if (pCandidateMT == NULL)
6540 if (pCandidateMT == pCurMT)
6542 // A dup - we are done
6543 needToInsert = false;
6547 if (allowVariance && pCandidateMT->HasSameTypeDefAs(pCurMT))
6549 // Variant match on the same type - this is a tie
6551 else if (pCurMT->CanCastToInterface(pCandidateMT))
6553 // pCurMT is a more specific choice than the existing candidate (e.g. IFoo and IBar both override IBlah):
6554 if (!seenMoreSpecific)
6556 seenMoreSpecific = true;
6557 candidates[i].pMT = pCurMT;
6558 candidates[i].pMD = pCurMD;
6562 candidates[i].pMT = NULL;
6563 candidates[i].pMD = NULL;
6566 needToInsert = false;
6568 else if (pCandidateMT->CanCastToInterface(pCurMT))
6570 // pCurMT is less specific - we don't need to scan more entries as this entry can
6571 // represent pCurMT (other entries are incompatible with pCurMT)
6572 needToInsert = false;
6577 // pCurMT is incompatible - keep scanning
6583 ASSERT(candidatesCount < candidates.Size());
6584 candidates[candidatesCount].pMT = pCurMT;
6585 candidates[candidatesCount].pMD = pCurMD;
6597 // scan to see if there are any conflicts
6598 // If we are doing a second pass (allowing variance), we don't actually look for
6599 // a conflict anymore, but pick the first match.
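// Illustrative C# sketch (not from this file) of a conflict this scan detects:
//
//   interface IA { void M() { } }
//   interface IB : IA { void IA.M() { } }
//   interface IC : IA { void IA.M() { } }
//   class C : IB, IC { }
//
// IB and IC are incompatible most-specific candidates for IA.M, so two distinct entries
// survive the scan and, when ThrowOnConflict is set, an ambiguous-implementation
// exception is raised via ThrowExceptionForConflictingOverride.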
6600 MethodTable *pBestCandidateMT = NULL;
6601 MethodDesc *pBestCandidateMD = NULL;
6602 for (unsigned i = 0; i < candidatesCount; ++i)
6604 if (candidates[i].pMT == NULL)
6607 if (pBestCandidateMT == NULL)
6609 pBestCandidateMT = candidates[i].pMT;
6610 pBestCandidateMD = candidates[i].pMD;
6612 // If this is a second pass lookup, we know this is a variant match. As such
6613 // we pick the first result as the winner and don't look for a conflict for instance methods.
6614 if (allowVariance && !pInterfaceMD->IsStatic())
6617 else if (pBestCandidateMT != candidates[i].pMT)
6619 bool throwOnConflict = (findDefaultImplementationFlags & FindDefaultInterfaceImplementationFlags::ThrowOnConflict) != FindDefaultInterfaceImplementationFlags::None;
6621 if (throwOnConflict)
6622 ThrowExceptionForConflictingOverride(this, pInterfaceMT, pInterfaceMD);
6624 *ppDefaultMethod = pBestCandidateMD;
6629 if (pBestCandidateMD != NULL)
6631 *ppDefaultMethod = pBestCandidateMD;
6635 *ppDefaultMethod = NULL;
6636 #endif // FEATURE_DEFAULT_INTERFACES
6640 #endif // DACCESS_COMPILE
6642 //==========================================================================================
6643 DispatchSlot MethodTable::FindDispatchSlot(UINT32 typeID, UINT32 slotNumber, BOOL throwOnConflict)
6653 DispatchSlot implSlot(NULL);
6654 FindDispatchImpl(typeID, slotNumber, &implSlot, throwOnConflict);
6658 #ifndef DACCESS_COMPILE
6660 //==========================================================================================
6661 DispatchSlot MethodTable::FindDispatchSlotForInterfaceMD(MethodDesc *pMD, BOOL throwOnConflict)
6663 WRAPPER_NO_CONTRACT;
6664 CONSISTENCY_CHECK(CheckPointer(pMD));
6665 CONSISTENCY_CHECK(pMD->IsInterface());
6666 return FindDispatchSlotForInterfaceMD(TypeHandle(pMD->GetMethodTable()), pMD, throwOnConflict);
6669 //==========================================================================================
6670 DispatchSlot MethodTable::FindDispatchSlotForInterfaceMD(TypeHandle ownerType, MethodDesc *pMD, BOOL throwOnConflict)
6672 WRAPPER_NO_CONTRACT;
6673 CONSISTENCY_CHECK(!ownerType.IsNull());
6674 CONSISTENCY_CHECK(CheckPointer(pMD));
6675 CONSISTENCY_CHECK(pMD->IsInterface());
6676 return FindDispatchSlot(ownerType.GetMethodTable()->GetTypeID(), pMD->GetSlot(), throwOnConflict);
6679 //==========================================================================================
6680 // This is used for reverse methodimpl lookups by ComPlusMethodCall MDs.
6681 // This assumes the following:
6682 // The methodimpl is for an interfaceToken->slotNumber
6683 // There is ONLY ONE such mapping for this slot number
6684 // The mapping exists in this type, not a parent type.
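// Illustrative C# sketch (not from this file) of the expected shape of the input:
//
//   interface IFoo { void M(); }
//   class C : IFoo { void IFoo.M() { } }   // methodimpl: IFoo::M -> a private slot on C
//
// Given the slot number of C's explicit implementation, the lookup below walks C's
// dispatch map and recovers the interface MethodDesc for IFoo::M.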
6685 MethodDesc * MethodTable::ReverseInterfaceMDLookup(UINT32 slotNumber)
6691 DispatchMap::Iterator it(this);
6692 for (; it.IsValid(); it.Next())
6694 if (it.Entry()->GetTargetSlotNumber() == slotNumber)
6696 DispatchMapTypeID typeID = it.Entry()->GetTypeID();
6697 _ASSERTE(!typeID.IsThisClass());
6698 UINT32 slotNum = it.Entry()->GetSlotNumber();
6699 MethodTable * pMTItf = LookupDispatchMapType(typeID);
6700 CONSISTENCY_CHECK(CheckPointer(pMTItf));
6702 MethodDesc *pCanonMD = pMTItf->GetMethodDescForSlot((DWORD)slotNum);
6703 return MethodDesc::FindOrCreateAssociatedMethodDesc(
6706 FALSE, // forceBoxedEntryPoint
6707 Instantiation(), // methodInst
6708 FALSE, // allowInstParam
6709 TRUE); // forceRemotableMethod
6715 //==========================================================================================
6716 UINT32 MethodTable::GetTypeID()
6723 PTR_MethodTable pMT = PTR_MethodTable(this);
6725 return GetDomain()->GetTypeID(pMT);
6728 //==========================================================================================
6729 UINT32 MethodTable::LookupTypeID()
6738 PTR_MethodTable pMT = PTR_MethodTable(this);
6740 return GetDomain()->LookupTypeID(pMT);
6743 //==========================================================================================
6744 BOOL MethodTable::ImplementsInterfaceWithSameSlotsAsParent(MethodTable *pItfMT, MethodTable *pParentMT)
6750 PRECONDITION(!IsInterface() && !pParentMT->IsInterface());
6751 PRECONDITION(pItfMT->IsInterface());
6754 MethodTable *pMT = this;
6757 DispatchMap::EncodedMapIterator it(pMT);
6758 for (; it.IsValid(); it.Next())
6760 DispatchMapEntry *pCurEntry = it.Entry();
6761 if (DispatchMapTypeMatchesMethodTable(pCurEntry->GetTypeID(), pItfMT))
6763 // this class and its parents up to pParentMT must have no mappings for the interface
6768 pMT = pMT->GetParentMethodTable();
6769 _ASSERTE(pMT != NULL);
6771 while (pMT != pParentMT);
6776 //==========================================================================================
6777 BOOL MethodTable::HasSameInterfaceImplementationAsParent(MethodTable *pItfMT, MethodTable *pParentMT)
6783 PRECONDITION(!IsInterface() && !pParentMT->IsInterface());
6784 PRECONDITION(pItfMT->IsInterface());
6787 if (!ImplementsInterfaceWithSameSlotsAsParent(pItfMT, pParentMT))
6789 // if the slots are not the same, this class reimplements the interface
6793 // The target slots are the same, but they can still be overridden. We'll iterate
6794 // the dispatch map beginning with pParentMT up the hierarchy and for each pItfMT
6795 // entry check the target slot contents (pParentMT vs. this class). A mismatch
6796 // means that there is an override. We'll keep track of source (interface) slots
6797 // we have seen so that we can ignore entries higher in the hierarchy that are no
6798 // longer in effect at pParentMT level.
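// Illustrative C# sketch (not from this file): same dispatch-map slots, different targets.
//
//   interface IFoo { void M(); }
//   class Base : IFoo { public virtual void M() { } }
//   class Derived : Base { public override void M() { } }
//
// Derived inherits Base's IFoo mapping (same target slot number), but the contents of
// that slot differ between Derived and Base, so the override is detected below and the
// method returns FALSE.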
6801 WORD wSeenSlots = 0;
6802 WORD wTotalSlots = pItfMT->GetNumVtableSlots();
6804 MethodTable *pMT = pParentMT;
6807 DispatchMap::EncodedMapIterator it(pMT);
6808 for (; it.IsValid(); it.Next())
6810 DispatchMapEntry *pCurEntry = it.Entry();
6811 if (DispatchMapTypeMatchesMethodTable(pCurEntry->GetTypeID(), pItfMT))
6813 UINT32 ifaceSlot = pCurEntry->GetSlotNumber();
6814 if (!bitMask.TestBit(ifaceSlot))
6816 bitMask.SetBit(ifaceSlot);
6818 UINT32 targetSlot = pCurEntry->GetTargetSlotNumber();
6819 if (GetRestoredSlot(targetSlot) != pParentMT->GetRestoredSlot(targetSlot))
6821 // the target slot is overridden
6825 if (++wSeenSlots == wTotalSlots)
6827 // we've resolved all slots, no reason to continue
6833 pMT = pMT->GetParentMethodTable();
6835 while (pMT != NULL);
6840 #endif // !DACCESS_COMPILE
6842 //==========================================================================================
6843 #ifndef DACCESS_COMPILE
6844 MethodTable * MethodTable::LookupDispatchMapType(DispatchMapTypeID typeID)
6851 _ASSERTE(!typeID.IsThisClass());
6853 InterfaceMapIterator intIt = IterateInterfaceMapFrom(typeID.GetInterfaceNum());
6854 return intIt.GetInterface(this);
6856 #endif // DACCESS_COMPILE
6858 //==========================================================================================
6859 bool MethodTable::DispatchMapTypeMatchesMethodTable(DispatchMapTypeID typeID, MethodTable* pMT)
6866 _ASSERTE(!typeID.IsThisClass());
6867 InterfaceMapIterator intIt = IterateInterfaceMapFrom(typeID.GetInterfaceNum());
6868 return intIt.CurrentInterfaceMatches(this, pMT);
6871 //==========================================================================================
6872 MethodDesc * MethodTable::GetIntroducingMethodDesc(DWORD slotNumber)
6882 MethodDesc * pCurrentMD = GetMethodDescForSlot(slotNumber);
6883 DWORD dwSlot = pCurrentMD->GetSlot();
6884 MethodDesc * pIntroducingMD = NULL;
6886 MethodTable * pParentType = GetParentMethodTable();
6887 MethodTable * pPrevParentType = NULL;
6889 // Find this method in the parent.
6890 // If it does exist in the parent, it would be at the same vtable slot.
6891 while ((pParentType != NULL) &&
6892 (dwSlot < pParentType->GetNumVirtuals()))
6894 pPrevParentType = pParentType;
6895 pParentType = pParentType->GetParentMethodTable();
6898 if (pPrevParentType != NULL)
6900 pIntroducingMD = pPrevParentType->GetMethodDescForSlot(dwSlot);
6903 return pIntroducingMD;
6906 //==========================================================================================
6907 // There is a case where a method declared in a type can be explicitly
6908 // overridden by a methodImpl on another method within the same type. In
6909 // this case, we need to call the methodImpl target, and this will map
6910 // things appropriately for us.
6911 MethodDesc * MethodTable::MapMethodDeclToMethodImpl(MethodDesc * pMDDecl)
6913 STATIC_CONTRACT_THROWS;
6914 STATIC_CONTRACT_GC_TRIGGERS;
6916 MethodTable * pMT = pMDDecl->GetMethodTable();
6919 // Fast negative case check
6922 // If it's not virtual, then it could not have been methodImpl'd.
6923 if (!pMDDecl->IsVirtual() ||
6924 // Is it a non-virtual call to the instantiating stub
6925 (pMT->IsValueType() && !pMDDecl->IsUnboxingStub()))
6930 MethodDesc * pMDImpl = pMT->GetParallelMethodDesc(pMDDecl);
6932 // If the method is instantiated, then we need to resolve to the corresponding
6933 // instantiated MD for the new slot number.
6934 if (pMDDecl->HasMethodInstantiation())
6936 if (pMDDecl->GetSlot() != pMDImpl->GetSlot())
6938 if (!pMDDecl->IsGenericMethodDefinition())
6940 #ifndef DACCESS_COMPILE
6941 pMDImpl = pMDDecl->FindOrCreateAssociatedMethodDesc(
6944 pMDDecl->IsUnboxingStub(),
6945 pMDDecl->GetMethodInstantiation(),
6946 pMDDecl->IsInstantiatingStub());
6954 // Since the generic method definition is always in the actual
6955 // slot for the method table, and since the slot numbers for
6956 // the Decl and Impl MDs are the same, then the call to
6957 // FindOrCreateAssociatedMethodDesc would just result in the
6958 // same pMDDecl being returned. In this case, we can skip all
6964 CONSISTENCY_CHECK(CheckPointer(pMDImpl));
6965 CONSISTENCY_CHECK(!pMDImpl->IsGenericMethodDefinition());
6967 } // MethodTable::MapMethodDeclToMethodImpl
6970 //==========================================================================================
6971 HRESULT MethodTable::GetGuidNoThrow(GUID *pGuid, BOOL bGenerateIfNotFound, BOOL bClassic /*= TRUE*/)
6983 GetGuid(pGuid, bGenerateIfNotFound, bClassic);
6985 EX_CATCH_HRESULT(hr);
6987 // ensure we return a failure hr when pGuid is not filled in
6988 if (SUCCEEDED(hr) && (*pGuid == GUID_NULL))
6994 //==========================================================================================
6995 // Returns the GUID of this MethodTable.
6996 // If metadata does not specify GUID for the type, GUID_NULL is returned (if bGenerateIfNotFound
6997 // is FALSE) or a GUID is auto-generated on the fly from the name and members of the type
6998 // (bGenerateIfNotFound is TRUE).
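// Illustrative C# sketch (not from this file):
//
//   [Guid("...")] interface IExplicit { }   // metadata GUID is returned as-is
//   interface IGenerated { void M(); }      // no [Guid]: with bGenerateIfNotFound == TRUE,
//                                           // a GUID is hashed from the stringized member
//                                           // signatures plus a stringized assembly typelib id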
6999 void MethodTable::GetGuid(GUID *pGuid, BOOL bGenerateIfNotFound, BOOL bClassic /*=TRUE*/)
7009 #ifdef DACCESS_COMPILE
7011 _ASSERTE(pGuid != NULL);
7012 PTR_GuidInfo pGuidInfo = GetClass()->GetGuidInfo();
7013 if (pGuidInfo != NULL)
7014 *pGuid = pGuidInfo->m_Guid;
7018 #else // DACCESS_COMPILE
7020 SIZE_T cchName = 0; // Length of the name (possibly after decoration).
7021 SIZE_T cbCur; // Current offset.
7022 LPCWSTR szName = NULL; // Name to turn to a guid.
7023 CQuickArray<BYTE> rName; // Buffer to accumulate signatures.
7024 BOOL bGenerated = FALSE; // A flag indicating if we generated the GUID from name.
7026 _ASSERTE(pGuid != NULL);
7027 _ASSERTE(!this->IsArray());
7029 GuidInfo *pInfo = GetClass()->GetGuidInfo();
7031 // First check to see if we have already cached the guid for this type.
7032 // We currently only cache guids on interfaces and WinRT delegates.
7033 // In classic mode, though, ensure we don't retrieve the GuidInfo for redirected interfaces
7034 if ((IsInterface()) && pInfo != NULL
7037 if (pInfo->m_bGeneratedFromName)
7039 // If the GUID was generated from the name then only return it
7040 // if bGenerateIfNotFound is set.
7041 if (bGenerateIfNotFound)
7042 *pGuid = pInfo->m_Guid;
7048 *pGuid = pInfo->m_Guid;
7053 if (GetClass()->HasNoGuid())
7059 // If there is a GUID in the metadata then return that.
7060 IfFailThrow(GetMDImport()->GetItemGuid(GetCl(), pGuid));
7062 if (*pGuid == GUID_NULL)
7064 // Remember that we didn't find the GUID, so we can skip looking during
7065 // future checks. (Note that this is a very important optimization in the
7067 GetClass()->SetHasNoGuid();
7071 if (*pGuid == GUID_NULL && bGenerateIfNotFound)
7073 // For interfaces, concatenate the signatures of the methods and fields.
7074 if (!IsNilToken(GetCl()) && IsInterface())
7076 // Retrieve the stringized interface definition.
7077 cbCur = GetStringizedItfDef(TypeHandle(this), rName);
7079 // Pad up to a whole WCHAR.
7080 if (cbCur % sizeof(WCHAR))
7082 SIZE_T cbDelta = sizeof(WCHAR) - (cbCur % sizeof(WCHAR));
7083 rName.ReSizeThrows(cbCur + cbDelta);
7084 memset(rName.Ptr() + cbCur, 0, cbDelta);
7088 // Point to the new buffer.
7089 cchName = cbCur / sizeof(WCHAR);
7090 szName = reinterpret_cast<LPWSTR>(rName.Ptr());
7094 // Get the name of the class.
7095 DefineFullyQualifiedNameForClassW();
7096 szName = GetFullyQualifiedNameForClassNestedAwareW(this);
7099 cchName = u16_strlen(szName);
7101 // Enlarge buffer for class name.
7102 cbCur = cchName * sizeof(WCHAR);
7103 rName.ReSizeThrows(cbCur + sizeof(WCHAR));
7104 wcscpy_s(reinterpret_cast<LPWSTR>(rName.Ptr()), cchName + 1, szName);
7106 // Add the assembly guid string to the class name.
7107 ULONG cbCurOUT = (ULONG)cbCur;
7108 IfFailThrow(GetStringizedTypeLibGuidForAssembly(GetAssembly(), rName, (ULONG)cbCur, &cbCurOUT));
7109 cbCur = (SIZE_T) cbCurOUT;
7111 // Pad to a whole WCHAR.
7112 if (cbCur % sizeof(WCHAR))
7114 rName.ReSizeThrows(cbCur + sizeof(WCHAR)-(cbCur%sizeof(WCHAR)));
7115 while (cbCur % sizeof(WCHAR))
7119 // Point to the new buffer.
7120 szName = reinterpret_cast<LPWSTR>(rName.Ptr());
7121 cchName = cbCur / sizeof(WCHAR);
7122 // Don't want to have to pad.
7123 _ASSERTE((sizeof(GUID) % sizeof(WCHAR)) == 0);
7126 // Generate guid from name.
7127 CorGuidFromNameW(pGuid, szName, cchName);
7129 // Remember we generated the guid from the type name.
7133 // Cache the guid in the type, if not already cached.
7134 // We currently only do this for interfaces.
7135 // Also, in classic mode do NOT cache GUID for redirected interfaces.
7136 if ((IsInterface()) && (pInfo == NULL) && (*pGuid != GUID_NULL))
7138 AllocMemTracker amTracker;
7140 // We will always store the GuidInfo on the methodTable.
7141 // Since the GUIDInfo will be stored on the EEClass,
7142 // the memory should be allocated on the loaderAllocator of the class.
7143 // The defining module and the loaded module could be different in some scenarios.
7144 // For example, in the case of shared generic instantiations,
7145 // a shared generic such as System.__Canon would be loaded in the shared domain,
7146 // but this->GetLoaderAllocator would be the loader allocator of the defining
7147 // module, which can get unloaded at any time.
7148 _ASSERTE(GetClass());
7149 _ASSERTE(GetClass()->GetMethodTable());
7150 PTR_LoaderAllocator pLoaderAllocator = GetClass()->GetMethodTable()->GetLoaderAllocator();
7152 _ASSERTE(pLoaderAllocator);
7154 // Allocate the guid information.
7155 pInfo = (GuidInfo *)amTracker.Track(
7156 pLoaderAllocator->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(GuidInfo))));
7157 pInfo->m_Guid = *pGuid;
7158 pInfo->m_bGeneratedFromName = bGenerated;
7160 // Set the per-EEClass GuidInfo
7161 // The MethodTable may be NGENed and read-only - and there's no point in saving
7162 // classic GUIDs in non-WinRT MethodTables anyway.
7163 GetClass()->SetGuidInfo(pInfo);
7165 amTracker.SuppressRelease();
7167 #endif // !DACCESS_COMPILE
7171 //==========================================================================================
7172 MethodDesc* MethodTable::GetMethodDescForSlotAddress(PCODE addr, BOOL fSpeculative /*=FALSE*/)
7174 CONTRACT(MethodDesc *)
7178 POSTCONDITION(CheckPointer(RETVAL, NULL_NOT_OK));
7179 POSTCONDITION(RETVAL->m_pDebugMethodTable == NULL || // We must be in BuildMethdTableThrowing()
7180 RETVAL->SanityCheck());
7184 // If we see shared fcall implementation as an argument to this
7185 // function, it means that a vtable slot for the shared fcall
7186 // got backpatched when it shouldn't have. The reason we can't
7187 // backpatch this method is that it is an FCall that has many
7188 // MethodDescs for one implementation. If we backpatch delegate
7189 // constructors, this function will not be able to recover the
7190 // MethodDesc for the method.
7192 _ASSERTE_IMPL(!ECall::IsSharedFCallImpl(addr) &&
7193 "someone backpatched shared fcall implementation -- "
7194 "see comment in code");
7196 MethodDesc* pMethodDesc = ExecutionManager::GetCodeMethodDesc(addr);
7197 if (NULL != pMethodDesc)
7202 #ifdef FEATURE_INTERPRETER
7203 // I don't really know why this helps. Figure it out.
7204 #ifndef DACCESS_COMPILE
7205 // If we didn't find it above, try as an Interpretation stub...
7206 pMethodDesc = Interpreter::InterpretationStubToMethodInfo(addr);
7208 if (NULL != pMethodDesc)
7213 #endif // FEATURE_INTERPRETER
7216 pMethodDesc = ECall::MapTargetBackToMethod(addr);
7217 if (pMethodDesc != 0)
7222 pMethodDesc = MethodDesc::GetMethodDescFromStubAddr(addr, fSpeculative);
7226 RETURN(pMethodDesc);
7229 //==========================================================================================
7231 BOOL MethodTable::ComputeContainsGenericVariables(Instantiation inst)
7241 for (DWORD j = 0; j < inst.GetNumArgs(); j++)
7243 if (inst[j].ContainsGenericVariables())
7251 //==========================================================================================
7252 BOOL MethodTable::SanityCheck()
7254 LIMITED_METHOD_CONTRACT;
7257 // strings have component size 2, all other non-arrays should have 0
7258 _ASSERTE((GetComponentSize() <= 2) || IsArray());
7260 if (m_pEEClass == NULL)
7265 EEClass * pClass = GetClass();
7266 MethodTable * pCanonMT = pClass->GetMethodTable();
7268 // Let's try to make sure we have a valid EEClass pointer.
7269 if (pCanonMT == NULL)
7272 if (GetNumGenericArgs() != 0)
7273 return (pCanonMT->GetClass() == pClass);
7275 return (pCanonMT == this) || IsArray();
7278 //==========================================================================================
7279 unsigned MethodTable::GetTypeDefRid()
7281 LIMITED_METHOD_DAC_CONTRACT;
7283 WORD token = m_wToken;
7285 if (token == METHODTABLE_TOKEN_OVERFLOW)
7286 return (unsigned)*GetTokenOverflowPtr();
7291 //==========================================================================================
7292 void MethodTable::SetCl(mdTypeDef token)
7294 LIMITED_METHOD_CONTRACT;
7296 unsigned rid = RidFromToken(token);
7297 if (rid >= METHODTABLE_TOKEN_OVERFLOW)
7299 m_wToken = METHODTABLE_TOKEN_OVERFLOW;
7300 *GetTokenOverflowPtr() = rid;
7304 _ASSERTE(FitsIn<WORD>(rid));
7305 m_wToken = (WORD)rid;
7308 _ASSERTE(GetCl() == token);
7311 //==========================================================================================
7312 MethodDesc * MethodTable::GetClassConstructor()
7321 return GetMethodDescForSlot(GetClassConstructorSlot());
7324 //==========================================================================================
7325 DWORD MethodTable::HasFixedAddressVTStatics()
7327 LIMITED_METHOD_CONTRACT;
7329 return GetClass()->HasFixedAddressVTStatics();
7332 //==========================================================================================
7333 BOOL MethodTable::HasOnlyAbstractMethods()
7335 LIMITED_METHOD_CONTRACT;
7337 return GetClass()->HasOnlyAbstractMethods();
7340 //==========================================================================================
7341 WORD MethodTable::GetNumHandleRegularStatics()
7343 LIMITED_METHOD_CONTRACT;
7345 return GetClass()->GetNumHandleRegularStatics();
7348 //==========================================================================================
7349 WORD MethodTable::GetNumBoxedRegularStatics()
7351 LIMITED_METHOD_CONTRACT;
7353 return GetClass()->GetNumBoxedRegularStatics();
7356 //==========================================================================================
7357 WORD MethodTable::GetNumBoxedThreadStatics ()
7359 LIMITED_METHOD_CONTRACT;
7361 return GetClass()->GetNumBoxedThreadStatics();
7365 //==========================================================================================
7366 // Returns true if pointer to the parent method table has been initialized/restored already.
7367 BOOL MethodTable::IsParentMethodTablePointerValid()
7369 LIMITED_METHOD_CONTRACT;
7372 // workaround: The type loader accesses partially initialized data structures, which interferes with IBC logging.
7373 // Once the type loader is fixed to not access partially initialized data structures, this can go away.
7374 if (!GetWriteableData()->IsParentMethodTablePointerValid())
7382 //---------------------------------------------------------------------------------------
7384 // Ascends the parent class chain of "this", until a MethodTable is found whose typeDef
7385 // matches that of the specified pWhichParent. Why is this useful? See
7386 // code:MethodTable::GetInstantiationOfParentClass below and
7387 // code:Generics::GetExactInstantiationsOfMethodAndItsClassFromCallInformation for use
7391 // pWhichParent - MethodTable whose typeDef we're trying to match as we go up
7392 // "this"'s parent chain.
7395 // If a matching parent MethodTable is found, it is returned. Else, NULL is
7399 MethodTable * MethodTable::GetMethodTableMatchingParentClass(MethodTable * pWhichParent)
7405 PRECONDITION(CheckPointer(pWhichParent));
7406 PRECONDITION(IsRestored());
7407 PRECONDITION(pWhichParent->IsRestored());
7411 MethodTable *pMethodTableSearch = this;
7413 #ifdef DACCESS_COMPILE
7414 unsigned parentCount = 0;
7415 MethodTable *pOldMethodTable = NULL;
7416 #endif // DACCESS_COMPILE
7418 while (pMethodTableSearch != NULL)
7420 #ifdef DACCESS_COMPILE
7421 if (pMethodTableSearch == pOldMethodTable ||
7426 pOldMethodTable = pMethodTableSearch;
7428 #endif // DACCESS_COMPILE
7430 if (pMethodTableSearch->HasSameTypeDefAs(pWhichParent))
7432 return pMethodTableSearch;
7435 pMethodTableSearch = pMethodTableSearch->GetParentMethodTable();
7442 //==========================================================================================
7443 // Given D<T> : C<List<T>> and a type handle D<string> we sometimes
7444 // need to find the corresponding type handle
7445 // C<List<string>> (C may also be some type
7446 // further up the inheritance hierarchy). GetInstantiationOfParentClass
7447 // helps us do this by getting the corresponding instantiation of C, i.e.
7450 // pWhichParent: this is used to identify which parent type we're interested in.
7451 // It must be a canonical EEClass, e.g. for C<ref>. This is used as a token for
7452 // C<List<T>>. This method can also be called with the minimal methodtable used
7453 // for dynamic methods. In that case, we need to return an empty instantiation.
7455 // Note this only works for parent classes, not parent interfaces.
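// Illustrative C# sketch (not from this file):
//
//   class C<U> { }
//   class D<T> : C<List<T>> { }
//
// Called on the MethodTable for D<string> with pWhichParent identifying C (e.g. the
// canonical C<__Canon>), the returned instantiation is <List<string>>.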
7456 Instantiation MethodTable::GetInstantiationOfParentClass(MethodTable *pWhichParent)
7461 PRECONDITION(CheckPointer(pWhichParent));
7462 PRECONDITION(IsRestored());
7463 PRECONDITION(pWhichParent->IsRestored());
7468 MethodTable * pMatchingParent = GetMethodTableMatchingParentClass(pWhichParent);
7469 if (pMatchingParent != NULL)
7471 return pMatchingParent->GetInstantiation();
7474 // The parameter should always be a parent class or the dynamic method
7475 // class. Since there is no bit on the dynamicclass methodtable to indicate
7476 // that it is the dynamic method methodtable, we simply check the debug name.
7477 // This is good enough for an assert.
7478 _ASSERTE(strcmp(pWhichParent->GetDebugClassName(), "dynamicClass") == 0);
7479 return Instantiation();
7482 #ifndef DACCESS_COMPILE
7484 #ifdef FEATURE_COMINTEROP
7487 // This is for COM Interop backwards compatibility
7490 //==========================================================================================
7491 // Returns the data pointer if present, NULL otherwise
7492 InteropMethodTableData *MethodTable::LookupComInteropData()
7494 WRAPPER_NO_CONTRACT;
7496 return GetLoaderAllocator()->LookupComInteropData(this);
7499 //==========================================================================================
7500 // Returns TRUE if successfully inserted, FALSE if this would be a duplicate entry
7501 BOOL MethodTable::InsertComInteropData(InteropMethodTableData *pData)
7503 WRAPPER_NO_CONTRACT;
7505 return GetLoaderAllocator()->InsertComInteropData(this, pData);
7508 //==========================================================================================
7509 InteropMethodTableData *MethodTable::CreateComInteropData(AllocMemTracker *pamTracker)
7513 PRECONDITION(GetParentMethodTable() == NULL || GetParentMethodTable()->LookupComInteropData() != NULL);
7516 ACQUIRE_STACKING_ALLOCATOR(pStackingAllocator);
7518 ClassCompat::MethodTableBuilder builder(this, pStackingAllocator);
7520 InteropMethodTableData *pData = builder.BuildInteropVTable(pamTracker);
7525 //==========================================================================================
7526 InteropMethodTableData *MethodTable::GetComInteropData()
7533 InteropMethodTableData *pData = LookupComInteropData();
7539 // Make sure that the parent's interop data has been created
7540 MethodTable *pParentMT = GetParentMethodTable();
7542 pParentMT->GetComInteropData();
7544 AllocMemTracker amTracker;
7546 pData = CreateComInteropData(&amTracker);
7547 if (InsertComInteropData(pData))
7549 amTracker.SuppressRelease();
7553 pData = LookupComInteropData();
7561 #endif // FEATURE_COMINTEROP
7563 //==========================================================================================
7564 ULONG MethodTable::MethodData::Release()
7566 LIMITED_METHOD_CONTRACT;
7567 //@TODO: Must adjust this to use an alternate allocator so that we don't
7568 //@TODO: potentially cause deadlocks on the debug thread.
7569 SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE;
7570 ULONG cRef = (ULONG) InterlockedDecrement((LONG*)&m_cRef);
7577 //==========================================================================================
7579 MethodTable::MethodData::ProcessMap(
7580 const DispatchMapTypeID * rgTypeIDs,
7583 UINT32 iCurrentChainDepth,
7584 MethodDataEntry * rgWorkingData,
7585 size_t cWorkingData)
7587 LIMITED_METHOD_CONTRACT;
7589 for (DispatchMap::EncodedMapIterator it(pMT); it.IsValid(); it.Next())
7591 for (UINT32 nTypeIDIndex = 0; nTypeIDIndex < cTypeIDs; nTypeIDIndex++)
7593 if (it.Entry()->GetTypeID() == rgTypeIDs[nTypeIDIndex])
7595 UINT32 curSlot = it.Entry()->GetSlotNumber();
7596 // This if check is defensive, and should never fail
7597 if (curSlot < cWorkingData)
7599 MethodDataEntry * pCurEntry = &rgWorkingData[curSlot];
7600 if (!pCurEntry->IsDeclInit() && !pCurEntry->IsImplInit())
7602 pCurEntry->SetImplData(it.Entry()->GetTargetSlotNumber());
7608 } // MethodTable::MethodData::ProcessMap
7610 //==========================================================================================
7611 UINT32 MethodTable::MethodDataObject::GetObjectSize(MethodTable *pMT)
7613 WRAPPER_NO_CONTRACT;
7614 UINT32 cb = sizeof(MethodTable::MethodDataObject);
7615 cb += pMT->GetCanonicalMethodTable()->GetNumMethods() * sizeof(MethodDataObjectEntry);
7619 //==========================================================================================
7620 // This will fill in all the MethodEntry slots present in the current MethodTable
7621 void MethodTable::MethodDataObject::Init(MethodData *pParentData)
7625 WRAPPER(GC_TRIGGERS);
7626 PRECONDITION(CheckPointer(m_pDeclMT));
7627 PRECONDITION(CheckPointer(pParentData, NULL_OK));
7628 PRECONDITION(!m_pDeclMT->IsInterface());
7629 PRECONDITION(pParentData == NULL ||
7630 (m_pDeclMT->ParentEquals(pParentData->GetDeclMethodTable()) &&
7631 m_pDeclMT->ParentEquals(pParentData->GetImplMethodTable())));
7634 m_iNextChainDepth = 0;
7635 m_containsMethodImpl = FALSE;
7637 ZeroMemory(GetEntryData(), sizeof(MethodDataObjectEntry) * GetNumMethods());
7638 } // MethodTable::MethodDataObject::Init
7640 //==========================================================================================
7641 BOOL MethodTable::MethodDataObject::PopulateNextLevel()
7643 LIMITED_METHOD_CONTRACT;
7645 // Get the chain depth to next decode.
7646 UINT32 iChainDepth = GetNextChainDepth();
7648 // If the chain depth is MAX_CHAIN_DEPTH, then we've already parsed every parent.
7649 if (iChainDepth == MAX_CHAIN_DEPTH) {
7652 // Now move up the chain to the target.
7653 MethodTable *pMTCur = m_pDeclMT;
7654 for (UINT32 i = 0; pMTCur != NULL && i < iChainDepth; i++) {
7655 pMTCur = pMTCur->GetParentMethodTable();
7658 // If we reached the end, then we're done.
7659 if (pMTCur == NULL) {
7660 SetNextChainDepth(MAX_CHAIN_DEPTH);
7664 FillEntryDataForAncestor(pMTCur);
7666 SetNextChainDepth(iChainDepth + 1);
7669 } // MethodTable::MethodDataObject::PopulateNextLevel
7671 //==========================================================================================
7672 void MethodTable::MethodDataObject::FillEntryDataForAncestor(MethodTable * pMT)
7674 LIMITED_METHOD_CONTRACT;
7676 // Since we traverse ancestors from lowest in the inheritance hierarchy
7677 // to highest, the first method we come across for a slot is normally
7678 // both the declaring and implementing method desc.
7680 // However if this slot is the target of a methodImpl, pMD is not
7681 // necessarily either. Rather than track this on a per-slot basis,
7682 // we conservatively avoid filling out virtual methods once we
7683 // have found that this inheritance chain contains a methodImpl.
7685 // Note that there may be a methodImpl higher in the inheritance chain
7686 // that we have not seen yet, and so we will fill out virtual methods
7687 // until we reach that level. We are safe doing that because the slots
7688 // we fill have been introduced/overridden by a subclass and so take
7689 // precedence over any inherited methodImpl.
7691 // Before we fill the entry data, find if the current ancestor has any methodImpls
7693 if (pMT->GetClass()->ContainsMethodImpls())
7694 m_containsMethodImpl = TRUE;
7696 if (m_containsMethodImpl && pMT != m_pDeclMT)
7699 unsigned nVirtuals = pMT->GetNumVirtuals();
7701 MethodTable::IntroducedMethodIterator it(pMT, FALSE);
7702 for (; it.IsValid(); it.Next())
7704 MethodDesc * pMD = it.GetMethodDesc();
7706 unsigned slot = pMD->GetSlot();
7707 if (slot == MethodTable::NO_SLOT)
7710 // We want to fill all methods introduced by the actual type we're gathering
7711 // data for, and the virtual methods of the parent and above
7712 if (pMT == m_pDeclMT)
7714 if (m_containsMethodImpl && slot < nVirtuals)
7719 if (slot >= nVirtuals)
7723 MethodDataObjectEntry * pEntry = GetEntry(slot);
7725 if (pEntry->GetDeclMethodDesc() == NULL)
7727 pEntry->SetDeclMethodDesc(pMD);
7730 if (pEntry->GetImplMethodDesc() == NULL)
7732 pEntry->SetImplMethodDesc(pMD);
7735 } // MethodTable::MethodDataObject::FillEntryDataForAncestor
7737 //==========================================================================================
7738 MethodDesc * MethodTable::MethodDataObject::GetDeclMethodDesc(UINT32 slotNumber)
7740 WRAPPER_NO_CONTRACT;
7741 _ASSERTE(slotNumber < GetNumMethods());
7743 MethodDataObjectEntry * pEntry = GetEntry(slotNumber);
7745 // Fill the entries one level of inheritance at a time,
7746 // stopping when we have filled the MD we are looking for.
7747 while (!pEntry->GetDeclMethodDesc() && PopulateNextLevel());
7749 MethodDesc * pMDRet = pEntry->GetDeclMethodDesc();
7752 pMDRet = GetImplMethodDesc(slotNumber)->GetDeclMethodDesc(slotNumber);
7753 _ASSERTE(CheckPointer(pMDRet));
7754 pEntry->SetDeclMethodDesc(pMDRet);
7758 _ASSERTE(pMDRet == GetImplMethodDesc(slotNumber)->GetDeclMethodDesc(slotNumber));
7763 //==========================================================================================
7764 DispatchSlot MethodTable::MethodDataObject::GetImplSlot(UINT32 slotNumber)
7766 WRAPPER_NO_CONTRACT;
7767 _ASSERTE(slotNumber < GetNumMethods());
7768 return DispatchSlot(m_pDeclMT->GetRestoredSlot(slotNumber));
7771 //==========================================================================================
7772 UINT32 MethodTable::MethodDataObject::GetImplSlotNumber(UINT32 slotNumber)
7774 WRAPPER_NO_CONTRACT;
7775 _ASSERTE(slotNumber < GetNumMethods());
7779 //==========================================================================================
7780 MethodDesc *MethodTable::MethodDataObject::GetImplMethodDesc(UINT32 slotNumber)
7790 _ASSERTE(slotNumber < GetNumMethods());
7791 MethodDataObjectEntry *pEntry = GetEntry(slotNumber);
7793 // Fill the entries one level of inheritance at a time,
7794 // stopping when we have filled the MD we are looking for.
7795 while (!pEntry->GetImplMethodDesc() && PopulateNextLevel());
7797 MethodDesc *pMDRet = pEntry->GetImplMethodDesc();
7801 _ASSERTE(slotNumber < GetNumVirtuals());
7802 pMDRet = m_pDeclMT->GetMethodDescForSlot(slotNumber);
7803 _ASSERTE(CheckPointer(pMDRet));
7804 pEntry->SetImplMethodDesc(pMDRet);
7808 _ASSERTE(slotNumber >= GetNumVirtuals() || pMDRet == m_pDeclMT->GetMethodDescForSlot(slotNumber));
7814 //==========================================================================================
7815 void MethodTable::MethodDataObject::UpdateImplMethodDesc(MethodDesc* pMD, UINT32 slotNumber)
7817 WRAPPER_NO_CONTRACT;
7818 _ASSERTE(slotNumber < GetNumVirtuals());
7819 _ASSERTE(pMD->IsMethodImpl());
7821 MethodDataObjectEntry* pEntry = GetEntry(slotNumber);
7823 // Fill the entries one level of inheritance at a time,
7824 // stopping when we have filled the MD we are looking for.
7825 while (!pEntry->GetImplMethodDesc() && PopulateNextLevel());
7827 pEntry->SetImplMethodDesc(pMD);
7830 //==========================================================================================
7831 void MethodTable::MethodDataObject::InvalidateCachedVirtualSlot(UINT32 slotNumber)
7833 WRAPPER_NO_CONTRACT;
7834 _ASSERTE(slotNumber < GetNumVirtuals());
7836 MethodDataObjectEntry *pEntry = GetEntry(slotNumber);
7837 pEntry->SetImplMethodDesc(NULL);
7840 //==========================================================================================
7841 MethodDesc *MethodTable::MethodDataInterface::GetDeclMethodDesc(UINT32 slotNumber)
7843 WRAPPER_NO_CONTRACT;
7844 return m_pDeclMT->GetMethodDescForSlot(slotNumber);
7847 //==========================================================================================
7848 MethodDesc *MethodTable::MethodDataInterface::GetImplMethodDesc(UINT32 slotNumber)
7850 WRAPPER_NO_CONTRACT;
7851 return MethodTable::MethodDataInterface::GetDeclMethodDesc(slotNumber);
7854 //==========================================================================================
7855 void MethodTable::MethodDataInterface::InvalidateCachedVirtualSlot(UINT32 slotNumber)
7857 LIMITED_METHOD_CONTRACT;
7859 // MethodDataInterface does not store any cached MethodDesc values
7863 //==========================================================================================
7864 UINT32 MethodTable::MethodDataInterfaceImpl::GetObjectSize(MethodTable *pMTDecl)
7866 WRAPPER_NO_CONTRACT;
7867 UINT32 cb = sizeof(MethodDataInterfaceImpl);
7868 cb += pMTDecl->GetNumMethods() * sizeof(MethodDataEntry);
7872 //==========================================================================================
7873 // This will fill in all the MethodEntry slots present in the current MethodTable
7875 MethodTable::MethodDataInterfaceImpl::Init(
7876 const DispatchMapTypeID * rgDeclTypeIDs,
7877 UINT32 cDeclTypeIDs,
7883 WRAPPER(GC_TRIGGERS);
7884 PRECONDITION(CheckPointer(pDecl));
7885 PRECONDITION(CheckPointer(pImpl));
7886 PRECONDITION(pDecl->GetDeclMethodTable()->IsInterface());
7887 PRECONDITION(!pImpl->GetDeclMethodTable()->IsInterface());
7888 PRECONDITION(pDecl->GetDeclMethodTable() == pDecl->GetImplMethodTable());
7889 PRECONDITION(pImpl->GetDeclMethodTable() == pImpl->GetImplMethodTable());
7890 PRECONDITION(pDecl != pImpl);
7893 // Store and AddRef the decl and impl data.
7899 m_iNextChainDepth = 0;
7900 // Need side effects of the calls, but not the result.
7901 /* MethodTable *pDeclMT = */ pDecl->GetDeclMethodTable();
7902 /* MethodTable *pImplMT = */ pImpl->GetImplMethodTable();
7903 m_rgDeclTypeIDs = rgDeclTypeIDs;
7904 m_cDeclTypeIDs = cDeclTypeIDs;
7906 // Initialize each entry.
7907 for (UINT32 i = 0; i < GetNumMethods(); i++) {
7908 // Initialize the entry
7909 GetEntry(i)->Init();
7911 } // MethodTable::MethodDataInterfaceImpl::Init
7913 //==========================================================================================
7914 MethodTable::MethodDataInterfaceImpl::MethodDataInterfaceImpl(
7915 const DispatchMapTypeID * rgDeclTypeIDs,
7916 UINT32 cDeclTypeIDs,
7918 MethodData * pImpl) :
7919 MethodData(pImpl->GetDeclMethodTable(), pDecl->GetDeclMethodTable())
7921 WRAPPER_NO_CONTRACT;
7922 Init(rgDeclTypeIDs, cDeclTypeIDs, pDecl, pImpl);
7925 //==========================================================================================
7926 MethodTable::MethodDataInterfaceImpl::~MethodDataInterfaceImpl()
7928 WRAPPER_NO_CONTRACT;
7929 CONSISTENCY_CHECK(CheckPointer(m_pDecl));
7930 CONSISTENCY_CHECK(CheckPointer(m_pImpl));
7935 //==========================================================================================
7937 MethodTable::MethodDataInterfaceImpl::PopulateNextLevel()
7939 LIMITED_METHOD_CONTRACT;
7941 // Get the chain depth to next decode.
7942 UINT32 iChainDepth = GetNextChainDepth();
7944 // If the chain depth is MAX_CHAIN_DEPTH, then we've already parsed every parent.
7945 if (iChainDepth == MAX_CHAIN_DEPTH) {
7949 // Now move up the chain to the target.
7950 MethodTable *pMTCur = m_pImpl->GetImplMethodTable();
7951 for (UINT32 i = 0; pMTCur != NULL && i < iChainDepth; i++) {
7952 pMTCur = pMTCur->GetParentMethodTable();
7955 // If we reached the end, then we're done.
7956 if (pMTCur == NULL) {
7957 SetNextChainDepth(MAX_CHAIN_DEPTH);
7961 if (m_cDeclTypeIDs != 0)
7962 { // We got the TypeIDs from TypeLoader, use them
7963 ProcessMap(m_rgDeclTypeIDs, m_cDeclTypeIDs, pMTCur, iChainDepth, GetEntryData(), GetNumVirtuals());
7966 { // We should decode all interface duplicates of code:m_pDecl
7967 MethodTable * pDeclMT = m_pDecl->GetImplMethodTable();
7968 INDEBUG(BOOL dbg_fInterfaceFound = FALSE);
7970 // Call code:ProcessMap for every (duplicate) occurrence of interface code:pDeclMT in the interface
7971 // map of code:m_pImpl
7972 MethodTable::InterfaceMapIterator it = m_pImpl->GetImplMethodTable()->IterateInterfaceMap();
7975 if (it.CurrentInterfaceMatches(m_pImpl->GetImplMethodTable(), pDeclMT))
7976 { // We found the interface
7977 INDEBUG(dbg_fInterfaceFound = TRUE);
7978 DispatchMapTypeID declTypeID = DispatchMapTypeID::InterfaceClassID(it.GetIndex());
7980 ProcessMap(&declTypeID, 1, pMTCur, iChainDepth, GetEntryData(), GetNumVirtuals());
7983 // The interface code:m_pDecl should be found at least once in the interface map of code:m_pImpl,
7984 // otherwise someone passed wrong information
7985 _ASSERTE(dbg_fInterfaceFound);
7988 SetNextChainDepth(iChainDepth + 1);
7991 } // MethodTable::MethodDataInterfaceImpl::PopulateNextLevel
7993 //==========================================================================================
7994 UINT32 MethodTable::MethodDataInterfaceImpl::MapToImplSlotNumber(UINT32 slotNumber)
7996 LIMITED_METHOD_CONTRACT;
7998 _ASSERTE(slotNumber < GetNumMethods());
8000 MethodDataEntry *pEntry = GetEntry(slotNumber);
8001 while (!pEntry->IsImplInit() && PopulateNextLevel()) {}
8002 if (pEntry->IsImplInit()) {
8003 return pEntry->GetImplSlotNum();
8006 return INVALID_SLOT_NUMBER;
8010 //==========================================================================================
8011 DispatchSlot MethodTable::MethodDataInterfaceImpl::GetImplSlot(UINT32 slotNumber)
8013 WRAPPER_NO_CONTRACT;
8014 UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber);
8015 if (implSlotNumber == INVALID_SLOT_NUMBER) {
8016 return DispatchSlot(NULL);
8018 return m_pImpl->GetImplSlot(implSlotNumber);
8021 //==========================================================================================
8022 UINT32 MethodTable::MethodDataInterfaceImpl::GetImplSlotNumber(UINT32 slotNumber)
8024 WRAPPER_NO_CONTRACT;
8025 return MapToImplSlotNumber(slotNumber);
8028 //==========================================================================================
8029 MethodDesc *MethodTable::MethodDataInterfaceImpl::GetImplMethodDesc(UINT32 slotNumber)
8031 WRAPPER_NO_CONTRACT;
8032 UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber);
8033 if (implSlotNumber == INVALID_SLOT_NUMBER) {
8036 return m_pImpl->GetImplMethodDesc(implSlotNumber);
8039 //==========================================================================================
8040 void MethodTable::MethodDataInterfaceImpl::InvalidateCachedVirtualSlot(UINT32 slotNumber)
8042 WRAPPER_NO_CONTRACT;
8043 UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber);
8044 if (implSlotNumber == INVALID_SLOT_NUMBER) {
8047 return m_pImpl->InvalidateCachedVirtualSlot(implSlotNumber);
8050 //==========================================================================================
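// Lazily creates the process-wide MethodDataCache on first use. Note for readers: the cache is
// allocated as a raw byte block sized for 8 entries, placement-constructed, and published with an
// interlocked compare-exchange; if another thread wins the race, the holder frees the local copy.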
8051 void MethodTable::CheckInitMethodDataCache()
8057 if (s_pMethodDataCache == NULL)
8059 UINT32 cb = MethodDataCache::GetObjectSize(8);
8060 NewArrayHolder<BYTE> hb(new BYTE[cb]);
8061 MethodDataCache *pCache = new (hb.GetValue()) MethodDataCache(8);
8062 if (InterlockedCompareExchangeT(
8063 &s_pMethodDataCache, pCache, NULL) == NULL)
8065 hb.SuppressRelease();
8067 // If somebody beat us, return and allow the holders to take care of cleanup.
8075 //==========================================================================================
8076 void MethodTable::ClearMethodDataCache()
8078 LIMITED_METHOD_CONTRACT;
8079 if (s_pMethodDataCache != NULL) {
8080 s_pMethodDataCache->Clear();
8084 //==========================================================================================
8085 MethodTable::MethodData *MethodTable::FindMethodDataHelper(MethodTable *pMTDecl, MethodTable *pMTImpl)
8090 CONSISTENCY_CHECK(s_fUseMethodDataCache);
8093 return s_pMethodDataCache->Find(pMTDecl, pMTImpl);
8096 //==========================================================================================
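// Looks up cached MethodData for the parent of pMT (with decl == impl == parent). This is only a
// cache probe: it returns NULL unless both the method-data cache and the parent-method-data
// optimization are enabled, and it is never used for interfaces.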
8097 MethodTable::MethodData *MethodTable::FindParentMethodDataHelper(MethodTable *pMT)
8106 MethodData *pData = NULL;
8107 if (s_fUseMethodDataCache && s_fUseParentMethodData) {
8108 if (!pMT->IsInterface()) {
8109 //@todo : this won't be correct for non-shared code
8110 MethodTable *pMTParent = pMT->GetParentMethodTable();
8111 if (pMTParent != NULL) {
8112 pData = FindMethodDataHelper(pMTParent, pMTParent);
8119 //==========================================================================================
8120 // This method does not cache the resulting MethodData object in the global MethodDataCache.
8121 // The TypeIDs (rgDeclTypeIDs with cDeclTypeIDs items) have to be sorted.
8122 MethodTable::MethodData *
8123 MethodTable::GetMethodDataHelper(
8124 const DispatchMapTypeID * rgDeclTypeIDs,
8125 UINT32 cDeclTypeIDs,
8126 MethodTable * pMTDecl,
8127 MethodTable * pMTImpl)
8131 WRAPPER(GC_TRIGGERS);
8132 PRECONDITION(CheckPointer(pMTDecl));
8133 PRECONDITION(CheckPointer(pMTImpl));
8136 //@TODO: Must adjust this to use an alternate allocator so that we don't
8137 //@TODO: potentially cause deadlocks on the debug thread.
8138 SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE;
8140 CONSISTENCY_CHECK(pMTDecl->IsInterface() && !pMTImpl->IsInterface());
8143 // Check that rgDeclTypeIDs are sorted, are valid interface indexes, and reference only the pMTDecl interface
8145 InterfaceInfo_t * rgImplInterfaceMap = pMTImpl->GetInterfaceMap();
8146 UINT32 cImplInterfaceMap = pMTImpl->GetNumInterfaces();
8147 // Verify that all types referenced by code:rgDeclTypeIDs are code:pMTDecl (declared interface)
8148 for (UINT32 nDeclTypeIDIndex = 0; nDeclTypeIDIndex < cDeclTypeIDs; nDeclTypeIDIndex++)
8150 if (nDeclTypeIDIndex > 0)
8151 { // Verify that interface indexes are sorted
8152 _ASSERTE(rgDeclTypeIDs[nDeclTypeIDIndex - 1].GetInterfaceNum() < rgDeclTypeIDs[nDeclTypeIDIndex].GetInterfaceNum());
8154 UINT32 nInterfaceIndex = rgDeclTypeIDs[nDeclTypeIDIndex].GetInterfaceNum();
8155 _ASSERTE(nInterfaceIndex <= cImplInterfaceMap);
8157 OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS);
8158 _ASSERTE(rgImplInterfaceMap[nInterfaceIndex].GetApproxMethodTable(pMTImpl->GetLoaderModule())->HasSameTypeDefAs(pMTDecl));
8164 // Can't cache, since this is a custom method used in BuildMethodTable
8165 MethodDataWrapper hDecl(GetMethodData(pMTDecl, FALSE));
8166 MethodDataWrapper hImpl(GetMethodData(pMTImpl, FALSE));
8168 MethodDataInterfaceImpl * pData = new ({ pMTDecl }) MethodDataInterfaceImpl(rgDeclTypeIDs, cDeclTypeIDs, hDecl, hImpl);
8171 } // MethodTable::GetMethodDataHelper
8173 //==========================================================================================
8174 // The fCanCache argument determines if the resulting MethodData object can
8175 // be added to the global MethodDataCache. This is used when requesting a
8176 // MethodData object for a type currently being built.
8177 MethodTable::MethodData *MethodTable::GetMethodDataHelper(MethodTable *pMTDecl,
8178 MethodTable *pMTImpl,
8183 WRAPPER(GC_TRIGGERS);
8184 PRECONDITION(CheckPointer(pMTDecl));
8185 PRECONDITION(CheckPointer(pMTImpl));
8186 PRECONDITION(pMTDecl == pMTImpl ||
8187 (pMTDecl->IsInterface() && !pMTImpl->IsInterface()));
8190 //@TODO: Must adjust this to use an alternate allocator so that we don't
8191 //@TODO: potentially cause deadlocks on the debug thread.
8192 SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE;
8194 if (s_fUseMethodDataCache) {
8195 MethodData *pData = FindMethodDataHelper(pMTDecl, pMTImpl);
8196 if (pData != NULL) {
8201 // If we get here, there is no matching entry in the cache.
8202 MethodData *pData = NULL;
8203 if (pMTDecl == pMTImpl) {
8204 if (pMTDecl->IsInterface()) {
8205 pData = new MethodDataInterface(pMTDecl);
8208 UINT32 cb = MethodDataObject::GetObjectSize(pMTDecl);
8209 MethodDataHolder h(FindParentMethodDataHelper(pMTDecl));
8210 pData = new ({ pMTDecl }) MethodDataObject(pMTDecl, h.GetValue());
8214 pData = GetMethodDataHelper(
8221 // Insert in the cache if it is active.
8222 if (fCanCache && s_fUseMethodDataCache) {
8223 s_pMethodDataCache->Insert(pData);
8226 // Do not AddRef, already initialized to 1.
8230 //==========================================================================================
8231 // The fCanCache argument determines if the resulting MethodData object can
8232 // be added to the global MethodDataCache. This is used when requesting a
8233 // MethodData object for a type currently being built.
8234 MethodTable::MethodData *MethodTable::GetMethodData(MethodTable *pMTDecl,
8235 MethodTable *pMTImpl,
8240 WRAPPER(GC_TRIGGERS);
8243 MethodDataWrapper hData(GetMethodDataHelper(pMTDecl, pMTImpl, fCanCache));
8244 hData.SuppressRelease();
8248 //==========================================================================================
8249 // This method does not cache the resulting MethodData object in the global MethodDataCache.
8250 MethodTable::MethodData *
8251 MethodTable::GetMethodData(
8252 const DispatchMapTypeID * rgDeclTypeIDs,
8253 UINT32 cDeclTypeIDs,
8254 MethodTable * pMTDecl,
8255 MethodTable * pMTImpl)
8259 WRAPPER(GC_TRIGGERS);
8260 PRECONDITION(pMTDecl != pMTImpl);
8261 PRECONDITION(pMTDecl->IsInterface());
8262 PRECONDITION(!pMTImpl->IsInterface());
8265 MethodDataWrapper hData(GetMethodDataHelper(rgDeclTypeIDs, cDeclTypeIDs, pMTDecl, pMTImpl));
8266 hData.SuppressRelease();
8270 //==========================================================================================
8271 // The fCanCache argument determines if the resulting MethodData object can
8272 // be added to the global MethodDataCache. This is used when requesting a
8273 // MethodData object for a type currently being built.
8274 MethodTable::MethodData *MethodTable::GetMethodData(MethodTable *pMT,
8277 WRAPPER_NO_CONTRACT;
8278 return GetMethodData(pMT, pMT, fCanCache);
8281 //==========================================================================================
8282 MethodTable::MethodIterator::MethodIterator(MethodTable *pMTDecl, MethodTable *pMTImpl)
8284 WRAPPER_NO_CONTRACT;
8285 Init(pMTDecl, pMTImpl);
8288 //==========================================================================================
8289 MethodTable::MethodIterator::MethodIterator(MethodTable *pMT)
8291 WRAPPER_NO_CONTRACT;
8295 //==========================================================================================
8296 MethodTable::MethodIterator::MethodIterator(MethodData *pMethodData)
8301 PRECONDITION(CheckPointer(pMethodData));
8304 m_pMethodData = pMethodData;
8305 m_pMethodData->AddRef();
8307 m_iMethods = (INT32)m_pMethodData->GetNumMethods();
8310 //==========================================================================================
8311 MethodTable::MethodIterator::MethodIterator(const MethodIterator &it)
8313 WRAPPER_NO_CONTRACT;
8314 m_pMethodData = it.m_pMethodData;
8315 m_pMethodData->AddRef();
8317 m_iMethods = it.m_iMethods;
8320 //==========================================================================================
8321 void MethodTable::MethodIterator::Init(MethodTable *pMTDecl, MethodTable *pMTImpl)
8325 WRAPPER(GC_TRIGGERS);
8326 INJECT_FAULT(COMPlusThrowOM());
8327 PRECONDITION(CheckPointer(pMTDecl));
8328 PRECONDITION(CheckPointer(pMTImpl));
8331 LOG((LF_LOADER, LL_INFO10000, "MT::MethodIterator created for %s.\n", pMTDecl->GetDebugClassName()));
8333 m_pMethodData = MethodTable::GetMethodData(pMTDecl, pMTImpl);
8334 CONSISTENCY_CHECK(CheckPointer(m_pMethodData));
8336 m_iMethods = (INT32)m_pMethodData->GetNumMethods();
8338 #endif // !DACCESS_COMPILE
8340 //==========================================================================================
8342 void MethodTable::IntroducedMethodIterator::SetChunk(MethodDescChunk * pChunk)
8344 LIMITED_METHOD_CONTRACT;
8348 m_pMethodDesc = pChunk->GetFirstMethodDesc();
8351 m_pChunkEnd = dac_cast<TADDR>(pChunk) + pChunk->SizeOf();
8355 m_pMethodDesc = NULL;
8359 //==========================================================================================
8361 MethodDesc * MethodTable::IntroducedMethodIterator::GetFirst(MethodTable *pMT)
8363 LIMITED_METHOD_CONTRACT;
8364 MethodDescChunk * pChunk = pMT->GetClass()->GetChunks();
8365 return (pChunk != NULL) ? pChunk->GetFirstMethodDesc() : NULL;
8368 //==========================================================================================
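// Advances to the MethodDesc that follows pMD. A short sketch of the logic: the next descriptor
// normally lives immediately after the current one inside the same MethodDescChunk; when the end
// of the chunk is reached, iteration continues with the first MethodDesc of the next chunk (or
// returns NULL when there are no more chunks).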
8369 MethodDesc * MethodTable::IntroducedMethodIterator::GetNext(MethodDesc * pMD)
8371 WRAPPER_NO_CONTRACT;
8373 MethodDescChunk * pChunk = pMD->GetMethodDescChunk();
8375 // Check whether the next MethodDesc is still within the bounds of the current chunk
8376 TADDR pNext = dac_cast<TADDR>(pMD) + pMD->SizeOf();
8377 TADDR pEnd = dac_cast<TADDR>(pChunk) + pChunk->SizeOf();
8381 // Just skip to the next method in the same chunk
8382 pMD = PTR_MethodDesc(pNext);
8386 _ASSERTE(pNext == pEnd);
8388 // We have walked all the methods in the current chunk. Move on
8389 // to the next chunk.
8390 pChunk = pChunk->GetNextChunk();
8392 pMD = (pChunk != NULL) ? pChunk->GetFirstMethodDesc() : NULL;
8398 //==========================================================================================
8399 CHECK MethodTable::CheckActivated()
8401 WRAPPER_NO_CONTRACT;
8405 CHECK(GetModule()->CheckActivated());
8408 // <TODO> Check all generic type parameters as well </TODO>
8414 // Optimization intended for EnsureInstanceActive, EnsureActive only
8415 #pragma optimize("t", on)
8417 //==========================================================================================
8419 #ifndef DACCESS_COMPILE
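// Makes sure every module an instance of this type depends on is active: the defining module, the
// modules of parent types that introduce module dependencies, and (recursively) the method tables
// of any non-TypeDesc instantiation arguments.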
8420 VOID MethodTable::EnsureInstanceActive()
8430 Module * pModule = GetModule();
8431 pModule->EnsureActive();
8433 MethodTable * pMT = this;
8434 while (pMT->HasModuleDependencies())
8436 pMT = pMT->GetParentMethodTable();
8437 _ASSERTE(pMT != NULL);
8439 Module * pParentModule = pMT->GetModule();
8440 if (pParentModule != pModule)
8442 pModule = pParentModule;
8443 pModule->EnsureActive();
8447 if (HasInstantiation())
8450 Instantiation inst = GetInstantiation();
8451 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
8453 TypeHandle thArg = inst[i];
8454 if (!thArg.IsTypeDesc())
8456 thArg.AsMethodTable()->EnsureInstanceActive();
8463 #endif //!DACCESS_COMPILE
8465 //==========================================================================================
8466 #ifndef DACCESS_COMPILE
8467 VOID MethodTable::EnsureActive()
8469 WRAPPER_NO_CONTRACT;
8471 GetModule()->EnsureActive();
8476 #pragma optimize("", on)
8479 //==========================================================================================
8480 CHECK MethodTable::CheckInstanceActivated()
8482 WRAPPER_NO_CONTRACT;
8487 Module * pModule = GetModule();
8488 CHECK(pModule->CheckActivated());
8490 MethodTable * pMT = this;
8491 while (pMT->HasModuleDependencies())
8493 pMT = pMT->GetParentMethodTable();
8494 _ASSERTE(pMT != NULL);
8496 Module * pParentModule = pMT->GetModule();
8497 if (pParentModule != pModule)
8499 pModule = pParentModule;
8500 CHECK(pModule->CheckActivated());
8507 #ifdef DACCESS_COMPILE
8509 //==========================================================================================
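// Reports the memory backing this MethodTable to the DAC so it is captured in dumps: the table
// itself (including the GCDescs stored before it), the canonical MethodTable, the EEClass, the
// parent MethodTable, the non-virtual slots array, the interface map, per-instantiation
// dictionaries, vtable indirection slots, the writeable data, and (for full dumps) the dispatch map.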
8511 MethodTable::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
8513 WRAPPER_NO_CONTRACT;
8515 DAC_CHECK_ENUM_THIS();
8516 EMEM_OUT(("MEM: %p MethodTable\n", dac_cast<TADDR>(this)));
8518 DWORD size = GetEndOffsetOfOptionalMembers();
8519 DacEnumMemoryRegion(dac_cast<TADDR>(this), size);
8521 // Make sure the GCDescs are added to the dump
8522 if (ContainsPointers())
8524 PTR_CGCDesc gcdesc = CGCDesc::GetCGCDescFromMT(this);
8525 size_t size = gcdesc->GetSize();
8526 // Manually calculate the start of the GCDescs because CGCDesc::GetStartOfGCData() isn't DAC'ified.
8527 TADDR gcdescStart = dac_cast<TADDR>(this) - size;
8528 DacEnumMemoryRegion(gcdescStart, size);
8531 if (!IsCanonicalMethodTable())
8533 PTR_MethodTable pMTCanonical = GetCanonicalMethodTable();
8535 if (pMTCanonical.IsValid())
8537 pMTCanonical->EnumMemoryRegions(flags);
8541 DacLogMessage("MT %p invalid canonical MT %p\n", dac_cast<TADDR>(this), dac_cast<TADDR>(pMTCanonical));
8546 PTR_EEClass pClass = GetClass();
8548 if (pClass.IsValid())
8552 // This is kind of a workaround: ArrayClass is derived from EEClass, but the relationship
8553 // isn't virtual, so we only cast when the IsArray() predicate holds above.
8554 // For minidumps, DAC will choke if we don't have the full size given
8555 // by ArrayClass available. If ArrayClass becomes more complex, it
8556 // should get its own EnumMemoryRegions().
8557 DacEnumMemoryRegion(dac_cast<TADDR>(pClass), sizeof(ArrayClass));
8559 pClass->EnumMemoryRegions(flags, this);
8563 DacLogMessage("MT %p invalid class %p\n", dac_cast<TADDR>(this), dac_cast<TADDR>(pClass));
8567 PTR_MethodTable pMTParent = GetParentMethodTable();
8569 if (pMTParent.IsValid())
8571 pMTParent->EnumMemoryRegions(flags);
8574 if (HasNonVirtualSlotsArray())
8576 DacEnumMemoryRegion(dac_cast<TADDR>(GetNonVirtualSlotsArray()), GetNonVirtualSlotsArraySize());
8579 if (HasInterfaceMap())
8581 #ifdef FEATURE_COMINTEROP
8582 if (HasDynamicInterfaceMap())
8583 DacEnumMemoryRegion(dac_cast<TADDR>(GetInterfaceMap()) - sizeof(DWORD_PTR), GetInterfaceMapSize());
8585 #endif // FEATURE_COMINTEROP
8586 DacEnumMemoryRegion(dac_cast<TADDR>(GetInterfaceMap()), GetInterfaceMapSize());
8588 EnumMemoryRegionsForExtraInterfaceInfo();
8591 if (HasPerInstInfo() != NULL)
8593 DacEnumMemoryRegion(dac_cast<TADDR>(GetPerInstInfo()) - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo));
8596 if (GetDictionary() != NULL)
8599 DWORD allocSize = GetInstAndDictSize(&slotSize);
8600 DacEnumMemoryRegion(dac_cast<TADDR>(GetDictionary()), slotSize);
8603 VtableIndirectionSlotIterator it = IterateVtableIndirectionSlots();
8606 DacEnumMemoryRegion(dac_cast<TADDR>(it.GetIndirectionSlot()), it.GetSize());
8609 PTR_MethodTableWriteableData pWriteableData = m_pWriteableData;
8610 if (pWriteableData.IsValid())
8612 pWriteableData.EnumMem();
8615 if (flags != CLRDATA_ENUM_MEM_MINI && flags != CLRDATA_ENUM_MEM_TRIAGE && flags != CLRDATA_ENUM_MEM_HEAP2)
8617 DispatchMap * pMap = GetDispatchMap();
8620 pMap->EnumMemoryRegions(flags);
8623 } // MethodTable::EnumMemoryRegions
8625 #endif // DACCESS_COMPILE
8627 //==========================================================================================
8628 BOOL MethodTable::ContainsGenericMethodVariables()
8639 Instantiation inst = GetInstantiation();
8640 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
8642 if (inst[i].ContainsGenericVariables(TRUE))
8649 //==========================================================================================
8650 Module *MethodTable::GetDefiningModuleForOpenType()
8657 POSTCONDITION((ContainsGenericVariables() != 0) == (RETVAL != NULL));
8662 if (ContainsGenericVariables())
8664 Instantiation inst = GetInstantiation();
8665 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
8667 Module *pModule = inst[i].GetDefiningModuleForOpenType();
8668 if (pModule != NULL)
8676 //==========================================================================================
8677 PCODE MethodTable::GetRestoredSlot(DWORD slotNumber)
8687 // Keep in sync with code:MethodTable::GetRestoredSlotMT
8690 PCODE slot = GetCanonicalMethodTable()->GetSlot(slotNumber);
8691 _ASSERTE(slot != NULL);
8695 //==========================================================================================
8696 MethodTable * MethodTable::GetRestoredSlotMT(DWORD slotNumber)
8706 // Keep in sync with code:MethodTable::GetRestoredSlot
8709 return GetCanonicalMethodTable();
8712 //==========================================================================================
8715 // Methods added by EnC cannot be looked up by slot since
8716 // they have none, see EEClass::AddMethodDesc(). We must perform
8717 // a slow lookup instead of using the fast slot lookup path.
8718 MethodDesc* GetParallelMethodDescForEnC(MethodTable* pMT, MethodDesc* pDefMD)
8725 PRECONDITION(pMT != NULL);
8726 PRECONDITION(pMT->IsCanonicalMethodTable());
8727 PRECONDITION(pDefMD != NULL);
8728 PRECONDITION(pDefMD->IsEnCAddedMethod());
8729 PRECONDITION(pDefMD->GetSlot() == MethodTable::NO_SLOT);
8733 mdMethodDef tkMethod = pDefMD->GetMemberDef();
8734 Module* mod = pDefMD->GetModule();
8735 LOG((LF_ENC, LL_INFO100, "GPMDENC: pMT:%p tok:0x%08x mod:%p\n", pMT, tkMethod, mod));
8737 MethodTable::IntroducedMethodIterator it(pMT);
8738 for (; it.IsValid(); it.Next())
8740 MethodDesc* pMD = it.GetMethodDesc();
8741 if (pMD->GetMemberDef() == tkMethod
8742 && pMD->GetModule() == mod)
8747 LOG((LF_ENC, LL_INFO10000, "GPMDENC: Not found\n"));
8752 MethodDesc* MethodTable::GetParallelMethodDesc(MethodDesc* pDefMD)
8762 #ifdef EnC_SUPPORTED
8763 if (pDefMD->IsEnCAddedMethod())
8764 return GetParallelMethodDescForEnC(this, pDefMD);
8765 #endif // EnC_SUPPORTED
8767 return GetMethodDescForSlot(pDefMD->GetSlot());
8770 #ifndef DACCESS_COMPILE
8772 //==========================================================================================
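// Writes slotCode into the raw storage for the given slot. The debug-only check below enforces
// that slots in vtable chunks shared with the canonical or parent MethodTable are only ever
// patched to a method's stable entry point (or belong to a method that is versionable with
// vtable-slot backpatch), to avoid racing with MethodDesc::SetStableEntryPointInterlocked.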
8773 void MethodTable::SetSlot(UINT32 slotNumber, PCODE slotCode)
8782 if (slotNumber < GetNumVirtuals())
8785 // Verify that slots in shared vtable chunks not owned by this MethodTable are only ever patched to a stable entrypoint.
8786 // This invariant is required to prevent races with code:MethodDesc::SetStableEntryPointInterlocked.
8788 BOOL fSharedVtableChunk = FALSE;
8789 DWORD indirectionIndex = MethodTable::GetIndexOfVtableIndirection(slotNumber);
8791 if (!IsCanonicalMethodTable())
8793 if (GetVtableIndirections()[indirectionIndex] == GetCanonicalMethodTable()->GetVtableIndirections()[indirectionIndex])
8794 fSharedVtableChunk = TRUE;
8797 if (slotNumber < GetNumParentVirtuals())
8799 if (GetVtableIndirections()[indirectionIndex] == GetParentMethodTable()->GetVtableIndirections()[indirectionIndex])
8800 fSharedVtableChunk = TRUE;
8803 if (fSharedVtableChunk)
8805 MethodDesc* pMD = GetMethodDescForSlotAddress(slotCode);
8806 _ASSERTE(pMD->IsVersionableWithVtableSlotBackpatch() || pMD->GetStableEntryPoint() == slotCode);
8811 // IBC logging is not needed here - slots in ngen images are immutable.
8814 // Ensure on ARM that all target addresses are marked as thumb code.
8815 _ASSERTE(IsThumbCode(slotCode));
8818 *GetSlotPtrRaw(slotNumber) = slotCode;
8821 //==========================================================================================
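// Returns TRUE when the type can be constructed via a public default constructor: value types get
// one implicitly, while other types must declare a default constructor whose MethodDesc is public.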
8822 BOOL MethodTable::HasExplicitOrImplicitPublicDefaultConstructor()
8833 // valuetypes have public default ctors implicitly
8837 if (!HasDefaultConstructor())
8842 MethodDesc * pCanonMD = GetMethodDescForSlot(GetDefaultConstructorSlot());
8843 return pCanonMD != NULL && pCanonMD->IsPublic();
8846 //==========================================================================================
8847 MethodDesc *MethodTable::GetDefaultConstructor(BOOL forceBoxedEntryPoint /* = FALSE */)
8849 WRAPPER_NO_CONTRACT;
8850 _ASSERTE(HasDefaultConstructor());
8851 MethodDesc *pCanonMD = GetMethodDescForSlot(GetDefaultConstructorSlot());
8852 // The default constructor for a value type is an instantiating stub.
8853 // The easiest way to find the right stub is to use the following function,
8854 // which in the simple case of the default constructor for a class simply
8855 // returns pCanonMD immediately.
8856 return MethodDesc::FindOrCreateAssociatedMethodDesc(pCanonMD,
8858 forceBoxedEntryPoint,
8859 Instantiation(), /* no method instantiation */
8860 FALSE /* no allowInstParam */);
8863 //==========================================================================================
8864 // Finds the (non-unboxing) MethodDesc that implements the interface virtual static method pInterfaceMD.
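// A rough outline of the resolution below (reader's summary, not an exhaustive specification):
// first search each level of the type hierarchy for an exact MethodImpl match, then retry the
// interface map allowing variant (or equivalent) interface matches, then fall back to default
// interface implementations (optionally allowing variance on a second pass), and finally, for
// non-abstract interface methods on exact types, return the interface method itself. Depending on
// AllowNullResult, failure either returns NULL or throws a TypeLoadException.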
8866 MethodTable::ResolveVirtualStaticMethod(
8867 MethodTable* pInterfaceType,
8868 MethodDesc* pInterfaceMD,
8869 ResolveVirtualStaticMethodFlags resolveVirtualStaticMethodFlags,
8870 BOOL* uniqueResolution,
8871 ClassLoadLevel level)
8873 bool verifyImplemented = (resolveVirtualStaticMethodFlags & ResolveVirtualStaticMethodFlags::VerifyImplemented) != ResolveVirtualStaticMethodFlags::None;
8874 bool allowVariantMatches = (resolveVirtualStaticMethodFlags & ResolveVirtualStaticMethodFlags::AllowVariantMatches) != ResolveVirtualStaticMethodFlags::None;
8875 bool instantiateMethodParameters = (resolveVirtualStaticMethodFlags & ResolveVirtualStaticMethodFlags::InstantiateResultOverFinalMethodDesc) != ResolveVirtualStaticMethodFlags::None;
8876 bool allowNullResult = (resolveVirtualStaticMethodFlags & ResolveVirtualStaticMethodFlags::AllowNullResult) != ResolveVirtualStaticMethodFlags::None;
8878 if (uniqueResolution != nullptr)
8880 *uniqueResolution = TRUE;
8882 if (!pInterfaceMD->IsSharedByGenericMethodInstantiations() && !pInterfaceType->IsSharedByGenericInstantiations())
8884 // Check that this type does not implement the canonical form of the interface for a shared generic. If it does,
8885 // we cannot compute an exact target method: even if there is an exact match in the type hierarchy,
8886 // we are not guaranteed to find the right result, because we may find a match on a base type when the correct
8887 // match is on a more derived type.
8889 MethodTable *pInterfaceTypeCanonical = pInterfaceType->GetCanonicalMethodTable();
8890 bool canonicalEquivalentFound = false;
8891 if (pInterfaceType != pInterfaceTypeCanonical)
8893 InterfaceMapIterator it = IterateInterfaceMap();
8896 if (it.CurrentInterfaceMatches(this, pInterfaceTypeCanonical))
8898 canonicalEquivalentFound = true;
8904 if (!canonicalEquivalentFound)
8906 // Search for a match level by level in the type hierarchy
8907 for (MethodTable* pMT = this; pMT != nullptr; pMT = pMT->GetParentMethodTable())
8909 MethodDesc* pMD = pMT->TryResolveVirtualStaticMethodOnThisType(pInterfaceType, pInterfaceMD, resolveVirtualStaticMethodFlags & ~ResolveVirtualStaticMethodFlags::AllowVariantMatches, level);
8915 if (pInterfaceType->HasVariance() || pInterfaceType->HasTypeEquivalence())
8917 // Variant interface dispatch
8918 MethodTable::InterfaceMapIterator it = pMT->IterateInterfaceMap();
8921 if (it.CurrentInterfaceMatches(this, pInterfaceType))
8923 // This is the variant interface check logic; skip the exact matches, as they were handled above
8927 if (!it.HasSameTypeDefAs(pInterfaceType))
8929 // Variance matches require a typedef match
8930 // Equivalence isn't sufficient, and is uninteresting as equivalent interfaces cannot have static virtuals.
8934 BOOL equivalentOrVariantCompatible;
8936 MethodTable *pItfInMap = it.GetInterface(this, CLASS_LOAD_EXACTPARENTS);
8938 if (allowVariantMatches)
8940 equivalentOrVariantCompatible = pItfInMap->CanCastToInterface(pInterfaceType, NULL);
8944 // When performing override checking to ensure that a concrete type is valid, require that the implementation
8945 // actually implements the exact or equivalent interface.
8946 equivalentOrVariantCompatible = pItfInMap->IsEquivalentTo(pInterfaceType);
8949 if (equivalentOrVariantCompatible)
8951 // Variant or equivalent matching interface found
8952 // Attempt to resolve on variance matched interface
8953 pMD = pMT->TryResolveVirtualStaticMethodOnThisType(pItfInMap, pInterfaceMD, resolveVirtualStaticMethodFlags & ~ResolveVirtualStaticMethodFlags::AllowVariantMatches, level);
8963 MethodDesc *pMDDefaultImpl = nullptr;
8964 BOOL allowVariantMatchInDefaultImplementationLookup = FALSE;
8967 FindDefaultInterfaceImplementationFlags findDefaultImplementationFlags = FindDefaultInterfaceImplementationFlags::None;
8968 if (allowVariantMatchInDefaultImplementationLookup)
8970 findDefaultImplementationFlags |= FindDefaultInterfaceImplementationFlags::AllowVariance;
8972 if (uniqueResolution == nullptr)
8974 findDefaultImplementationFlags |= FindDefaultInterfaceImplementationFlags::ThrowOnConflict;
8976 if (instantiateMethodParameters)
8978 findDefaultImplementationFlags |= FindDefaultInterfaceImplementationFlags::InstantiateFoundMethodDesc;
8981 BOOL haveUniqueDefaultImplementation = FindDefaultInterfaceImplementation(
8985 findDefaultImplementationFlags,
8987 if (haveUniqueDefaultImplementation || (pMDDefaultImpl != nullptr && (verifyImplemented || uniqueResolution != nullptr)))
8989 // We tolerate conflicts upon verification of implemented SVMs so that they only blow up when actually called at execution time.
8990 if (uniqueResolution != nullptr)
8992 // Always report a unique resolution when reporting results of a variant match
8993 if (allowVariantMatchInDefaultImplementationLookup)
8994 *uniqueResolution = TRUE;
8996 *uniqueResolution = haveUniqueDefaultImplementation;
8998 return pMDDefaultImpl;
9001 // We only loop at most twice here
9002 if (allowVariantMatchInDefaultImplementationLookup)
9007 allowVariantMatchInDefaultImplementationLookup = allowVariantMatches;
9008 } while (allowVariantMatchInDefaultImplementationLookup);
9011 // Default implementation logic, which only kicks in for default implementations when looking up on an exact interface target
9012 if (!pInterfaceMD->IsAbstract() && !(this == g_pCanonMethodTableClass) && !IsSharedByGenericInstantiations())
9014 return pInterfaceMD->FindOrCreateAssociatedMethodDesc(pInterfaceMD, pInterfaceType, FALSE, pInterfaceMD->GetMethodInstantiation(), FALSE);
9018 if (allowNullResult)
9021 COMPlusThrow(kTypeLoadException, E_NOTIMPL);
9024 //==========================================================================================
9025 // Try to locate the appropriate MethodImpl matching a given interface static virtual method.
9026 // Returns nullptr on failure.
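// Sketch of the lookup: enumerate the MethodImpl records declared on this type, keep those whose
// Decl resolves to pInterfaceMD on a matching (possibly variant) interface, and require the Body
// to be a MethodDef defined on this very type. The first match is returned immediately unless we
// are only verifying that an implementation exists, in which case the scan continues and a second
// match for the same method triggers a TypeLoadException.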
9028 MethodTable::TryResolveVirtualStaticMethodOnThisType(MethodTable* pInterfaceType, MethodDesc* pInterfaceMD, ResolveVirtualStaticMethodFlags resolveVirtualStaticMethodFlags, ClassLoadLevel level)
9030 bool instantiateMethodParameters = (resolveVirtualStaticMethodFlags & ResolveVirtualStaticMethodFlags::InstantiateResultOverFinalMethodDesc) != ResolveVirtualStaticMethodFlags::None;
9031 bool allowVariance = (resolveVirtualStaticMethodFlags & ResolveVirtualStaticMethodFlags::AllowVariantMatches) != ResolveVirtualStaticMethodFlags::None;
9032 bool verifyImplemented = (resolveVirtualStaticMethodFlags & ResolveVirtualStaticMethodFlags::VerifyImplemented) != ResolveVirtualStaticMethodFlags::None;
9035 IMDInternalImport* pMDInternalImport = GetMDImport();
9036 HENUMInternalMethodImplHolder hEnumMethodImpl(pMDInternalImport);
9037 hr = hEnumMethodImpl.EnumMethodImplInitNoThrow(GetCl());
9038 SigTypeContext sigTypeContext(this);
9042 COMPlusThrow(kTypeLoadException, hr);
9045 // This gets the count out of the metadata interface.
9046 uint32_t dwNumberMethodImpls = hEnumMethodImpl.EnumMethodImplGetCount();
9047 MethodDesc* pPrevMethodImpl = nullptr;
9049 // Iterate through each MethodImpl declared on this class
9050 for (uint32_t i = 0; i < dwNumberMethodImpls; i++)
9054 hr = hEnumMethodImpl.EnumMethodImplNext(&methodBody, &methodDecl);
9057 COMPlusThrow(kTypeLoadException, hr);
9061 // In the odd case that the enumerator fails before we've reached the total reported
9062 // entries, let's reset the count and just break out. (Should we throw?)
9066 hr = pMDInternalImport->GetParentToken(methodDecl, &tkParent);
9069 COMPlusThrow(kTypeLoadException, hr);
9071 MethodTable *pInterfaceMT = ClassLoader::LoadTypeDefOrRefOrSpecThrowing(
9075 ClassLoader::ThrowIfNotFound,
9076 ClassLoader::FailIfUninstDefOrRef,
9077 ClassLoader::LoadTypes,
9078 CLASS_LOAD_EXACTPARENTS)
9083 // Allow variant, but not equivalent interface match
9084 if (!pInterfaceType->HasSameTypeDefAs(pInterfaceMT) ||
9085 !pInterfaceMT->CanCastTo(pInterfaceType, NULL))
9092 if (pInterfaceMT != pInterfaceType)
9097 MethodDesc *pMethodDecl;
9099 if ((TypeFromToken(methodDecl) == mdtMethodDef) || pInterfaceMT->IsFullyLoaded())
9101 pMethodDecl = MemberLoader::GetMethodDescFromMemberDefOrRefOrSpec(
9105 /* strictMetadataChecks */ FALSE,
9106 /* allowInstParam */ FALSE,
9107 /* owningTypeLoadLevel */ CLASS_LOAD_EXACTPARENTS);
9109 else if (TypeFromToken(methodDecl) == mdtMemberRef)
9112 PCCOR_SIGNATURE pSig;
9115 IfFailThrow(pMDInternalImport->GetNameAndSigOfMemberRef(methodDecl, &pSig, &cSig, &szMember));
9117 // Do a quick name check to avoid excess use of FindMethod
9118 if (strcmp(szMember, pInterfaceMD->GetName()) != 0)
9123 pMethodDecl = MemberLoader::FindMethod(pInterfaceMT, szMember, pSig, cSig, GetModule());
9127 COMPlusThrow(kTypeLoadException, E_FAIL);
9130 if (pMethodDecl == nullptr)
9132 COMPlusThrow(kTypeLoadException, E_FAIL);
9134 if (!pMethodDecl->HasSameMethodDefAs(pInterfaceMD))
9139 // Spec requires that all body tokens for MethodImpls that refer to static virtual implementation methods must be MethodDef tokens.
9140 if (TypeFromToken(methodBody) != mdtMethodDef)
9142 COMPlusThrow(kTypeLoadException, E_FAIL);
9145 MethodDesc *pMethodImpl = MemberLoader::GetMethodDescFromMethodDef(
9149 CLASS_LOAD_EXACTPARENTS);
9150 if (pMethodImpl == nullptr)
9152 COMPlusThrow(kTypeLoadException, E_FAIL);
9155 // Spec requires that all body tokens for MethodImpls that refer to static virtual implementation methods must refer to methods
9156 // defined on the same type that defines the MethodImpl
9157 if (!HasSameTypeDefAs(pMethodImpl->GetMethodTable()))
9159 COMPlusThrow(kTypeLoadException, E_FAIL);
9162 if (!verifyImplemented && instantiateMethodParameters)
9164 pMethodImpl = pMethodImpl->FindOrCreateAssociatedMethodDesc(
9168 pInterfaceMD->GetMethodInstantiation(),
9169 /* allowInstParam */ FALSE,
9170 /* forceRemotableMethod */ FALSE,
9171 /* allowCreate */ TRUE,
9174 if (pMethodImpl != nullptr)
9176 if (!verifyImplemented)
9180 if (pPrevMethodImpl != nullptr)
9182 // Two MethodImpl records found for the same virtual static interface method
9183 COMPlusThrow(kTypeLoadException, E_FAIL);
9185 pPrevMethodImpl = pMethodImpl;
9189 return pPrevMethodImpl;
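// For every interface in this type's interface map that declares virtual static methods, check
// that each abstract static virtual (that is not re-abstracted via a MethodImpl) resolves to an
// implementation on this type; otherwise throw IDS_CLASSLOAD_STATICVIRTUAL_NOTIMPL.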
9193 MethodTable::VerifyThatAllVirtualStaticMethodsAreImplemented()
9195 InterfaceMapIterator it = IterateInterfaceMap();
9198 MethodTable *pInterfaceMT = it.GetInterfaceApprox();
9199 if (pInterfaceMT->HasVirtualStaticMethods())
9201 pInterfaceMT = it.GetInterface(this, CLASS_LOAD_EXACTPARENTS);
9202 for (MethodIterator it(pInterfaceMT); it.IsValid(); it.Next())
9204 MethodDesc *pMD = it.GetMethodDesc();
9205 // This flag is not really used; passing its address to ResolveVirtualStaticMethod just suppresses
9206 // the ambiguous-resolution exception, as we should delay the exception until we actually hit
9207 // the ambiguity at execution time.
9208 BOOL uniqueResolution;
9209 if (pMD->IsVirtual() &&
9211 !pMD->HasMethodImplSlot() && // Re-abstractions are virtual static abstract with a MethodImpl
9212 (pMD->IsAbstract() &&
9213 !ResolveVirtualStaticMethod(
9216 ResolveVirtualStaticMethodFlags::AllowNullResult | ResolveVirtualStaticMethodFlags::VerifyImplemented,
9217 /* uniqueResolution */ &uniqueResolution,
9218 /* level */ CLASS_LOAD_EXACTPARENTS)))
9220 IMDInternalImport* pInternalImport = GetModule()->GetMDImport();
9221 GetModule()->GetAssembly()->ThrowTypeLoadException(pInternalImport, GetCl(), pMD->GetName(), IDS_CLASSLOAD_STATICVIRTUAL_NOTIMPL);
9228 //==========================================================================================
9229 // Finds the (non-unboxing) MethodDesc that implements the interface method pInterfaceMD.
9231 // Note our ability to resolve constraint methods is affected by the degree of code sharing we are
9232 // performing for generic code.
9235 // MethodDesc which can be used as unvirtualized call. Returns NULL if VSD has to be used.
9237 MethodTable::TryResolveConstraintMethodApprox(
9238 TypeHandle thInterfaceType,
9239 MethodDesc * pInterfaceMD,
9240 BOOL * pfForceUseRuntimeLookup) // = NULL
9247 if (pInterfaceMD->IsStatic())
9249 _ASSERTE(!thInterfaceType.IsTypeDesc());
9250 _ASSERTE(thInterfaceType.IsInterface());
9251 BOOL uniqueResolution = TRUE;
9253 ResolveVirtualStaticMethodFlags flags = ResolveVirtualStaticMethodFlags::AllowVariantMatches
9254 | ResolveVirtualStaticMethodFlags::InstantiateResultOverFinalMethodDesc;
9255 if (pfForceUseRuntimeLookup != NULL)
9257 flags |= ResolveVirtualStaticMethodFlags::AllowNullResult;
9260 MethodDesc *result = ResolveVirtualStaticMethod(
9261 thInterfaceType.GetMethodTable(),
9264 (pfForceUseRuntimeLookup != NULL ? &uniqueResolution : NULL));
9266 if (result == NULL || !uniqueResolution)
9268 _ASSERTE(pfForceUseRuntimeLookup != NULL);
9269 *pfForceUseRuntimeLookup = TRUE;
9275 // We can't resolve constraint calls effectively for reference types, and there's
9276 // not a lot of perf. benefit in doing it anyway.
9280 LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: not a value type %s\n", GetDebugClassName()));
9284 // 1. Find the (possibly generic) method that would implement the
9285 // constraint if we were making a call on a boxed value type.
9287 MethodTable * pCanonMT = GetCanonicalMethodTable();
9289 MethodDesc * pGenInterfaceMD = pInterfaceMD->StripMethodInstantiation();
9290 MethodDesc * pMD = NULL;
9291 if (pGenInterfaceMD->IsInterface())
9292 { // Sometimes (when compiling shared generic code)
9293 // we don't have enough exact type information at JIT time
9294 // even to decide whether we will be able to resolve to an unboxed entry point...
9295 // To cope with this case we always go via the helper function if there's any
9296 // chance of this happening by checking for all interfaces which might possibly
9297 // be compatible with the call (verification will have ensured that
9298 // at least one of them will be)
9300 // Enumerate all potential interface instantiations
9301 MethodTable::InterfaceMapIterator it = pCanonMT->IterateInterfaceMap();
9302 DWORD cPotentialMatchingInterfaces = 0;
9305 TypeHandle thPotentialInterfaceType(it.GetInterface(pCanonMT));
9306 if (thPotentialInterfaceType.AsMethodTable()->GetCanonicalMethodTable() ==
9307 thInterfaceType.AsMethodTable()->GetCanonicalMethodTable())
9309 cPotentialMatchingInterfaces++;
9310 pMD = pCanonMT->GetMethodDescForInterfaceMethod(thPotentialInterfaceType, pGenInterfaceMD, FALSE /* throwOnConflict */);
9312 // See code:#TryResolveConstraintMethodApprox_DoNotReturnParentMethod
9313 if ((pMD != NULL) && !pMD->GetMethodTable()->IsValueType() && !pMD->IsInterface())
9315 LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: %s::%s not a value type method\n",
9316 pMD->m_pszDebugClassName, pMD->m_pszDebugMethodName));
9322 _ASSERTE_MSG((cPotentialMatchingInterfaces != 0),
9323 "At least one interface has to implement the method, otherwise there's a bug in JIT/verification.");
9325 if (cPotentialMatchingInterfaces > 1)
9326 { // We have more potentially matching interfaces
9327 MethodTable * pInterfaceMT = thInterfaceType.GetMethodTable();
9328 _ASSERTE(pInterfaceMT->HasInstantiation());
9330 BOOL fIsExactMethodResolved = FALSE;
9332 if (!pInterfaceMT->IsSharedByGenericInstantiations() &&
9333 !pInterfaceMT->IsGenericTypeDefinition() &&
9334 !this->IsSharedByGenericInstantiations() &&
9335 !this->IsGenericTypeDefinition())
9336 { // We have exact interface and type instantiations (no generic variables and no __Canon used)
9338 if (this->CanCastToInterface(pInterfaceMT))
9340 // We can resolve to exact method
9341 pMD = this->GetMethodDescForInterfaceMethod(pInterfaceMT, pInterfaceMD, FALSE /* throwOnConflict */);
9342 fIsExactMethodResolved = pMD != NULL;
9346 if (!fIsExactMethodResolved)
9347 { // We couldn't resolve the interface statically
9348 _ASSERTE(pfForceUseRuntimeLookup != NULL);
9349 // Notify the caller that it should use runtime lookup
9350 // Note that we can leave pMD incorrect, because we will use runtime lookup
9351 *pfForceUseRuntimeLookup = TRUE;
9356 // If we can resolve the interface exactly then do so (e.g. when doing the exact
9357 // lookup at runtime, or when not sharing generic code).
9358 if (pCanonMT->CanCastToInterface(thInterfaceType.GetMethodTable()))
9360 pMD = pCanonMT->GetMethodDescForInterfaceMethod(thInterfaceType, pGenInterfaceMD, FALSE /* throwOnConflict */);
9363 LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: failed to find method desc for interface method\n"));
9368 else if (pGenInterfaceMD->IsVirtual())
9370 if (pGenInterfaceMD->HasNonVtableSlot() && pGenInterfaceMD->GetMethodTable()->IsValueType())
9371 { // GetMethodDescForSlot would AV for this slot
9372 // We can get here for (invalid and unverifiable) IL:
9373 // constrained. int32
9374 // callvirt System.Int32::GetHashCode()
9375 pMD = pGenInterfaceMD;
9379 pMD = GetMethodDescForSlot(pGenInterfaceMD->GetSlot());
9384 // The pMD will be NULL if calling non-virtual instance
9385 // methods on System.Object, i.e. when these are used as a constraint.
9390 { // Fall back to VSD
9394 if (!pMD->GetMethodTable()->IsInterface())
9396 //#TryResolveConstraintMethodApprox_DoNotReturnParentMethod
9397 // Only return a method if the value type itself declares the method
9398 // otherwise we might get a method from Object or System.ValueType
9399 if (!pMD->GetMethodTable()->IsValueType())
9400 { // Fall back to VSD
9404 // We've resolved the method, ignoring its generic method arguments
9405 // If the method is a generic method then go and get the instantiated descriptor
9406 pMD = MethodDesc::FindOrCreateAssociatedMethodDesc(
9409 FALSE /* no BoxedEntryPointStub */,
9410 pInterfaceMD->GetMethodInstantiation(),
9411 FALSE /* no allowInstParam */);
9413 // FindOrCreateAssociatedMethodDesc won't return a BoxedEntryPointStub.
9414 _ASSERTE(pMD != NULL);
9415 _ASSERTE(!pMD->IsUnboxingStub());
9419 } // MethodTable::TryResolveConstraintMethodApprox
9421 //==========================================================================================
9422 // Make a best-effort attempt to obtain an image name for use in an error message.
9424 // This routine must expect to be called before this object is fully loaded.
9425 // It can return an empty string if the name isn't available or the object isn't initialized
9426 // enough to get a name, but it mustn't crash.
9427 //==========================================================================================
9428 LPCWSTR MethodTable::GetPathForErrorMessages()
9434 INJECT_FAULT(COMPlusThrowOM(););
9438 Module *pModule = GetModule();
9442 return pModule->GetPathForErrorMessages();
9450 BOOL MethodTable::Validate()
9452 LIMITED_METHOD_CONTRACT;
9454 ASSERT_AND_CHECK(SanityCheck());
9457 ASSERT_AND_CHECK(m_pWriteableData != NULL);
9459 MethodTableWriteableData *pWriteableData = m_pWriteableData;
9460 DWORD dwLastVerifiedGCCnt = pWriteableData->m_dwLastVerifedGCCnt;
9461 // Here we used to assert that (dwLastVerifiedGCCnt <= GCHeapUtilities::GetGCHeap()->GetGcCount()) but
9462 // this is no longer true with background GC. Since the purpose of
9463 // m_dwLastVerifedGCCnt is just to verify the same method table only once per GC,
9464 // the assert has been removed.
9465 if (g_pConfig->FastGCStressLevel () > 1 && dwLastVerifiedGCCnt == GCHeapUtilities::GetGCHeap()->GetGcCount())
9473 ASSERT_AND_CHECK(!"Detected use of a corrupted OBJECTREF. Possible GC hole.");
9476 else if (!IsCanonicalMethodTable())
9478 // Non-canonical method tables have to have a non-empty instantiation
9479 if (GetInstantiation().IsEmpty())
9481 ASSERT_AND_CHECK(!"Detected use of a corrupted OBJECTREF. Possible GC hole.");
9486 pWriteableData->m_dwLastVerifedGCCnt = GCHeapUtilities::GetGCHeap()->GetGcCount();
9492 #endif // !DACCESS_COMPILE
9494 NOINLINE BYTE *MethodTable::GetLoaderAllocatorObjectForGC()
9496 WRAPPER_NO_CONTRACT;
9501 BYTE * retVal = *(BYTE**)GetLoaderAllocatorObjectHandle();
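// Returns the alignment requirement for this type's instance fields: the largest member alignment
// for types that carry layout info, an explicitly overridden alignment when the class specifies
// one, and otherwise min(instance field size, pointer size).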
9505 int MethodTable::GetFieldAlignmentRequirement()
9509 return GetLayoutInfo()->m_ManagedLargestAlignmentRequirementOfAllMembers;
9511 else if (GetClass()->HasCustomFieldAlignment())
9513 return GetClass()->GetOverriddenFieldAlignmentRequirement();
9515 return min(GetNumInstanceFieldBytes(), TARGET_POINTER_SIZE);
9518 UINT32 MethodTable::GetNativeSize()
9524 PRECONDITION(CheckPointer(GetClass()));
9529 return GetClass()->GetLayoutInfo()->GetManagedSize();
9531 return GetNativeLayoutInfo()->GetSize();
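// Returns the native (marshaling) layout descriptor for this type, computing and caching it on the
// EEClass the first time it is requested; the initialization path is compiled out under DAC.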
9534 EEClassNativeLayoutInfo const* MethodTable::GetNativeLayoutInfo()
9540 PRECONDITION(HasLayout());
9543 EEClassNativeLayoutInfo* pNativeLayoutInfo = GetClass()->GetNativeLayoutInfo();
9544 if (pNativeLayoutInfo != nullptr)
9546 return pNativeLayoutInfo;
9548 return EnsureNativeLayoutInfoInitialized();
9551 EEClassNativeLayoutInfo const* MethodTable::EnsureNativeLayoutInfoInitialized()
9557 PRECONDITION(HasLayout());
9560 #ifndef DACCESS_COMPILE
9561 EEClassNativeLayoutInfo::InitializeNativeLayoutFieldMetadataThrowing(this);
9562 return this->GetClass()->GetNativeLayoutInfo();
9569 #ifndef DACCESS_COMPILE
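// Returns the interface MethodTable for the current map entry. When the entry is a special marker
// type used for generic casting and the owner is not an open type, the interface is instantiated
// over the owner type (pMTOwner in every generic parameter position) before being returned, and
// the fully loaded instantiation is written back into the interface map entry.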
9570 PTR_MethodTable MethodTable::InterfaceMapIterator::GetInterface(MethodTable* pMTOwner, ClassLoadLevel loadLevel /*= CLASS_LOADED*/)
9572 CONTRACT(PTR_MethodTable)
9576 PRECONDITION(m_i != (DWORD) -1 && m_i < m_count);
9577 POSTCONDITION(CheckPointer(RETVAL));
9581 MethodTable *pResult = m_pMap->GetMethodTable();
9582 if (pResult->IsSpecialMarkerTypeForGenericCasting() && !pMTOwner->ContainsGenericVariables())
9584 TypeHandle ownerAsInst[MaxGenericParametersForSpecialMarkerType];
9585 for (DWORD i = 0; i < MaxGenericParametersForSpecialMarkerType; i++)
9586 ownerAsInst[i] = pMTOwner;
9588 _ASSERTE(pResult->GetInstantiation().GetNumArgs() <= MaxGenericParametersForSpecialMarkerType);
9589 Instantiation inst(ownerAsInst, pResult->GetInstantiation().GetNumArgs());
9590 pResult = ClassLoader::LoadGenericInstantiationThrowing(pResult->GetModule(), pResult->GetCl(), inst, ClassLoader::LoadTypes, loadLevel).AsMethodTable();
9591 if (pResult->IsFullyLoaded())
9592 SetInterface(pResult);
9596 #endif // DACCESS_COMPILE