1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 // File: methodtable.cpp
12 // ============================================================================
16 #include "clsload.hpp"
19 #include "classcompat.h"
24 #include "siginfo.hpp"
28 #include "dllimport.h"
30 #include "jitinterface.h"
33 #include "fieldmarshaler.h"
35 #include "gcheaputilities.h"
37 #include "dbginterface.h"
38 #include "comdelegate.h"
39 #include "eventtrace.h"
40 #include "fieldmarshaler.h"
43 #include "eeprofinterfaces.h"
44 #include "dllimportcallback.h"
46 #include "methodimpl.h"
47 #include "guidfromname.h"
48 #include "stackprobe.h"
51 #include "comsynchronizable.h"
52 #include "customattribute.h"
53 #include "virtualcallstub.h"
54 #include "contractimpl.h"
57 #endif //FEATURE_PREJIT
59 #ifdef FEATURE_COMINTEROP
60 #include "comcallablewrapper.h"
61 #include "clrtocomcall.h"
62 #include "runtimecallablewrapper.h"
63 #include "winrttypenameconverter.h"
64 #endif // FEATURE_COMINTEROP
66 #ifdef FEATURE_TYPEEQUIVALENCE
67 #include "typeequivalencehash.hpp"
70 #include "listlock.inl"
72 #include "genericdict.h"
73 #include "typestring.h"
77 #ifdef FEATURE_INTERPRETER
78 #include "interpreter.h"
79 #endif // FEATURE_INTERPRETER
81 #ifndef DACCESS_COMPILE
83 // Typedef for string comparition functions.
84 typedef int (__cdecl *UTF8StringCompareFuncPtr)(const char *, const char *);
86 MethodDataCache *MethodTable::s_pMethodDataCache = NULL;
87 BOOL MethodTable::s_fUseMethodDataCache = FALSE;
88 BOOL MethodTable::s_fUseParentMethodData = FALSE;
91 extern unsigned g_dupMethods;
94 #endif // !DACCESS_COMPILE
96 #ifndef DACCESS_COMPILE
97 //==========================================================================================
100 typedef MethodTable::MethodData MethodData;
102 public: // Ctor. Allocates cEntries entries. Throws.
103 static UINT32 GetObjectSize(UINT32 cEntries);
104 MethodDataCache(UINT32 cEntries);
106 MethodData *Find(MethodTable *pMT);
107 MethodData *Find(MethodTable *pMTDecl, MethodTable *pMTImpl);
108 void Insert(MethodData *pMData);
112 // This describes each entry in the cache.
115 MethodData *m_pMData;
119 MethodData *FindHelper(MethodTable *pMTDecl, MethodTable *pMTImpl, UINT32 idx);
121 inline UINT32 GetNextTimestamp()
122 { return ++m_iCurTimestamp; }
124 inline UINT32 NumEntries()
125 { LIMITED_METHOD_CONTRACT; return m_cEntries; }
127 inline void TouchEntry(UINT32 i)
128 { WRAPPER_NO_CONTRACT; m_iLastTouched = i; GetEntry(i)->m_iTimestamp = GetNextTimestamp(); }
130 inline UINT32 GetLastTouchedEntryIndex()
131 { WRAPPER_NO_CONTRACT; return m_iLastTouched; }
133 // The end of this object contains an array of Entry
134 inline Entry *GetEntryData()
135 { LIMITED_METHOD_CONTRACT; return (Entry *)(this + 1); }
137 inline Entry *GetEntry(UINT32 i)
138 { WRAPPER_NO_CONTRACT; return GetEntryData() + i; }
141 // This serializes access to the cache
144 // This allows ageing of entries to decide which to punt when
145 // inserting a new entry.
146 UINT32 m_iCurTimestamp;
148 // The number of entries in the cache
150 UINT32 m_iLastTouched;
153 UINT32 pad; // insures that we are a multiple of 8-bytes
155 }; // class MethodDataCache
157 //==========================================================================================
158 UINT32 MethodDataCache::GetObjectSize(UINT32 cEntries)
160 LIMITED_METHOD_CONTRACT;
161 return sizeof(MethodDataCache) + (sizeof(Entry) * cEntries);
164 //==========================================================================================
165 MethodDataCache::MethodDataCache(UINT32 cEntries)
166 : m_lock(COOPERATIVE_OR_PREEMPTIVE, LOCK_TYPE_DEFAULT),
168 m_cEntries(cEntries),
172 ZeroMemory(GetEntryData(), cEntries * sizeof(Entry));
175 //==========================================================================================
176 MethodTable::MethodData *MethodDataCache::FindHelper(
177 MethodTable *pMTDecl, MethodTable *pMTImpl, UINT32 idx)
185 MethodData *pEntry = GetEntry(idx)->m_pMData;
186 if (pEntry != NULL) {
187 MethodTable *pMTDeclEntry = pEntry->GetDeclMethodTable();
188 MethodTable *pMTImplEntry = pEntry->GetImplMethodTable();
189 if (pMTDeclEntry == pMTDecl && pMTImplEntry == pMTImpl) {
192 else if (pMTDecl == pMTImpl) {
193 if (pMTDeclEntry == pMTDecl) {
194 return pEntry->GetDeclMethodData();
196 if (pMTImplEntry == pMTDecl) {
197 return pEntry->GetImplMethodData();
205 //==========================================================================================
206 MethodTable::MethodData *MethodDataCache::Find(MethodTable *pMTDecl, MethodTable *pMTImpl)
215 g_sdStats.m_cCacheLookups++;
218 SimpleReadLockHolder lh(&m_lock);
220 // Check the last touched entry.
221 MethodData *pEntry = FindHelper(pMTDecl, pMTImpl, GetLastTouchedEntryIndex());
223 // Now search the entire cache.
224 if (pEntry == NULL) {
225 for (UINT32 i = 0; i < NumEntries(); i++) {
226 pEntry = FindHelper(pMTDecl, pMTImpl, i);
227 if (pEntry != NULL) {
234 if (pEntry != NULL) {
240 // Failure to find the entry in the cache.
241 g_sdStats.m_cCacheMisses++;
248 //==========================================================================================
249 MethodTable::MethodData *MethodDataCache::Find(MethodTable *pMT)
252 return Find(pMT, pMT);
255 //==========================================================================================
256 void MethodDataCache::Insert(MethodData *pMData)
259 NOTHROW; // for now, because it does not yet resize.
264 SimpleWriteLockHolder hLock(&m_lock);
266 UINT32 iMin = UINT32_MAX;
267 UINT32 idxMin = UINT32_MAX;
268 for (UINT32 i = 0; i < NumEntries(); i++) {
269 if (GetEntry(i)->m_iTimestamp < iMin) {
271 iMin = GetEntry(i)->m_iTimestamp;
274 Entry *pEntry = GetEntry(idxMin);
275 if (pEntry->m_pMData != NULL) {
276 pEntry->m_pMData->Release();
279 pEntry->m_pMData = pMData;
280 pEntry->m_iTimestamp = GetNextTimestamp();
283 //==========================================================================================
284 void MethodDataCache::Clear()
287 NOTHROW; // for now, because it does not yet resize.
292 // Taking the lock here is just a precaution. Really, the runtime
293 // should be suspended because this is called while unloading an
294 // AppDomain at the SysSuspendEE stage. But, if someone calls it
295 // outside of that context, we should be extra cautious.
296 SimpleWriteLockHolder lh(&m_lock);
298 for (UINT32 i = 0; i < NumEntries(); i++) {
299 Entry *pEntry = GetEntry(i);
300 if (pEntry->m_pMData != NULL) {
301 pEntry->m_pMData->Release();
304 ZeroMemory(GetEntryData(), NumEntries() * sizeof(Entry));
306 } // MethodDataCache::Clear
308 #endif // !DACCESS_COMPILE
311 //==========================================================================================
313 // Initialize the offsets of multipurpose slots at compile time using template metaprogramming
317 struct CountBitsAtCompileTime
319 enum { value = (N & 1) + CountBitsAtCompileTime<(N >> 1)>::value };
323 struct CountBitsAtCompileTime<0>
328 // "mask" is mask of used slots.
330 struct MethodTable::MultipurposeSlotOffset
332 // This is raw index of the slot assigned on first come first served basis
333 enum { raw = CountBitsAtCompileTime<mask>::value };
335 // This is actual index of the slot. It is equal to raw index except for the case
336 // where the first fixed slot is not used, but the second one is. The first fixed
337 // slot has to be assigned instead of the second one in this case. This assumes that
338 // there are exactly two fixed slots.
339 enum { index = (((mask & 3) == 2) && (raw == 1)) ? 0 : raw };
342 enum { slotOffset = (index == 0) ? offsetof(MethodTable, m_pMultipurposeSlot1) :
343 (index == 1) ? offsetof(MethodTable, m_pMultipurposeSlot2) :
344 (sizeof(MethodTable) + index * sizeof(TADDR) - 2 * sizeof(TADDR)) };
346 // Size of methodtable with overflow slots. It is used to compute start offset of optional members.
347 enum { totalSize = (slotOffset >= sizeof(MethodTable)) ? slotOffset : sizeof(MethodTable) };
351 // These macros recursively expand to create 2^N values for the offset arrays
353 #define MULTIPURPOSE_SLOT_OFFSET_1(mask) MULTIPURPOSE_SLOT_OFFSET (mask) MULTIPURPOSE_SLOT_OFFSET (mask | 0x01)
354 #define MULTIPURPOSE_SLOT_OFFSET_2(mask) MULTIPURPOSE_SLOT_OFFSET_1(mask) MULTIPURPOSE_SLOT_OFFSET_1(mask | 0x02)
355 #define MULTIPURPOSE_SLOT_OFFSET_3(mask) MULTIPURPOSE_SLOT_OFFSET_2(mask) MULTIPURPOSE_SLOT_OFFSET_2(mask | 0x04)
356 #define MULTIPURPOSE_SLOT_OFFSET_4(mask) MULTIPURPOSE_SLOT_OFFSET_3(mask) MULTIPURPOSE_SLOT_OFFSET_3(mask | 0x08)
357 #define MULTIPURPOSE_SLOT_OFFSET_5(mask) MULTIPURPOSE_SLOT_OFFSET_4(mask) MULTIPURPOSE_SLOT_OFFSET_4(mask | 0x10)
359 #define MULTIPURPOSE_SLOT_OFFSET(mask) MultipurposeSlotOffset<mask>::slotOffset,
360 const BYTE MethodTable::c_DispatchMapSlotOffsets[] = {
361 MULTIPURPOSE_SLOT_OFFSET_2(0)
363 const BYTE MethodTable::c_NonVirtualSlotsOffsets[] = {
364 MULTIPURPOSE_SLOT_OFFSET_3(0)
366 const BYTE MethodTable::c_ModuleOverrideOffsets[] = {
367 MULTIPURPOSE_SLOT_OFFSET_4(0)
369 #undef MULTIPURPOSE_SLOT_OFFSET
371 #define MULTIPURPOSE_SLOT_OFFSET(mask) MultipurposeSlotOffset<mask>::totalSize,
372 const BYTE MethodTable::c_OptionalMembersStartOffsets[] = {
373 MULTIPURPOSE_SLOT_OFFSET_5(0)
375 #undef MULTIPURPOSE_SLOT_OFFSET
378 //==========================================================================================
379 // Optimization intended for MethodTable::GetModule, MethodTable::GetDispatchMap and MethodTable::GetNonVirtualSlotsPtr
381 #include <optsmallperfcritical.h>
383 PTR_Module MethodTable::GetModule()
385 LIMITED_METHOD_DAC_CONTRACT;
387 g_IBCLogger.LogMethodTableAccess(this);
389 // Fast path for non-generic non-array case
390 if ((m_dwFlags & (enum_flag_HasComponentSize | enum_flag_GenericsMask)) == 0)
391 return GetLoaderModule();
393 MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable();
394 if (!pMTForModule->HasModuleOverride())
395 return pMTForModule->GetLoaderModule();
397 TADDR pSlot = pMTForModule->GetMultipurposeSlotPtr(enum_flag_HasModuleOverride, c_ModuleOverrideOffsets);
398 return RelativeFixupPointer<PTR_Module>::GetValueAtPtr(pSlot);
401 //==========================================================================================
402 PTR_Module MethodTable::GetModule_NoLogging()
404 LIMITED_METHOD_DAC_CONTRACT;
406 // Fast path for non-generic non-array case
407 if ((m_dwFlags & (enum_flag_HasComponentSize | enum_flag_GenericsMask)) == 0)
408 return GetLoaderModule();
410 MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable();
411 if (!pMTForModule->HasModuleOverride())
412 return pMTForModule->GetLoaderModule();
414 TADDR pSlot = pMTForModule->GetMultipurposeSlotPtr(enum_flag_HasModuleOverride, c_ModuleOverrideOffsets);
415 return RelativeFixupPointer<PTR_Module>::GetValueAtPtr(pSlot);
418 //==========================================================================================
419 PTR_DispatchMap MethodTable::GetDispatchMap()
421 LIMITED_METHOD_DAC_CONTRACT;
423 MethodTable * pMT = this;
425 if (!pMT->HasDispatchMapSlot())
427 pMT = pMT->GetCanonicalMethodTable();
428 if (!pMT->HasDispatchMapSlot())
432 g_IBCLogger.LogDispatchMapAccess(pMT);
434 TADDR pSlot = pMT->GetMultipurposeSlotPtr(enum_flag_HasDispatchMapSlot, c_DispatchMapSlotOffsets);
435 return RelativePointer<PTR_DispatchMap>::GetValueAtPtr(pSlot);
438 //==========================================================================================
439 TADDR MethodTable::GetNonVirtualSlotsPtr()
441 LIMITED_METHOD_DAC_CONTRACT;
443 _ASSERTE(GetFlag(enum_flag_HasNonVirtualSlots));
444 return GetMultipurposeSlotPtr(enum_flag_HasNonVirtualSlots, c_NonVirtualSlotsOffsets);
447 #include <optdefault.h>
450 //==========================================================================================
451 PTR_Module MethodTable::GetModuleIfLoaded()
463 g_IBCLogger.LogMethodTableAccess(this);
465 MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable();
466 if (!pMTForModule->HasModuleOverride())
467 return pMTForModule->GetLoaderModule();
469 return Module::RestoreModulePointerIfLoaded(pMTForModule->GetModuleOverridePtr(), pMTForModule->GetLoaderModule());
472 #ifndef DACCESS_COMPILE
473 //==========================================================================================
474 void MethodTable::SetModule(Module * pModule)
476 LIMITED_METHOD_CONTRACT;
478 if (HasModuleOverride())
480 GetModuleOverridePtr()->SetValue(pModule);
483 _ASSERTE(GetModule() == pModule);
485 #endif // DACCESS_COMPILE
487 //==========================================================================================
488 BOOL MethodTable::ValidateWithPossibleAV()
490 CANNOT_HAVE_CONTRACT;
493 // MethodTables have the canonicalization property below.
494 // i.e. canonicalize, and canonicalize again, and check the result are
495 // the same. This is a property that holds for every single valid object in
496 // the system, but which should hold for very few other addresses.
498 // For non-generic classes, we can rely on comparing
499 // object->methodtable->class->methodtable
501 // object->methodtable
503 // However, for generic instantiation this does not work. There we must
506 // object->methodtable->class->methodtable->class
508 // object->methodtable->class
510 // Of course, that's not necessarily enough to verify that the method
511 // table and class are absolutely valid - we rely on type soundness
512 // for that. We need to do more sanity checking to
513 // make sure that our pointer here is in fact a valid object.
514 PTR_EEClass pEEClass = this->GetClassWithPossibleAV();
515 return ((this == pEEClass->GetMethodTableWithPossibleAV()) ||
516 ((HasInstantiation() || IsArray()) &&
517 (pEEClass->GetMethodTableWithPossibleAV()->GetClassWithPossibleAV() == pEEClass)));
520 #ifndef DACCESS_COMPILE
522 //==========================================================================================
523 BOOL MethodTable::IsClassInited(AppDomain* pAppDomain /* = NULL */)
527 if (IsClassPreInited())
530 if (IsSharedByGenericInstantiations())
533 DomainLocalModule *pLocalModule;
534 if (pAppDomain == NULL)
536 pLocalModule = GetDomainLocalModule();
540 pLocalModule = GetDomainLocalModule(pAppDomain);
543 _ASSERTE(pLocalModule != NULL);
545 return pLocalModule->IsClassInitialized(this);
548 //==========================================================================================
549 BOOL MethodTable::IsInitError()
553 DomainLocalModule *pLocalModule = GetDomainLocalModule();
554 _ASSERTE(pLocalModule != NULL);
556 return pLocalModule->IsClassInitError(this);
559 //==========================================================================================
560 // mark the class as having its .cctor run
561 void MethodTable::SetClassInited()
564 _ASSERTE(!IsClassPreInited() || MscorlibBinder::IsClass(this, CLASS__SHARED_STATICS));
565 GetDomainLocalModule()->SetClassInitialized(this);
568 //==========================================================================================
569 void MethodTable::SetClassInitError()
572 GetDomainLocalModule()->SetClassInitError(this);
575 //==========================================================================================
576 // mark the class as having been restored.
577 void MethodTable::SetIsRestored()
586 PRECONDITION(!IsFullyLoaded());
588 // If functions on this type have already been requested for rejit, then give the rejit
589 // manager a chance to jump-stamp the code we are implicitly restoring. This ensures the
590 // first thread entering the function will jump to the prestub and trigger the
591 // rejit. Note that the PublishMethodTableHolder may take a lock to avoid a rejit race.
592 // See code:ReJitManager::PublishMethodHolder::PublishMethodHolder#PublishCode
593 // for details on the race.
596 ReJitPublishMethodTableHolder(this);
597 FastInterlockAnd(EnsureWritablePages(&(GetWriteableDataForWrite()->m_dwFlags)), ~MethodTableWriteableData::enum_flag_Unrestored);
599 #ifndef DACCESS_COMPILE
600 if (ETW_PROVIDER_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER))
602 ETW::MethodLog::MethodTableRestored(this);
607 #ifdef FEATURE_COMINTEROP
609 //==========================================================================================
610 // mark as COM object type (System.__ComObject and types deriving from it)
611 void MethodTable::SetComObjectType()
613 LIMITED_METHOD_CONTRACT;
614 SetFlag(enum_flag_ComObject);
617 #endif // FEATURE_COMINTEROP
619 #if defined(FEATURE_TYPEEQUIVALENCE)
620 void MethodTable::SetHasTypeEquivalence()
622 LIMITED_METHOD_CONTRACT;
623 SetFlag(enum_flag_HasTypeEquivalence);
627 #ifdef FEATURE_ICASTABLE
628 void MethodTable::SetICastable()
630 LIMITED_METHOD_CONTRACT;
631 SetFlag(enum_flag_ICastable);
635 BOOL MethodTable::IsICastable()
637 LIMITED_METHOD_DAC_CONTRACT;
638 #ifdef FEATURE_ICASTABLE
639 return GetFlag(enum_flag_ICastable);
646 #endif // !DACCESS_COMPILE
648 //==========================================================================================
649 WORD MethodTable::GetNumMethods()
651 LIMITED_METHOD_DAC_CONTRACT;
652 return GetClass()->GetNumMethods();
655 //==========================================================================================
656 PTR_BaseDomain MethodTable::GetDomain()
658 LIMITED_METHOD_DAC_CONTRACT;
659 g_IBCLogger.LogMethodTableAccess(this);
660 return GetLoaderModule()->GetDomain();
663 //==========================================================================================
664 BOOL MethodTable::IsDomainNeutral()
666 STATIC_CONTRACT_NOTHROW;
667 STATIC_CONTRACT_GC_NOTRIGGER;
668 STATIC_CONTRACT_SO_TOLERANT;
669 STATIC_CONTRACT_FORBID_FAULT;
670 STATIC_CONTRACT_SUPPORTS_DAC;
672 BOOL ret = GetLoaderModule()->GetAssembly()->IsDomainNeutral();
673 #ifndef DACCESS_COMPILE
674 _ASSERTE(!ret == !GetLoaderAllocator()->IsDomainNeutral());
680 //==========================================================================================
681 BOOL MethodTable::HasSameTypeDefAs(MethodTable *pMT)
683 LIMITED_METHOD_DAC_CONTRACT;
688 // optimize for the negative case where we expect RID mismatch
689 if (GetTypeDefRid() != pMT->GetTypeDefRid())
692 if (GetCanonicalMethodTable() == pMT->GetCanonicalMethodTable())
695 return (GetModule() == pMT->GetModule());
698 //==========================================================================================
699 BOOL MethodTable::HasSameTypeDefAs_NoLogging(MethodTable *pMT)
701 LIMITED_METHOD_DAC_CONTRACT;
706 // optimize for the negative case where we expect RID mismatch
707 if (GetTypeDefRid_NoLogging() != pMT->GetTypeDefRid_NoLogging())
710 if (GetCanonicalMethodTable() == pMT->GetCanonicalMethodTable())
713 return (GetModule_NoLogging() == pMT->GetModule_NoLogging());
716 #ifndef DACCESS_COMPILE
718 //==========================================================================================
719 PTR_MethodTable InterfaceInfo_t::GetApproxMethodTable(Module * pContainingModule)
728 #ifdef FEATURE_PREJIT
729 if (m_pMethodTable.IsTagged())
731 // Ideally, we would use Module::RestoreMethodTablePointer here. Unfortunately, it is not
732 // possible because of the current type loader architecture that restores types incrementally
733 // even in the NGen case.
734 MethodTable * pItfMT = *(m_pMethodTable.GetValuePtr());
736 // Restore the method table, but do not write it back if it has instantiation. We do not want
737 // to write back the approximate instantiations.
738 Module::RestoreMethodTablePointerRaw(&pItfMT, pContainingModule, CLASS_LOAD_APPROXPARENTS);
740 if (!pItfMT->HasInstantiation())
742 // m_pMethodTable.SetValue() is not used here since we want to update the indirection cell
743 *EnsureWritablePages(m_pMethodTable.GetValuePtr()) = pItfMT;
749 MethodTable * pItfMT = m_pMethodTable.GetValue();
750 ClassLoader::EnsureLoaded(TypeHandle(pItfMT), CLASS_LOAD_APPROXPARENTS);
754 #ifndef CROSSGEN_COMPILE
755 //==========================================================================================
756 // get the method desc given the interface method desc
757 /* static */ MethodDesc *MethodTable::GetMethodDescForInterfaceMethodAndServer(
758 TypeHandle ownerType, MethodDesc *pItfMD, OBJECTREF *pServer)
760 CONTRACT(MethodDesc*)
765 PRECONDITION(CheckPointer(pItfMD));
766 PRECONDITION(pItfMD->IsInterface());
767 PRECONDITION(!ownerType.IsNull());
768 PRECONDITION(ownerType.GetMethodTable()->HasSameTypeDefAs(pItfMD->GetMethodTable()));
769 POSTCONDITION(CheckPointer(RETVAL));
772 VALIDATEOBJECTREF(*pServer);
775 MethodTable * pItfMT = ownerType.GetMethodTable();
776 PREFIX_ASSUME(pItfMT != NULL);
779 MethodTable *pServerMT = (*pServer)->GetMethodTable();
780 PREFIX_ASSUME(pServerMT != NULL);
782 if (pServerMT->IsTransparentProxy())
784 // If pServer is a TP, then the interface method desc is the one to
785 // use to dispatch the call.
789 #ifdef FEATURE_ICASTABLE
790 // In case of ICastable, instead of trying to find method implementation in the real object type
791 // we call pObj.GetValueInternal() and call GetMethodDescForInterfaceMethod() again with whatever type it returns.
792 // It allows objects that implement ICastable to mimic behavior of other types.
793 if (pServerMT->IsICastable() &&
794 !pItfMD->HasMethodInstantiation() &&
795 !TypeHandle(pServerMT).CanCastTo(ownerType)) // we need to make sure object doesn't implement this interface in a natural way
797 GCStress<cfg_any>::MaybeTrigger();
799 // Make call to ICastableHelpers.GetImplType(obj, interfaceTypeObj)
800 PREPARE_NONVIRTUAL_CALLSITE(METHOD__ICASTABLEHELPERS__GETIMPLTYPE);
802 OBJECTREF ownerManagedType = ownerType.GetManagedClassObject(); //GC triggers
804 DECLARE_ARGHOLDER_ARRAY(args, 2);
805 args[ARGNUM_0] = OBJECTREF_TO_ARGHOLDER(*pServer);
806 args[ARGNUM_1] = OBJECTREF_TO_ARGHOLDER(ownerManagedType);
808 OBJECTREF impTypeObj = NULL;
809 CALL_MANAGED_METHOD_RETREF(impTypeObj, OBJECTREF, args);
811 INDEBUG(ownerManagedType = NULL); //ownerManagedType wasn't protected during the call
812 if (impTypeObj == NULL) // GetImplType returns default(RuntimeTypeHandle)
814 COMPlusThrow(kEntryPointNotFoundException);
817 ReflectClassBaseObject* resultTypeObj = ((ReflectClassBaseObject*)OBJECTREFToObject(impTypeObj));
818 TypeHandle resulTypeHnd = resultTypeObj->GetType();
819 MethodTable *pResultMT = resulTypeHnd.GetMethodTable();
821 RETURN(pResultMT->GetMethodDescForInterfaceMethod(ownerType, pItfMD));
825 #ifdef FEATURE_COMINTEROP
826 if (pServerMT->IsComObjectType() && !pItfMD->HasMethodInstantiation())
828 // interop needs an exact MethodDesc
829 pItfMD = MethodDesc::FindOrCreateAssociatedMethodDesc(
831 ownerType.GetMethodTable(),
832 FALSE, // forceBoxedEntryPoint
833 Instantiation(), // methodInst
834 FALSE, // allowInstParam
835 TRUE); // forceRemotableMethod
837 RETURN(pServerMT->GetMethodDescForComInterfaceMethod(pItfMD, false));
839 #endif // !FEATURE_COMINTEROP
841 // Handle pure COM+ types.
842 RETURN (pServerMT->GetMethodDescForInterfaceMethod(ownerType, pItfMD));
845 #ifdef FEATURE_COMINTEROP
846 //==========================================================================================
847 // get the method desc given the interface method desc on a COM implemented server
848 // (if fNullOk is set then NULL is an allowable return value)
849 MethodDesc *MethodTable::GetMethodDescForComInterfaceMethod(MethodDesc *pItfMD, bool fNullOk)
851 CONTRACT(MethodDesc*)
856 PRECONDITION(CheckPointer(pItfMD));
857 PRECONDITION(pItfMD->IsInterface());
858 PRECONDITION(IsComObjectType());
859 POSTCONDITION(fNullOk || CheckPointer(RETVAL));
863 MethodTable * pItfMT = pItfMD->GetMethodTable();
864 PREFIX_ASSUME(pItfMT != NULL);
866 // We now handle __ComObject class that doesn't have Dynamic Interface Map
867 if (!HasDynamicInterfaceMap())
873 // Now we handle the more complex extensible RCW's. The first thing to do is check
874 // to see if the static definition of the extensible RCW specifies that the class
875 // implements the interface.
876 DWORD slot = (DWORD) -1;
878 // Calling GetTarget here instead of FindDispatchImpl gives us caching functionality to increase speed.
879 PCODE tgt = VirtualCallStubManager::GetTarget(
880 pItfMT->GetLoaderAllocator()->GetDispatchToken(pItfMT->GetTypeID(), pItfMD->GetSlot()), this);
884 RETURN(MethodTable::GetMethodDescForSlotAddress(tgt));
887 // The interface is not in the static class definition so we need to look at the
888 // dynamic interfaces.
889 else if (FindDynamicallyAddedInterface(pItfMT))
891 // This interface was added to the class dynamically so it is implemented
892 // by the COM object. We treat this dynamically added interfaces the same
893 // way we treat COM objects. That is by using the interface vtable.
902 #endif // FEATURE_COMINTEROP
904 #endif // CROSSGEN_COMPILE
906 //---------------------------------------------------------------------------------------
908 MethodTable* CreateMinimalMethodTable(Module* pContainingModule,
909 LoaderHeap* pCreationHeap,
910 AllocMemTracker* pamTracker)
917 INJECT_FAULT(COMPlusThrowOM());
921 EEClass* pClass = EEClass::CreateMinimalClass(pCreationHeap, pamTracker);
923 LOG((LF_BCL, LL_INFO100, "Level2 - Creating MethodTable {0x%p}...\n", pClass));
925 MethodTable* pMT = (MethodTable *)(void *)pamTracker->Track(pCreationHeap->AllocMem(S_SIZE_T(sizeof(MethodTable))));
927 // Note: Memory allocated on loader heap is zero filled
928 // memset(pMT, 0, sizeof(MethodTable));
930 // Allocate the private data block ("private" during runtime in the ngen'ed case).
931 BYTE* pMTWriteableData = (BYTE *)
932 pamTracker->Track(pCreationHeap->AllocMem(S_SIZE_T(sizeof(MethodTableWriteableData))));
933 pMT->SetWriteableData((PTR_MethodTableWriteableData)pMTWriteableData);
936 // Set up the EEClass
938 pClass->SetMethodTable(pMT); // in the EEClass set the pointer to this MethodTable
939 pClass->SetAttrClass(tdPublic | tdSealed);
942 // Set up the MethodTable
944 // Does not need parent. Note that MethodTable for COR_GLOBAL_PARENT_TOKEN does not have parent either,
945 // so the system has to be wired for dealing with no parent anyway.
946 pMT->SetParentMethodTable(NULL);
947 pMT->SetClass(pClass);
948 pMT->SetLoaderModule(pContainingModule);
949 pMT->SetLoaderAllocator(pContainingModule->GetLoaderAllocator());
950 pMT->SetInternalCorElementType(ELEMENT_TYPE_CLASS);
951 pMT->SetBaseSize(ObjSizeOf(Object));
954 pClass->SetDebugClassName("dynamicClass");
955 pMT->SetDebugClassName("dynamicClass");
958 LOG((LF_BCL, LL_INFO10, "Level1 - MethodTable created {0x%p}\n", pClass));
964 #ifdef FEATURE_COMINTEROP
965 #ifndef CROSSGEN_COMPILE
966 //==========================================================================================
967 OBJECTREF MethodTable::GetObjCreateDelegate()
976 _ASSERT(!IsInterface());
978 return ObjectFromHandle(GetOHDelegate());
983 //==========================================================================================
984 void MethodTable::SetObjCreateDelegate(OBJECTREF orDelegate)
990 THROWS; // From CreateHandle
995 StoreObjectInHandle(GetOHDelegate(), orDelegate);
997 SetOHDelegate (GetAppDomain()->CreateHandle(orDelegate));
999 #endif //CROSSGEN_COMPILE
1000 #endif // FEATURE_COMINTEROP
1003 //==========================================================================================
1004 void MethodTable::SetInterfaceMap(WORD wNumInterfaces, InterfaceInfo_t* iMap)
1006 LIMITED_METHOD_CONTRACT;
1007 if (wNumInterfaces == 0)
1009 _ASSERTE(!HasInterfaceMap());
1013 m_wNumInterfaces = wNumInterfaces;
1015 CONSISTENCY_CHECK(IS_ALIGNED(iMap, sizeof(void*)));
1016 m_pInterfaceMap.SetValue(iMap);
1019 //==========================================================================================
1020 // Called after GetExtraInterfaceInfoSize above to setup a new MethodTable with the additional memory to track
1021 // extra interface info. If there are a non-zero number of interfaces implemented on this class but
1022 // GetExtraInterfaceInfoSize() returned zero, this call must still be made (with a NULL argument).
1023 void MethodTable::InitializeExtraInterfaceInfo(PVOID pInfo)
1025 STANDARD_VM_CONTRACT;
1027 // Check that memory was allocated or not allocated in the right scenarios.
1028 _ASSERTE(((pInfo == NULL) && (GetExtraInterfaceInfoSize(GetNumInterfaces()) == 0)) ||
1029 ((pInfo != NULL) && (GetExtraInterfaceInfoSize(GetNumInterfaces()) != 0)));
1031 // This call is a no-op if we don't require extra interface info (in which case a buffer should never have
1033 if (!HasExtraInterfaceInfo())
1035 _ASSERTE(pInfo == NULL);
1039 // Get pointer to optional slot that holds either a small inlined bitmap of flags or the pointer to a
1041 PTR_TADDR pInfoSlot = GetExtraInterfaceInfoPtr();
1043 // In either case, data inlined or held in an external buffer, the correct thing to do is to write pInfo
1044 // to the slot. In the inlined case we wish to set all flags to their default value (zero, false) and
1045 // writing NULL does that. Otherwise we simply want to dump the buffer pointer directly into the slot (no
1046 // need for a discriminator bit, we can always infer which format we're using based on the interface
1048 *pInfoSlot = (TADDR)pInfo;
1050 // There shouldn't be any need for further initialization in the buffered case since loader heap
1051 // allocation zeroes data.
1054 for (DWORD i = 0; i < GetExtraInterfaceInfoSize(GetNumInterfaces()); i++)
1055 _ASSERTE(*((BYTE*)pInfo + i) == 0);
1059 #ifdef FEATURE_NATIVE_IMAGE_GENERATION
1061 void MethodTable::SaveExtraInterfaceInfo(DataImage *pImage)
1063 STANDARD_VM_CONTRACT;
1065 // No extra data to save if the number of interfaces is below the threshhold -- there is either no data or
1066 // it all fits into the optional members inline.
1067 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold)
1070 pImage->StoreStructure((LPVOID)*GetExtraInterfaceInfoPtr(),
1071 GetExtraInterfaceInfoSize(GetNumInterfaces()),
1072 DataImage::ITEM_INTERFACE_MAP);
1075 void MethodTable::FixupExtraInterfaceInfo(DataImage *pImage)
1077 STANDARD_VM_CONTRACT;
1079 // No pointer to extra data to fixup if the number of interfaces is below the threshhold -- there is
1080 // either no data or it all fits into the optional members inline.
1081 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold)
1084 pImage->FixupPointerField(this, (BYTE*)GetExtraInterfaceInfoPtr() - (BYTE*)this);
1086 #endif // FEATURE_NATIVE_IMAGE_GENERATION
1088 // Define a macro that generates a mask for a given bit in a TADDR correctly on either 32 or 64 bit platforms.
1090 #define SELECT_TADDR_BIT(_index) (1ULL << (_index))
1092 #define SELECT_TADDR_BIT(_index) (1U << (_index))
1095 //==========================================================================================
1096 // For the given interface in the map (specified via map index) mark the interface as declared explicitly on
1097 // this class. This is not legal for dynamically added interfaces (as used by RCWs).
1098 void MethodTable::SetInterfaceDeclaredOnClass(DWORD index)
1100 STANDARD_VM_CONTRACT;
1102 _ASSERTE(HasExtraInterfaceInfo());
1103 _ASSERTE(index < GetNumInterfaces());
1105 // Get address of optional slot for extra info.
1106 PTR_TADDR pInfoSlot = GetExtraInterfaceInfoPtr();
1108 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold)
1110 // Bitmap of flags is stored inline in the optional slot.
1111 *pInfoSlot |= SELECT_TADDR_BIT(index);
1115 // Slot points to a buffer containing a larger bitmap.
1116 TADDR *pBitmap = (PTR_TADDR)*pInfoSlot;
1118 DWORD idxTaddr = index / (sizeof(TADDR) * 8); // Select TADDR in array that covers the target bit
1119 DWORD idxInTaddr = index % (sizeof(TADDR) * 8);
1120 TADDR bitmask = SELECT_TADDR_BIT(idxInTaddr);
1122 pBitmap[idxTaddr] |= bitmask;
1123 _ASSERTE((pBitmap[idxTaddr] & bitmask) == bitmask);
1127 //==========================================================================================
1128 // For the given interface return true if the interface was declared explicitly on this class.
1129 bool MethodTable::IsInterfaceDeclaredOnClass(DWORD index)
1131 STANDARD_VM_CONTRACT;
1133 _ASSERTE(HasExtraInterfaceInfo());
1135 // Dynamic interfaces are always marked as not DeclaredOnClass (I don't know why but this is how the code
1136 // was originally authored).
1137 if (index >= GetNumInterfaces())
1139 #ifdef FEATURE_COMINTEROP
1140 _ASSERTE(HasDynamicInterfaceMap());
1141 #endif // FEATURE_COMINTEROP
1145 // Get data from the optional extra info slot.
1146 TADDR taddrInfo = *GetExtraInterfaceInfoPtr();
1148 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold)
1150 // Bitmap of flags is stored directly in the value.
1151 return (taddrInfo & SELECT_TADDR_BIT(index)) != 0;
1155 // Slot points to a buffer containing a larger bitmap.
1156 TADDR *pBitmap = (PTR_TADDR)taddrInfo;
1158 DWORD idxTaddr = index / (sizeof(TADDR) * 8); // Select TADDR in array that covers the target bit
1159 DWORD idxInTaddr = index % (sizeof(TADDR) * 8);
1160 TADDR bitmask = SELECT_TADDR_BIT(idxInTaddr);
1162 return (pBitmap[idxTaddr] & bitmask) != 0;
1166 #ifdef FEATURE_COMINTEROP
1168 //==========================================================================================
1169 PTR_InterfaceInfo MethodTable::GetDynamicallyAddedInterfaceMap()
1171 LIMITED_METHOD_DAC_CONTRACT;
1172 PRECONDITION(HasDynamicInterfaceMap());
1174 return GetInterfaceMap() + GetNumInterfaces();
1177 //==========================================================================================
1178 unsigned MethodTable::GetNumDynamicallyAddedInterfaces()
1180 LIMITED_METHOD_DAC_CONTRACT;
1181 PRECONDITION(HasDynamicInterfaceMap());
1183 PTR_InterfaceInfo pInterfaces = GetInterfaceMap();
1184 PREFIX_ASSUME(pInterfaces != NULL);
1185 return (unsigned)*(dac_cast<PTR_SIZE_T>(pInterfaces) - 1);
1188 //==========================================================================================
1189 BOOL MethodTable::FindDynamicallyAddedInterface(MethodTable *pInterface)
1191 LIMITED_METHOD_CONTRACT;
1193 _ASSERTE(IsRestored_NoLogging());
1194 _ASSERTE(HasDynamicInterfaceMap()); // This should never be called on for a type that is not an extensible RCW.
1196 unsigned cDynInterfaces = GetNumDynamicallyAddedInterfaces();
1197 InterfaceInfo_t *pDynItfMap = GetDynamicallyAddedInterfaceMap();
1199 for (unsigned i = 0; i < cDynInterfaces; i++)
1201 if (pDynItfMap[i].GetMethodTable() == pInterface)
1208 //==========================================================================================
1209 void MethodTable::AddDynamicInterface(MethodTable *pItfMT)
1216 PRECONDITION(IsRestored_NoLogging());
1217 PRECONDITION(HasDynamicInterfaceMap()); // This should never be called on for a type that is not an extensible RCW.
1221 unsigned NumDynAddedInterfaces = GetNumDynamicallyAddedInterfaces();
1222 unsigned TotalNumInterfaces = GetNumInterfaces() + NumDynAddedInterfaces;
1224 InterfaceInfo_t *pNewItfMap = NULL;
1225 S_SIZE_T AllocSize = (S_SIZE_T(S_UINT32(TotalNumInterfaces) + S_UINT32(1)) * S_SIZE_T(sizeof(InterfaceInfo_t))) + S_SIZE_T(sizeof(DWORD_PTR));
1226 if (AllocSize.IsOverflow())
1227 ThrowHR(COR_E_OVERFLOW);
1229 // Allocate the new interface table adding one for the new interface and one
1230 // more for the dummy slot before the start of the table..
1231 pNewItfMap = (InterfaceInfo_t*)(void*)GetLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(AllocSize);
1233 pNewItfMap = (InterfaceInfo_t*)(((BYTE *)pNewItfMap) + sizeof(DWORD_PTR));
1235 // Copy the old map into the new one.
1236 if (TotalNumInterfaces > 0) {
1237 InterfaceInfo_t *pInterfaceMap = GetInterfaceMap();
1238 PREFIX_ASSUME(pInterfaceMap != NULL);
1240 for (unsigned index = 0; index < TotalNumInterfaces; ++index)
1242 InterfaceInfo_t *pIntInfo = (InterfaceInfo_t *) (pNewItfMap + index);
1243 pIntInfo->SetMethodTable((pInterfaceMap + index)->GetMethodTable());
1247 // Add the new interface at the end of the map.
1248 pNewItfMap[TotalNumInterfaces].SetMethodTable(pItfMT);
1250 // Update the count of dynamically added interfaces.
1251 *(((DWORD_PTR *)pNewItfMap) - 1) = NumDynAddedInterfaces + 1;
1253 // Switch the old interface map with the new one.
1254 EnsureWritablePages(&m_pInterfaceMap);
1255 m_pInterfaceMap.SetValueVolatile(pNewItfMap);
1257 // Log the fact that we leaked the interface vtable map.
1259 LOG((LF_INTEROP, LL_EVERYTHING,
1260 "Extensible RCW %s being cast to interface %s caused an interface vtable map leak",
1261 GetClass()->GetDebugClassName(), pItfMT->GetClass()->m_szDebugClassName));
1263 LOG((LF_INTEROP, LL_EVERYTHING,
1264 "Extensible RCW being cast to an interface caused an interface vtable map leak"));
1266 } // MethodTable::AddDynamicInterface
1268 #endif // FEATURE_COMINTEROP
1270 void MethodTable::SetupGenericsStaticsInfo(FieldDesc* pStaticFieldDescs)
1280 // No need to generate IDs for open types. Indeed since we don't save them
1281 // in the NGEN image it would be actively incorrect to do so. However
1282 // we still leave the optional member in the MethodTable holding the value -1 for the ID.
1284 GenericsStaticsInfo *pInfo = GetGenericsStaticsInfo();
1285 if (!ContainsGenericVariables() && !IsSharedByGenericInstantiations())
1287 Module * pModuleForStatics = GetLoaderModule();
1289 pInfo->m_DynamicTypeID = pModuleForStatics->AllocateDynamicEntry(this);
1293 pInfo->m_DynamicTypeID = (SIZE_T)-1;
1296 pInfo->m_pFieldDescs.SetValueMaybeNull(pStaticFieldDescs);
1299 #endif // !DACCESS_COMPILE
1301 //==========================================================================================
1302 // Calculate how many bytes of storage will be required to track additional information for interfaces. This
1303 // will be zero if there are no interfaces, but can also be zero for small numbers of interfaces as well, and
1304 // callers should be ready to handle this.
1305 /* static */ SIZE_T MethodTable::GetExtraInterfaceInfoSize(DWORD cInterfaces)
1307 LIMITED_METHOD_DAC_CONTRACT;
1309 // For small numbers of interfaces we can record the info in the TADDR of the optional member itself (use
1310 // the TADDR as a bitmap).
1311 if (cInterfaces <= kInlinedInterfaceInfoThreshhold)
1314 // Otherwise we'll cause an array of TADDRs to be allocated (use TADDRs since the heap space allocated
1315 // will almost certainly need to be TADDR aligned anyway).
1316 return ALIGN_UP(cInterfaces, sizeof(TADDR) * 8) / 8;
1319 #ifdef DACCESS_COMPILE
1320 //==========================================================================================
1321 void MethodTable::EnumMemoryRegionsForExtraInterfaceInfo()
1325 // No extra data to enum if the number of interfaces is below the threshhold -- there is either no data or
1326 // it all fits into the optional members inline.
1327 if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold)
1330 DacEnumMemoryRegion(*GetExtraInterfaceInfoPtr(), GetExtraInterfaceInfoSize(GetNumInterfaces()));
1332 #endif // DACCESS_COMPILE
1334 //==========================================================================================
1335 Module* MethodTable::GetModuleForStatics()
1337 WRAPPER_NO_CONTRACT;
1340 g_IBCLogger.LogMethodTableAccess(this);
1342 if (HasGenericsStaticsInfo())
1344 DWORD dwDynamicClassDomainID;
1345 return GetGenericsStaticsModuleAndID(&dwDynamicClassDomainID);
1349 return GetLoaderModule();
1353 //==========================================================================================
1354 DWORD MethodTable::GetModuleDynamicEntryID()
1356 WRAPPER_NO_CONTRACT;
1359 _ASSERTE(IsDynamicStatics() && "Only memory reflection emit types and generics can have a dynamic ID");
1361 if (HasGenericsStaticsInfo())
1363 DWORD dwDynamicClassDomainID;
1364 GetGenericsStaticsModuleAndID(&dwDynamicClassDomainID);
1365 return dwDynamicClassDomainID;
1369 return GetClass()->GetModuleDynamicID();
1373 #ifndef DACCESS_COMPILE
1375 #ifdef FEATURE_TYPEEQUIVALENCE
1376 //==========================================================================================
1377 // Equivalence based on Guid and TypeIdentifier attributes to support the "no-PIA" feature.
1378 BOOL MethodTable::IsEquivalentTo_Worker(MethodTable *pOtherMT COMMA_INDEBUG(TypeHandlePairList *pVisited))
1385 SO_TOLERANT; // we are called from MethodTable::CanCastToClass
1389 _ASSERTE(HasTypeEquivalence() && pOtherMT->HasTypeEquivalence());
1393 if (TypeHandlePairList::Exists(pVisited, TypeHandle(this), TypeHandle(pOtherMT)))
1395 _ASSERTE(!"We are in the process of comparing these types already. That should never happen!");
1398 TypeHandlePairList newVisited(TypeHandle(this), TypeHandle(pOtherMT), pVisited);
1402 if (HasInstantiation() != pOtherMT->HasInstantiation())
1407 if (!pOtherMT->IsArray() || GetRank() != pOtherMT->GetRank())
1410 // arrays of structures have their own unshared MTs and will take this path
1411 return (GetApproxArrayElementTypeHandle().IsEquivalentTo(pOtherMT->GetApproxArrayElementTypeHandle() COMMA_INDEBUG(&newVisited)));
1414 BOOL bResult = FALSE;
1416 BEGIN_SO_INTOLERANT_CODE(GetThread());
1417 bResult = IsEquivalentTo_WorkerInner(pOtherMT COMMA_INDEBUG(&newVisited));
1418 END_SO_INTOLERANT_CODE;
1423 //==========================================================================================
1424 // Type equivalence - SO intolerant part.
1425 BOOL MethodTable::IsEquivalentTo_WorkerInner(MethodTable *pOtherMT COMMA_INDEBUG(TypeHandlePairList *pVisited))
1433 LOADS_TYPE(CLASS_DEPENDENCIES_LOADED);
1437 AppDomain *pDomain = GetAppDomain();
1438 if (pDomain != NULL)
1440 TypeEquivalenceHashTable::EquivalenceMatch match = pDomain->GetTypeEquivalenceCache()->CheckEquivalence(TypeHandle(this), TypeHandle(pOtherMT));
1443 case TypeEquivalenceHashTable::Match:
1445 case TypeEquivalenceHashTable::NoMatch:
1447 case TypeEquivalenceHashTable::MatchUnknown:
1455 BOOL fEquivalent = FALSE;
1457 if (HasInstantiation())
1459 // we limit variance on generics only to interfaces
1460 if (!IsInterface() || !pOtherMT->IsInterface())
1462 fEquivalent = FALSE;
1463 goto EquivalenceCalculated;
1466 // check whether the instantiations are equivalent
1467 Instantiation inst1 = GetInstantiation();
1468 Instantiation inst2 = pOtherMT->GetInstantiation();
1470 if (inst1.GetNumArgs() != inst2.GetNumArgs())
1472 fEquivalent = FALSE;
1473 goto EquivalenceCalculated;
1476 for (DWORD i = 0; i < inst1.GetNumArgs(); i++)
1478 if (!inst1[i].IsEquivalentTo(inst2[i] COMMA_INDEBUG(pVisited)))
1480 fEquivalent = FALSE;
1481 goto EquivalenceCalculated;
1485 if (GetTypeDefRid() == pOtherMT->GetTypeDefRid() && GetModule() == pOtherMT->GetModule())
1487 // it's OK to declare the MTs equivalent at this point; the cases we care
1488 // about are IList<IFoo> and IList<IBar> where IFoo and IBar are equivalent
1493 fEquivalent = FALSE;
1495 goto EquivalenceCalculated;
1500 if (!pOtherMT->IsArray() || GetRank() != pOtherMT->GetRank())
1502 fEquivalent = FALSE;
1503 goto EquivalenceCalculated;
1506 // arrays of structures have their own unshared MTs and will take this path
1507 fEquivalent = (GetApproxArrayElementTypeHandle().IsEquivalentTo(pOtherMT->GetApproxArrayElementTypeHandle() COMMA_INDEBUG(pVisited)));
1508 goto EquivalenceCalculated;
1511 fEquivalent = CompareTypeDefsForEquivalence(GetCl(), pOtherMT->GetCl(), GetModule(), pOtherMT->GetModule(), NULL);
1513 EquivalenceCalculated:
1514 // Only record equivalence matches if we are in an AppDomain
1515 if (pDomain != NULL)
1517 // Collectible type results will not get cached.
1518 if ((!this->Collectible() && !pOtherMT->Collectible()))
1520 TypeEquivalenceHashTable::EquivalenceMatch match;
1521 match = fEquivalent ? TypeEquivalenceHashTable::Match : TypeEquivalenceHashTable::NoMatch;
1522 pDomain->GetTypeEquivalenceCache()->RecordEquivalence(TypeHandle(this), TypeHandle(pOtherMT), match);
1528 #endif // FEATURE_TYPEEQUIVALENCE
1530 //==========================================================================================
1531 BOOL MethodTable::CanCastToInterface(MethodTable *pTargetMT, TypeHandlePairList *pVisited)
1539 PRECONDITION(CheckPointer(pTargetMT));
1540 PRECONDITION(pTargetMT->IsInterface());
1541 PRECONDITION(!IsTransparentProxy());
1542 PRECONDITION(IsRestored_NoLogging());
1546 if (!pTargetMT->HasVariance())
1548 if (HasTypeEquivalence() || pTargetMT->HasTypeEquivalence())
1550 if (IsInterface() && IsEquivalentTo(pTargetMT))
1553 return ImplementsEquivalentInterface(pTargetMT);
1556 return CanCastToNonVariantInterface(pTargetMT);
1560 if (CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited))
1563 InterfaceMapIterator it = IterateInterfaceMap();
1566 if (it.GetInterface()->CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited))
1573 //==========================================================================================
1574 BOOL MethodTable::CanCastByVarianceToInterfaceOrDelegate(MethodTable *pTargetMT, TypeHandlePairList *pVisited)
1582 PRECONDITION(CheckPointer(pTargetMT));
1583 PRECONDITION(pTargetMT->HasVariance());
1584 PRECONDITION(pTargetMT->IsInterface() || pTargetMT->IsDelegate());
1585 PRECONDITION(IsRestored_NoLogging());
1589 BOOL returnValue = FALSE;
1591 EEClass *pClass = NULL;
1593 TypeHandlePairList pairList(this, pTargetMT, pVisited);
1595 if (TypeHandlePairList::Exists(pVisited, this, pTargetMT))
1598 if (GetTypeDefRid() != pTargetMT->GetTypeDefRid() || GetModule() != pTargetMT->GetModule())
1604 pClass = pTargetMT->GetClass();
1605 Instantiation inst = GetInstantiation();
1606 Instantiation targetInst = pTargetMT->GetInstantiation();
1608 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
1610 TypeHandle thArg = inst[i];
1611 TypeHandle thTargetArg = targetInst[i];
1613 // If argument types are not equivalent, test them for compatibility
1614 // in accordance with the the variance annotation
1615 if (!thArg.IsEquivalentTo(thTargetArg))
1617 switch (pClass->GetVarianceOfTypeParameter(i))
1620 if (!thArg.IsBoxedAndCanCastTo(thTargetArg, &pairList))
1624 case gpContravariant :
1625 if (!thTargetArg.IsBoxedAndCanCastTo(thArg, &pairList))
1633 _ASSERTE(!"Illegal variance annotation");
1647 //==========================================================================================
1648 BOOL MethodTable::CanCastToClass(MethodTable *pTargetMT, TypeHandlePairList *pVisited)
1656 PRECONDITION(CheckPointer(pTargetMT));
1657 PRECONDITION(!pTargetMT->IsArray());
1658 PRECONDITION(!pTargetMT->IsInterface());
1662 MethodTable *pMT = this;
1664 // If the target type has variant type parameters, we take a slower path
1665 if (pTargetMT->HasVariance())
1667 // At present, we support variance only on delegates and interfaces
1668 CONSISTENCY_CHECK(pTargetMT->IsDelegate());
1670 // First chase inheritance hierarchy until we hit a class that only differs in its instantiation
1672 // Cheap check for equivalence
1673 if (pMT->IsEquivalentTo(pTargetMT))
1676 g_IBCLogger.LogMethodTableAccess(pMT);
1678 if (pMT->CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited))
1681 pMT = pMT->GetParentMethodTable();
1685 // If there are no variant type parameters, just chase the hierarchy
1689 if (pMT->IsEquivalentTo(pTargetMT))
1692 g_IBCLogger.LogMethodTableAccess(pMT);
1694 pMT = pMT->GetParentMethodTable();
1701 #include <optsmallperfcritical.h>
1702 //==========================================================================================
1703 BOOL MethodTable::CanCastToNonVariantInterface(MethodTable *pTargetMT)
1712 PRECONDITION(CheckPointer(pTargetMT));
1713 PRECONDITION(pTargetMT->IsInterface());
1714 PRECONDITION(!pTargetMT->HasVariance());
1715 PRECONDITION(!IsTransparentProxy());
1716 PRECONDITION(IsRestored_NoLogging());
1720 // Check to see if the current class is for the interface passed in.
1721 if (this == pTargetMT)
1724 // Check to see if the static class definition indicates we implement the interface.
1725 return ImplementsInterfaceInline(pTargetMT);
1728 //==========================================================================================
1729 TypeHandle::CastResult MethodTable::CanCastToInterfaceNoGC(MethodTable *pTargetMT)
1738 PRECONDITION(CheckPointer(pTargetMT));
1739 PRECONDITION(pTargetMT->IsInterface());
1740 PRECONDITION(!IsTransparentProxy());
1741 PRECONDITION(IsRestored_NoLogging());
1745 if (!pTargetMT->HasVariance() && !IsArray() && !HasTypeEquivalence() && !pTargetMT->HasTypeEquivalence())
1747 return CanCastToNonVariantInterface(pTargetMT) ? TypeHandle::CanCast : TypeHandle::CannotCast;
1751 // We're conservative on variant interfaces and types with equivalence
1752 return TypeHandle::MaybeCast;
1756 //==========================================================================================
1757 TypeHandle::CastResult MethodTable::CanCastToClassNoGC(MethodTable *pTargetMT)
1766 PRECONDITION(CheckPointer(pTargetMT));
1767 PRECONDITION(!pTargetMT->IsArray());
1768 PRECONDITION(!pTargetMT->IsInterface());
1772 // We're conservative on variant classes
1773 if (pTargetMT->HasVariance() || g_IBCLogger.InstrEnabled())
1775 return TypeHandle::MaybeCast;
1778 // Type equivalence needs the slow path
1779 if (HasTypeEquivalence() || pTargetMT->HasTypeEquivalence())
1781 return TypeHandle::MaybeCast;
1784 // If there are no variant type parameters, just chase the hierarchy
1787 PTR_VOID pMT = this;
1790 if (pMT == pTargetMT)
1791 return TypeHandle::CanCast;
1793 pMT = MethodTable::GetParentMethodTable(pMT);
1797 return TypeHandle::CannotCast;
1799 #include <optdefault.h>
1802 MethodTable::IsExternallyVisible()
1813 BOOL bIsVisible = IsTypeDefExternallyVisible(GetCl(), GetModule(), GetClass()->GetAttrClass());
1815 if (bIsVisible && HasInstantiation() && !IsGenericTypeDefinition())
1817 for (COUNT_T i = 0; i < GetNumGenericArgs(); i++)
1819 if (!GetInstantiation()[i].IsExternallyVisible())
1825 } // MethodTable::IsExternallyVisible
1827 #ifdef FEATURE_PREJIT
1829 BOOL MethodTable::CanShareVtableChunksFrom(MethodTable *pTargetMT, Module *pCurrentLoaderModule, Module *pCurrentPreferredZapModule)
1831 WRAPPER_NO_CONTRACT;
1833 // These constraints come from two places:
1834 // 1. A non-zapped MT cannot share with a zapped MT since it may result in SetSlot() on a read-only slot
1835 // 2. Zapping this MT in MethodTable::Save cannot "unshare" something we decide to share now
1837 // We could fix both of these and allow non-zapped MTs to share chunks fully by doing the following
1838 // 1. Fix the few dangerous callers of SetSlot to first check whether the chunk itself is zapped
1839 // (see MethodTableBuilder::CopyExactParentSlots, or we could use ExecutionManager::FindZapModule)
1840 // 2. Have this function return FALSE if IsCompilationProcess and rely on MethodTable::Save to do all sharing for the NGen case
1842 return !pTargetMT->IsZapped() &&
1843 pTargetMT->GetLoaderModule() == pCurrentLoaderModule &&
1844 pCurrentLoaderModule == pCurrentPreferredZapModule &&
1845 pCurrentPreferredZapModule == Module::GetPreferredZapModuleForMethodTable(pTargetMT);
1850 BOOL MethodTable::CanShareVtableChunksFrom(MethodTable *pTargetMT, Module *pCurrentLoaderModule)
1852 WRAPPER_NO_CONTRACT;
1854 return pTargetMT->GetLoaderModule() == pCurrentLoaderModule;
1862 MethodTable::DebugDumpVtable(LPCUTF8 szClassName, BOOL fDebug)
1864 //diag functions shouldn't affect normal behavior
1873 const size_t cchBuff = MAX_CLASSNAME_LENGTH + 30;
1874 LPWSTR buff = fDebug ? (LPWSTR) qb.AllocNoThrow(cchBuff * sizeof(WCHAR)) : NULL;
1876 if ((buff == NULL) && fDebug)
1878 WszOutputDebugString(W("OOM when dumping VTable - falling back to logging"));
1884 swprintf_s(buff, cchBuff, W("Vtable (with interface dupes) for '%S':\n"), szClassName);
1886 swprintf_s(&buff[wcslen(buff)], cchBuff - wcslen(buff) , W(" Total duplicate slots = %d\n"), g_dupMethods);
1888 WszOutputDebugString(buff);
1892 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1893 LOG((LF_ALWAYS, LL_ALWAYS, "Vtable (with interface dupes) for '%s':\n", szClassName));
1894 LOG((LF_ALWAYS, LL_ALWAYS, " Total duplicate slots = %d\n", g_dupMethods));
1900 MethodIterator it(this);
1901 for (; it.IsValid(); it.Next())
1903 MethodDesc *pMD = it.GetMethodDesc();
1904 LPCUTF8 pszName = pMD->GetName((USHORT) it.GetSlotNumber());
1905 DWORD dwAttrs = pMD->GetAttrs();
1909 DefineFullyQualifiedNameForClass();
1910 LPCUTF8 name = GetFullyQualifiedNameForClass(pMD->GetMethodTable());
1911 swprintf_s(buff, cchBuff,
1912 W(" slot %2d: %S::%S%S 0x%p (slot = %2d)\n"),
1916 IsMdFinal(dwAttrs) ? " (final)" : "",
1917 pMD->GetMethodEntryPoint(),
1920 WszOutputDebugString(buff);
1924 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1925 LOG((LF_ALWAYS, LL_ALWAYS,
1926 " slot %2d: %s::%s%s 0x%p (slot = %2d)\n",
1928 pMD->GetClass()->GetDebugClassName(),
1930 IsMdFinal(dwAttrs) ? " (final)" : "",
1931 pMD->GetMethodEntryPoint(),
1935 if (it.GetSlotNumber() == (DWORD)(GetNumMethods()-1))
1939 WszOutputDebugString(W(" <-- vtable ends here\n"));
1943 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1944 LOG((LF_ALWAYS, LL_ALWAYS, " <-- vtable ends here\n"));
1949 EX_CATCH_HRESULT(hr);
1953 WszOutputDebugString(W("\n"));
1957 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1958 LOG((LF_ALWAYS, LL_ALWAYS, "\n"));
1960 } // MethodTable::DebugDumpVtable
1963 MethodTable::Debug_DumpInterfaceMap(
1964 LPCSTR szInterfaceMapPrefix)
1966 // Diagnostic functions shouldn't affect normal behavior
1974 if (GetNumInterfaces() == 0)
1975 { // There are no interfaces, no point in printing interface map info
1979 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1980 LOG((LF_ALWAYS, LL_ALWAYS,
1981 "%s Interface Map for '%s':\n",
1982 szInterfaceMapPrefix,
1983 GetDebugClassName()));
1984 LOG((LF_ALWAYS, LL_ALWAYS,
1985 " Number of interfaces = %d\n",
1986 GetNumInterfaces()));
1991 InterfaceMapIterator it(this, false);
1994 MethodTable *pInterfaceMT = it.GetInterface();
1996 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
1997 LOG((LF_ALWAYS, LL_ALWAYS,
1998 " index %2d: %s 0x%p\n",
2000 pInterfaceMT->GetDebugClassName(),
2003 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
2004 LOG((LF_ALWAYS, LL_ALWAYS, " <-- interface map ends here\n"));
2006 EX_CATCH_HRESULT(hr);
2008 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
2009 LOG((LF_ALWAYS, LL_ALWAYS, "\n"));
2010 } // MethodTable::Debug_DumpInterfaceMap
2013 MethodTable::Debug_DumpDispatchMap()
2015 WRAPPER_NO_CONTRACT; // It's a dev helper, we don't care about contracts
2017 if (!HasDispatchMap())
2018 { // There is no dipstch map for this type, no point in printing the info
2022 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
2023 LOG((LF_ALWAYS, LL_ALWAYS, "Dispatch Map for '%s':\n", GetDebugClassName()));
2025 InterfaceInfo_t * pInterfaceMap = GetInterfaceMap();
2026 DispatchMap::EncodedMapIterator it(this);
2028 while (it.IsValid())
2030 DispatchMapEntry *pEntry = it.Entry();
2032 UINT32 nInterfaceIndex = pEntry->GetTypeID().GetInterfaceNum();
2033 _ASSERTE(nInterfaceIndex < GetNumInterfaces());
2035 MethodTable * pInterface = pInterfaceMap[nInterfaceIndex].GetMethodTable();
2036 UINT32 nInterfaceSlotNumber = pEntry->GetSlotNumber();
2037 UINT32 nImplementationSlotNumber = pEntry->GetTargetSlotNumber();
2038 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
2039 LOG((LF_ALWAYS, LL_ALWAYS,
2040 " Interface %d (%s) slot %d (%s) implemented in slot %d (%s)\n",
2042 pInterface->GetDebugClassName(),
2043 nInterfaceSlotNumber,
2044 pInterface->GetMethodDescForSlot(nInterfaceSlotNumber)->GetName(),
2045 nImplementationSlotNumber,
2046 GetMethodDescForSlot(nImplementationSlotNumber)->GetName()));
2050 //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
2051 LOG((LF_ALWAYS, LL_ALWAYS, " <-- Dispatch map ends here\n"));
2052 } // MethodTable::Debug_DumpDispatchMap
2056 //==========================================================================================
2057 NOINLINE BOOL MethodTable::ImplementsInterface(MethodTable *pInterface)
2059 WRAPPER_NO_CONTRACT;
2060 return ImplementsInterfaceInline(pInterface);
2063 //==========================================================================================
2064 BOOL MethodTable::ImplementsEquivalentInterface(MethodTable *pInterface)
2071 PRECONDITION(pInterface->IsInterface()); // class we are looking up should be an interface
2075 // look for exact match first (optimize for success)
2076 if (ImplementsInterfaceInline(pInterface))
2079 if (!pInterface->HasTypeEquivalence())
2082 DWORD numInterfaces = GetNumInterfaces();
2083 if (numInterfaces == 0)
2086 InterfaceInfo_t *pInfo = GetInterfaceMap();
2090 if (pInfo->GetMethodTable()->IsEquivalentTo(pInterface))
2095 while (--numInterfaces);
2100 //==========================================================================================
2101 MethodDesc *MethodTable::GetMethodDescForInterfaceMethod(MethodDesc *pInterfaceMD)
2107 PRECONDITION(!pInterfaceMD->HasClassOrMethodInstantiation());
2110 WRAPPER_NO_CONTRACT;
2112 return GetMethodDescForInterfaceMethod(TypeHandle(pInterfaceMD->GetMethodTable()), pInterfaceMD);
2115 //==========================================================================================
2116 MethodDesc *MethodTable::GetMethodDescForInterfaceMethod(TypeHandle ownerType, MethodDesc *pInterfaceMD)
2122 PRECONDITION(!ownerType.IsNull());
2123 PRECONDITION(ownerType.GetMethodTable()->IsInterface());
2124 PRECONDITION(ownerType.GetMethodTable()->HasSameTypeDefAs(pInterfaceMD->GetMethodTable()));
2125 PRECONDITION(IsArray() || ImplementsEquivalentInterface(ownerType.GetMethodTable()) || ownerType.GetMethodTable()->HasVariance());
2129 MethodDesc *pMD = NULL;
2131 MethodTable *pInterfaceMT = ownerType.AsMethodTable();
2133 #ifdef CROSSGEN_COMPILE
2134 DispatchSlot implSlot(FindDispatchSlot(pInterfaceMT->GetTypeID(), pInterfaceMD->GetSlot()));
2135 PCODE pTgt = implSlot.GetTarget();
2137 PCODE pTgt = VirtualCallStubManager::GetTarget(
2138 pInterfaceMT->GetLoaderAllocator()->GetDispatchToken(pInterfaceMT->GetTypeID(), pInterfaceMD->GetSlot()),
2141 pMD = MethodTable::GetMethodDescForSlotAddress(pTgt);
2144 MethodDesc *pDispSlotMD = FindDispatchSlotForInterfaceMD(ownerType, pInterfaceMD).GetMethodDesc();
2145 _ASSERTE(pDispSlotMD == pMD);
2148 pMD->CheckRestore();
2152 #endif // DACCESS_COMPILE
2154 //==========================================================================================
2155 PTR_FieldDesc MethodTable::GetFieldDescByIndex(DWORD fieldIndex)
2157 LIMITED_METHOD_CONTRACT;
2159 if (HasGenericsStaticsInfo() &&
2160 fieldIndex >= GetNumIntroducedInstanceFields())
2162 return GetGenericsStaticFieldDescs() + (fieldIndex - GetNumIntroducedInstanceFields());
2166 return GetClass()->GetFieldDescList() + fieldIndex;
2170 //==========================================================================================
2171 DWORD MethodTable::GetIndexForFieldDesc(FieldDesc *pField)
2173 LIMITED_METHOD_CONTRACT;
2175 if (pField->IsStatic() && HasGenericsStaticsInfo())
2177 FieldDesc *pStaticFields = GetGenericsStaticFieldDescs();
2179 return GetNumIntroducedInstanceFields() + DWORD(pField - pStaticFields);
2184 FieldDesc *pFields = GetClass()->GetFieldDescList();
2186 return DWORD(pField - pFields);
2190 //==========================================================================================
2192 #pragma optimize("t", on)
2194 // compute whether the type can be considered to have had its
2195 // static initialization run without doing anything at all, i.e. whether we know
2196 // immediately that the type requires nothing to do for initialization
2198 // If a type used as a representiative during JITting is PreInit then
2199 // any types that it may represent within a code-sharing
2200 // group are also PreInit. For example, if List<object> is PreInit then List<string>
2201 // and List<MyType> are also PreInit. This is because the dynamicStatics, staticRefHandles
2202 // and hasCCtor are all identical given a head type, and weakening the domainNeutrality
2203 // to DomainSpecific only makes more types PreInit.
2204 BOOL MethodTable::IsClassPreInited()
2206 LIMITED_METHOD_CONTRACT;
2208 if (ContainsGenericVariables())
2211 if (HasClassConstructor())
2214 if (HasBoxedRegularStatics())
2217 if (IsDynamicStatics())
2223 #pragma optimize("", on)
2226 //========================================================================================
2228 #if defined(FEATURE_UNIX_AMD64_STRUCT_PASSING_ITF)
2230 #if defined(_DEBUG) && defined(LOGGING)
2232 const char* GetSystemVClassificationTypeName(SystemVClassificationType t)
2236 case SystemVClassificationTypeUnknown: return "Unknown";
2237 case SystemVClassificationTypeStruct: return "Struct";
2238 case SystemVClassificationTypeNoClass: return "NoClass";
2239 case SystemVClassificationTypeMemory: return "Memory";
2240 case SystemVClassificationTypeInteger: return "Integer";
2241 case SystemVClassificationTypeIntegerReference: return "IntegerReference";
2242 case SystemVClassificationTypeIntegerByRef: return "IntegerByReference";
2243 case SystemVClassificationTypeSSE: return "SSE";
2244 case SystemVClassificationTypeTypedReference: return "TypedReference";
2245 default: return "ERROR";
2248 #endif // _DEBUG && LOGGING
2250 // Returns 'true' if the struct is passed in registers, 'false' otherwise.
2251 bool MethodTable::ClassifyEightBytes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool useNativeLayout)
2253 if (useNativeLayout)
2255 return ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout);
2259 return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout);
2263 // If we have a field classification already, but there is a union, we must merge the classification type of the field. Returns the
2264 // new, merged classification type.
2266 static SystemVClassificationType ReClassifyField(SystemVClassificationType originalClassification, SystemVClassificationType newFieldClassification)
2268 _ASSERTE((newFieldClassification == SystemVClassificationTypeInteger) ||
2269 (newFieldClassification == SystemVClassificationTypeIntegerReference) ||
2270 (newFieldClassification == SystemVClassificationTypeIntegerByRef) ||
2271 (newFieldClassification == SystemVClassificationTypeSSE));
2273 switch (newFieldClassification)
2275 case SystemVClassificationTypeInteger:
2276 // Integer overrides everything; the resulting classification is Integer. Can't merge Integer and IntegerReference.
2277 _ASSERTE((originalClassification == SystemVClassificationTypeInteger) ||
2278 (originalClassification == SystemVClassificationTypeSSE));
2280 return SystemVClassificationTypeInteger;
2282 case SystemVClassificationTypeSSE:
2283 // If the old and new classifications are both SSE, then the merge is SSE, otherwise it will be integer. Can't merge SSE and IntegerReference.
2284 _ASSERTE((originalClassification == SystemVClassificationTypeInteger) ||
2285 (originalClassification == SystemVClassificationTypeSSE));
2287 if (originalClassification == SystemVClassificationTypeSSE)
2289 return SystemVClassificationTypeSSE;
2293 return SystemVClassificationTypeInteger;
2296 case SystemVClassificationTypeIntegerReference:
2297 // IntegerReference can only merge with IntegerReference.
2298 _ASSERTE(originalClassification == SystemVClassificationTypeIntegerReference);
2299 return SystemVClassificationTypeIntegerReference;
2301 case SystemVClassificationTypeIntegerByRef:
2302 // IntegerByReference can only merge with IntegerByReference.
2303 _ASSERTE(originalClassification == SystemVClassificationTypeIntegerByRef);
2304 return SystemVClassificationTypeIntegerByRef;
2307 _ASSERTE(false); // Unexpected type.
2308 return SystemVClassificationTypeUnknown;
2312 // Returns 'true' if the struct is passed in registers, 'false' otherwise.
2313 bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassingHelperPtr helperPtr,
2314 unsigned int nestingLevel,
2315 unsigned int startOffsetOfStruct,
2316 bool useNativeLayout)
2327 WORD numIntroducedFields = GetNumIntroducedInstanceFields();
2329 // It appears the VM gives a struct with no fields of size 1.
2330 // Don't pass in register such structure.
2331 if (numIntroducedFields == 0)
2336 // No struct register passing with explicit layout. There may be cases where explicit layout may be still
2337 // eligible for register struct passing, but it is hard to tell the real intent. Make it simple and just
2338 // unconditionally disable register struct passing for explicit layout.
2339 if (GetClass()->HasExplicitFieldOffsetLayout())
2341 LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithManagedLayout: struct %s has explicit layout; will not be enregistered\n",
2342 nestingLevel * 5, "", this->GetDebugClassName()));
2346 LOG((LF_JIT, LL_EVERYTHING, "%*s**** Classify %s (%p), startOffset %d, total struct size %d\n",
2347 nestingLevel * 5, "", this->GetDebugClassName(), this, startOffsetOfStruct, helperPtr->structSize));
2351 FieldDesc *pField = GetApproxFieldDescListRaw();
2352 FieldDesc *pFieldEnd = pField + numIntroducedFields;
2354 // System types are loaded before others, so ByReference<T> would be loaded before Span<T> or any other type that has a
2355 // ByReference<T> field. ByReference<T> is the first by-ref-like system type to be loaded (see
2356 // SystemDomain::LoadBaseSystemClasses), so if the current method table is marked as by-ref-like and g_pByReferenceClass is
2357 // null, it must be the initial load of ByReference<T>.
2358 bool isThisByReferenceOfT = IsByRefLike() && (g_pByReferenceClass == nullptr || HasSameTypeDefAs(g_pByReferenceClass));
2360 for (; pField < pFieldEnd; pField++)
2366 DWORD fieldOffset = pField->GetOffset();
2367 unsigned normalizedFieldOffset = fieldOffset + startOffsetOfStruct;
2369 unsigned int fieldSize = pField->GetSize();
2370 _ASSERTE(fieldSize != (unsigned int)-1);
2372 // The field can't span past the end of the struct.
2373 if ((normalizedFieldOffset + fieldSize) > helperPtr->structSize)
2375 _ASSERTE(false && "Invalid struct size. The size of fields and overall size don't agree");
2379 CorElementType fieldType = pField->GetFieldType();
2381 SystemVClassificationType fieldClassificationType;
2382 if (isThisByReferenceOfT)
2384 // ByReference<T> is a special type whose single IntPtr field holds a by-ref potentially interior pointer to GC
2385 // memory, so classify its field as such
2386 _ASSERTE(numIntroducedFields == 1);
2387 _ASSERTE(fieldType == CorElementType::ELEMENT_TYPE_I);
2388 fieldClassificationType = SystemVClassificationTypeIntegerByRef;
2392 fieldClassificationType = CorInfoType2UnixAmd64Classification(fieldType);
2397 pField->GetName_NoThrow(&fieldName);
2399 if (fieldClassificationType == SystemVClassificationTypeStruct)
2401 TypeHandle th = pField->GetApproxFieldTypeHandleThrowing();
2402 _ASSERTE(!th.IsNull());
2403 MethodTable* pFieldMT = th.GetMethodTable();
2405 bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct;
2406 helperPtr->inEmbeddedStruct = true;
2408 bool structRet = false;
2409 // If classifying for marshaling/PInvoke and the aggregated struct has a native layout
2410 // use the native classification. If not, continue using the managed layout.
2411 if (useNativeLayout && pFieldMT->HasLayout())
2413 structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout);
2417 structRet = pFieldMT->ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout);
2420 helperPtr->inEmbeddedStruct = inEmbeddedStructPrev;
2424 // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return do not enregister.
2431 if (fieldClassificationType == SystemVClassificationTypeTypedReference ||
2432 CorInfoType2UnixAmd64Classification(GetClass_NoLogging()->GetInternalCorElementType()) == SystemVClassificationTypeTypedReference)
2434 // The TypedReference is a very special type.
2435 // In source/metadata it has two fields - Type and Value and both are defined of type IntPtr.
2436 // When the VM creates a layout of the type it changes the type of the Value to ByRef type and the
2437 // type of the Type field is left to IntPtr (TYPE_I internally - native int type.)
2438 // This requires a special treatment of this type. The code below handles the both fields (and this entire type).
2440 for (unsigned i = 0; i < 2; i++)
2443 fieldOffset = (i == 0 ? 0 : 8);
2444 normalizedFieldOffset = fieldOffset + startOffsetOfStruct;
2445 fieldClassificationType = (i == 0 ? SystemVClassificationTypeIntegerByRef : SystemVClassificationTypeInteger);
2446 if ((normalizedFieldOffset % fieldSize) != 0)
2448 // The spec requires that struct values on the stack from register passed fields expects
2449 // those fields to be at their natural alignment.
2451 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d not at natural alignment; not enregistering struct\n",
2452 nestingLevel * 5, "", fieldNum, fieldNum, (i == 0 ? "Value" : "Type"), fieldOffset, normalizedFieldOffset, fieldSize));
2456 helperPtr->largestFieldOffset = (int)normalizedFieldOffset;
2458 // Set the data for a new field.
2460 // The new field classification must not have been initialized yet.
2461 _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass);
2463 // There are only a few field classifications that are allowed.
2464 _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) ||
2465 (fieldClassificationType == SystemVClassificationTypeIntegerReference) ||
2466 (fieldClassificationType == SystemVClassificationTypeIntegerByRef) ||
2467 (fieldClassificationType == SystemVClassificationTypeSSE));
2469 helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType;
2470 helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldSize;
2471 helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset;
2473 LOG((LF_JIT, LL_EVERYTHING, " %*s**** Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n",
2474 nestingLevel * 5, "", fieldNum, (i == 0 ? "Value" : "Type"), fieldOffset, normalizedFieldOffset, fieldSize, helperPtr->currentUniqueOffsetField,
2475 GetSystemVClassificationTypeName(fieldClassificationType),
2476 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField])));
2478 helperPtr->currentUniqueOffsetField++;
2484 // Both fields of the special TypedReference struct are handled.
2487 // Done classifying the System.TypedReference struct fields.
2491 if ((normalizedFieldOffset % fieldSize) != 0)
2493 // The spec requires that struct values on the stack from register passed fields expects
2494 // those fields to be at their natural alignment.
2496 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d not at natural alignment; not enregistering struct\n",
2497 nestingLevel * 5, "", fieldNum, fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldSize));
2501 if ((int)normalizedFieldOffset <= helperPtr->largestFieldOffset)
2503 // Find the field corresponding to this offset and update the size if needed.
2504 // We assume that either it matches the offset of a previously seen field, or
2505 // it is an out-of-order offset (the VM does give us structs in non-increasing
2506 // offset order sometimes) that doesn't overlap any other field.
2508 // REVIEW: will the offset ever match a previously seen field offset for cases that are NOT ExplicitLayout?
2509 // If not, we can get rid of this loop, and just assume the offset is from an out-of-order field. We wouldn't
2510 // need to maintain largestFieldOffset, either, since we would then assume all fields are unique. We could
2511 // also get rid of ReClassifyField().
2513 for (i = helperPtr->currentUniqueOffsetField - 1; i >= 0; i--)
2515 if (helperPtr->fieldOffsets[i] == normalizedFieldOffset)
2517 if (fieldSize > helperPtr->fieldSizes[i])
2519 helperPtr->fieldSizes[i] = fieldSize;
2522 helperPtr->fieldClassifications[i] = ReClassifyField(helperPtr->fieldClassifications[i], fieldClassificationType);
2524 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d, union with uniqueOffsetField %d, field type classification %s, reclassified field to %s\n",
2525 nestingLevel * 5, "", fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldSize, i,
2526 GetSystemVClassificationTypeName(fieldClassificationType),
2527 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[i])));
2531 // Make sure the field doesn't start in the middle of another field.
2532 _ASSERTE((normalizedFieldOffset < helperPtr->fieldOffsets[i]) ||
2533 (normalizedFieldOffset >= helperPtr->fieldOffsets[i] + helperPtr->fieldSizes[i]));
2538 // The proper size of the union set of fields has been set above; continue to the next field.
2544 helperPtr->largestFieldOffset = (int)normalizedFieldOffset;
2547 // Set the data for a new field.
2549 // The new field classification must not have been initialized yet.
2550 _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass);
2552 // There are only a few field classifications that are allowed.
2553 _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) ||
2554 (fieldClassificationType == SystemVClassificationTypeIntegerReference) ||
2555 (fieldClassificationType == SystemVClassificationTypeIntegerByRef) ||
2556 (fieldClassificationType == SystemVClassificationTypeSSE));
2558 helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType;
2559 helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldSize;
2560 helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset;
2562 LOG((LF_JIT, LL_EVERYTHING, " %*s**** Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n",
2563 nestingLevel * 5, "", fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldSize, helperPtr->currentUniqueOffsetField,
2564 GetSystemVClassificationTypeName(fieldClassificationType),
2565 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField])));
2567 _ASSERTE(helperPtr->currentUniqueOffsetField < SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT);
2568 helperPtr->currentUniqueOffsetField++;
2569 } // end per-field for loop
2571 AssignClassifiedEightByteTypes(helperPtr, nestingLevel);
2576 // Returns 'true' if the struct is passed in registers, 'false' otherwise.
2577 bool MethodTable::ClassifyEightBytesWithNativeLayout(SystemVStructRegisterPassingHelperPtr helperPtr,
2578 unsigned int nestingLevel,
2579 unsigned int startOffsetOfStruct,
2580 bool useNativeLayout)
2591 // Should be in this method only doing a native layout classification.
2592 _ASSERTE(useNativeLayout);
2594 #ifdef DACCESS_COMPILE
2595 // No register classification for this case.
2597 #else // DACCESS_COMPILE
2601 // If there is no native layout for this struct use the managed layout instead.
2602 return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout);
2605 const FieldMarshaler *pFieldMarshaler = GetLayoutInfo()->GetFieldMarshalers();
2606 UINT numIntroducedFields = GetLayoutInfo()->GetNumCTMFields();
2609 if (numIntroducedFields == 0)
2614 // No struct register passing with explicit layout. There may be cases where explicit layout may be still
2615 // eligible for register struct passing, but it is hard to tell the real intent. Make it simple and just
2616 // unconditionally disable register struct passing for explicit layout.
2617 if (GetClass()->HasExplicitFieldOffsetLayout())
2619 LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithNativeLayout: struct %s has explicit layout; will not be enregistered\n",
2620 nestingLevel * 5, "", this->GetDebugClassName()));
2625 LOG((LF_JIT, LL_EVERYTHING, "%*s**** Classify for native struct %s (%p), startOffset %d, total struct size %d\n",
2626 nestingLevel * 5, "", this->GetDebugClassName(), this, startOffsetOfStruct, helperPtr->structSize));
2630 while (numIntroducedFields--)
2636 FieldDesc *pField = pFieldMarshaler->GetFieldDesc();
2637 CorElementType fieldType = pField->GetFieldType();
2639 // Invalid field type.
2640 if (fieldType == ELEMENT_TYPE_END)
2645 DWORD fieldOffset = pFieldMarshaler->GetExternalOffset();
2646 unsigned normalizedFieldOffset = fieldOffset + startOffsetOfStruct;
2648 unsigned int fieldNativeSize = pFieldMarshaler->NativeSize();
2649 if (fieldNativeSize > SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES)
2651 // Pass on stack in this case.
2655 _ASSERTE(fieldNativeSize != (unsigned int)-1);
2657 // The field can't span past the end of the struct.
2658 if ((normalizedFieldOffset + fieldNativeSize) > helperPtr->structSize)
2660 _ASSERTE(false && "Invalid native struct size. The size of fields and overall size don't agree");
2664 SystemVClassificationType fieldClassificationType = SystemVClassificationTypeUnknown;
2668 pField->GetName_NoThrow(&fieldName);
2671 // Some NStruct Field Types have extra information and require special handling
2672 NStructFieldType cls = pFieldMarshaler->GetNStructFieldType();
2673 if (cls == NFT_FIXEDCHARARRAYANSI)
2675 fieldClassificationType = SystemVClassificationTypeInteger;
2677 else if (cls == NFT_FIXEDARRAY)
2679 VARTYPE vtElement = ((FieldMarshaler_FixedArray*)pFieldMarshaler)->GetElementVT();
2698 fieldClassificationType = SystemVClassificationTypeInteger;
2702 fieldClassificationType = SystemVClassificationTypeSSE;
2713 case VT_USERDEFINED:
2719 case VT_STREAMED_OBJECT:
2720 case VT_STORED_OBJECT:
2721 case VT_BLOB_OBJECT:
2729 #ifdef FEATURE_COMINTEROP
2730 else if (cls == NFT_INTERFACE)
2732 // COMInterop not supported for CORECLR.
2733 _ASSERTE(false && "COMInterop not supported for CORECLR.");
2736 #ifdef FEATURE_CLASSIC_COMINTEROP
2737 else if (cls == NFT_SAFEARRAY)
2739 // COMInterop not supported for CORECLR.
2740 _ASSERTE(false && "COMInterop not supported for CORECLR.");
2743 #endif // FEATURE_CLASSIC_COMINTEROP
2744 #endif // FEATURE_COMINTEROP
2745 else if (cls == NFT_NESTEDLAYOUTCLASS)
2747 MethodTable* pFieldMT = ((FieldMarshaler_NestedLayoutClass*)pFieldMarshaler)->GetMethodTable();
2749 bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct;
2750 helperPtr->inEmbeddedStruct = true;
2751 bool structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout);
2752 helperPtr->inEmbeddedStruct = inEmbeddedStructPrev;
2756 // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return do not enregister.
2762 else if (cls == NFT_NESTEDVALUECLASS)
2764 MethodTable* pFieldMT = ((FieldMarshaler_NestedValueClass*)pFieldMarshaler)->GetMethodTable();
2766 bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct;
2767 helperPtr->inEmbeddedStruct = true;
2768 bool structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout);
2769 helperPtr->inEmbeddedStruct = inEmbeddedStructPrev;
2773 // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return do not enregister.
2779 else if (cls == NFT_COPY1)
2781 // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy1.
2784 case ELEMENT_TYPE_I1:
2785 fieldClassificationType = SystemVClassificationTypeInteger;
2788 case ELEMENT_TYPE_U1:
2789 fieldClassificationType = SystemVClassificationTypeInteger;
2794 return false; // Pass on stack.
2797 else if (cls == NFT_COPY2)
2799 // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy2.
2802 case ELEMENT_TYPE_CHAR:
2803 case ELEMENT_TYPE_I2:
2804 case ELEMENT_TYPE_U2:
2805 fieldClassificationType = SystemVClassificationTypeInteger;
2810 return false; // Pass on stack.
2813 else if (cls == NFT_COPY4)
2815 // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy4.
2818 // At this point, ELEMENT_TYPE_I must be 4 bytes long. Same for ELEMENT_TYPE_U.
2819 case ELEMENT_TYPE_I:
2820 case ELEMENT_TYPE_I4:
2821 case ELEMENT_TYPE_U:
2822 case ELEMENT_TYPE_U4:
2823 case ELEMENT_TYPE_PTR:
2824 fieldClassificationType = SystemVClassificationTypeInteger;
2827 case ELEMENT_TYPE_R4:
2828 fieldClassificationType = SystemVClassificationTypeSSE;
2833 return false; // Pass on stack.
2836 else if (cls == NFT_COPY8)
2838 // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy8.
2841 // At this point, ELEMENT_TYPE_I must be 8 bytes long. Same for ELEMENT_TYPE_U.
2842 case ELEMENT_TYPE_I:
2843 case ELEMENT_TYPE_I8:
2844 case ELEMENT_TYPE_U:
2845 case ELEMENT_TYPE_U8:
2846 case ELEMENT_TYPE_PTR:
2847 fieldClassificationType = SystemVClassificationTypeInteger;
2850 case ELEMENT_TYPE_R8:
2851 fieldClassificationType = SystemVClassificationTypeSSE;
2856 return false; // Pass on stack.
2859 else if (cls == NFT_FIXEDSTRINGUNI)
2861 fieldClassificationType = SystemVClassificationTypeInteger;
2863 else if (cls == NFT_FIXEDSTRINGANSI)
2865 fieldClassificationType = SystemVClassificationTypeInteger;
2869 // All other NStruct Field Types which do not require special handling.
2872 #ifdef FEATURE_COMINTEROP
2876 case NFT_VARIANTBOOL:
2878 // COMInterop not supported for CORECLR.
2879 _ASSERTE(false && "COMInterop not supported for CORECLR.");
2881 #endif // FEATURE_COMINTEROP
2883 case NFT_STRINGANSI:
2885 case NFT_STRINGUTF8:
2889 case NFT_SAFEHANDLE:
2890 case NFT_CRITICALHANDLE:
2891 fieldClassificationType = SystemVClassificationTypeInteger;
2894 // It's not clear what the right behavior for NTF_DECIMAL and NTF_DATE is
2895 // But those two types would only make sense on windows. We can revisit this later
2904 if ((normalizedFieldOffset % fieldNativeSize) != 0)
2906 // The spec requires that struct values on the stack from register passed fields expects
2907 // those fields to be at their natural alignment.
2909 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Native Field %d %s: offset %d (normalized %d), native size %d not at natural alignment; not enregistering struct\n",
2910 nestingLevel * 5, "", fieldNum, fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize));
2914 if ((int)normalizedFieldOffset <= helperPtr->largestFieldOffset)
2916 // Find the field corresponding to this offset and update the size if needed.
2917 // We assume that either it matches the offset of a previously seen field, or
2918 // it is an out-of-order offset (the VM does give us structs in non-increasing
2919 // offset order sometimes) that doesn't overlap any other field.
2922 for (i = helperPtr->currentUniqueOffsetField - 1; i >= 0; i--)
2924 if (helperPtr->fieldOffsets[i] == normalizedFieldOffset)
2926 if (fieldNativeSize > helperPtr->fieldSizes[i])
2928 helperPtr->fieldSizes[i] = fieldNativeSize;
2931 helperPtr->fieldClassifications[i] = ReClassifyField(helperPtr->fieldClassifications[i], fieldClassificationType);
2933 LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Native Field %d %s: offset %d (normalized %d), native size %d, union with uniqueOffsetField %d, field type classification %s, reclassified field to %s\n",
2934 nestingLevel * 5, "", fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize, i,
2935 GetSystemVClassificationTypeName(fieldClassificationType),
2936 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[i])));
2940 // Make sure the field doesn't start in the middle of another field.
2941 _ASSERTE((normalizedFieldOffset < helperPtr->fieldOffsets[i]) ||
2942 (normalizedFieldOffset >= helperPtr->fieldOffsets[i] + helperPtr->fieldSizes[i]));
2947 // The proper size of the union set of fields has been set above; continue to the next field.
2953 helperPtr->largestFieldOffset = (int)normalizedFieldOffset;
2956 // Set the data for a new field.
2958 // The new field classification must not have been initialized yet.
2959 _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass);
2961 // There are only a few field classifications that are allowed.
2962 _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) ||
2963 (fieldClassificationType == SystemVClassificationTypeIntegerReference) ||
2964 (fieldClassificationType == SystemVClassificationTypeIntegerByRef) ||
2965 (fieldClassificationType == SystemVClassificationTypeSSE));
2967 helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType;
2968 helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldNativeSize;
2969 helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset;
2971 LOG((LF_JIT, LL_EVERYTHING, " %*s**** Native Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n",
2972 nestingLevel * 5, "", fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize, helperPtr->currentUniqueOffsetField,
2973 GetSystemVClassificationTypeName(fieldClassificationType),
2974 GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField])));
2976 _ASSERTE(helperPtr->currentUniqueOffsetField < SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT);
2977 helperPtr->currentUniqueOffsetField++;
2978 ((BYTE*&)pFieldMarshaler) += MAXFIELDMARSHALERSIZE;
2979 } // end per-field for loop
2981 AssignClassifiedEightByteTypes(helperPtr, nestingLevel);
2984 #endif // DACCESS_COMPILE
2987 // Assigns the classification types to the array with eightbyte types.
2988 void MethodTable::AssignClassifiedEightByteTypes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel) const
2990 static const size_t CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS = CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS * SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES;
2991 static_assert_no_msg(CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS == SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT);
2993 if (!helperPtr->inEmbeddedStruct)
2995 _ASSERTE(nestingLevel == 0);
2997 int largestFieldOffset = helperPtr->largestFieldOffset;
2998 _ASSERTE(largestFieldOffset != -1);
3000 // We're at the top level of the recursion, and we're done looking at the fields.
3001 // Now sort the fields by offset and set the output data.
3003 int sortedFieldOrder[CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS];
3004 for (unsigned i = 0; i < CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS; i++)
3006 sortedFieldOrder[i] = -1;
3009 unsigned numFields = helperPtr->currentUniqueOffsetField;
3010 for (unsigned i = 0; i < numFields; i++)
3012 _ASSERTE(helperPtr->fieldOffsets[i] < CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS);
3013 _ASSERTE(sortedFieldOrder[helperPtr->fieldOffsets[i]] == -1); // we haven't seen this field offset yet.
3014 sortedFieldOrder[helperPtr->fieldOffsets[i]] = i;
3017 // Calculate the eightbytes and their types.
3018 unsigned int accumulatedSizeForEightByte = 0;
3019 unsigned int currentEightByteOffset = 0;
3020 unsigned int currentEightByte = 0;
3022 int lastFieldOrdinal = sortedFieldOrder[largestFieldOffset];
3023 unsigned int offsetAfterLastFieldByte = largestFieldOffset + helperPtr->fieldSizes[lastFieldOrdinal];
3024 SystemVClassificationType lastFieldClassification = helperPtr->fieldClassifications[lastFieldOrdinal];
3026 unsigned offset = 0;
3027 for (unsigned fieldSize = 0; offset < helperPtr->structSize; offset += fieldSize)
3029 SystemVClassificationType fieldClassificationType;
3031 int ordinal = sortedFieldOrder[offset];
3034 // If there is no field that starts as this offset, treat its contents as padding.
3035 // Any padding that follows the last field receives the same classification as the
3036 // last field; padding between fields receives the NO_CLASS classification as per
3037 // the SysV ABI spec.
3039 fieldClassificationType = offset < offsetAfterLastFieldByte ? SystemVClassificationTypeNoClass : lastFieldClassification;
3043 fieldSize = helperPtr->fieldSizes[ordinal];
3044 _ASSERTE(fieldSize > 0 && fieldSize <= SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES);
3046 fieldClassificationType = helperPtr->fieldClassifications[ordinal];
3047 _ASSERTE(fieldClassificationType != SystemVClassificationTypeMemory && fieldClassificationType != SystemVClassificationTypeUnknown);
3050 if (helperPtr->eightByteClassifications[currentEightByte] == fieldClassificationType)
3052 // Do nothing. The eight-byte already has this classification.
3054 else if (helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeNoClass)
3056 helperPtr->eightByteClassifications[currentEightByte] = fieldClassificationType;
3058 else if ((helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeInteger) ||
3059 (fieldClassificationType == SystemVClassificationTypeInteger))
3061 _ASSERTE((fieldClassificationType != SystemVClassificationTypeIntegerReference) &&
3062 (fieldClassificationType != SystemVClassificationTypeIntegerByRef));
3064 helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeInteger;
3066 else if ((helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeIntegerReference) ||
3067 (fieldClassificationType == SystemVClassificationTypeIntegerReference))
3069 helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeIntegerReference;
3071 else if ((helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeIntegerByRef) ||
3072 (fieldClassificationType == SystemVClassificationTypeIntegerByRef))
3074 helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeIntegerByRef;
3078 helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeSSE;
3081 accumulatedSizeForEightByte += fieldSize;
3082 if (accumulatedSizeForEightByte == SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES)
3084 // Save data for this eightbyte.
3085 helperPtr->eightByteSizes[currentEightByte] = SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES;
3086 helperPtr->eightByteOffsets[currentEightByte] = currentEightByteOffset;
3088 // Set up for next eightbyte.
3090 _ASSERTE(currentEightByte <= CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS);
3092 currentEightByteOffset = offset + fieldSize;
3093 accumulatedSizeForEightByte = 0;
3096 _ASSERTE(accumulatedSizeForEightByte < SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES);
3099 // Handle structs that end in the middle of an eightbyte.
3100 if (accumulatedSizeForEightByte > 0 && accumulatedSizeForEightByte < SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES)
3102 _ASSERTE((helperPtr->structSize % SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES) != 0);
3104 helperPtr->eightByteSizes[currentEightByte] = accumulatedSizeForEightByte;
3105 helperPtr->eightByteOffsets[currentEightByte] = currentEightByteOffset;
3109 helperPtr->eightByteCount = currentEightByte;
3111 _ASSERTE(helperPtr->eightByteCount <= CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS);
3114 LOG((LF_JIT, LL_EVERYTHING, " ----\n"));
3115 LOG((LF_JIT, LL_EVERYTHING, " **** Number EightBytes: %d\n", helperPtr->eightByteCount));
3116 for (unsigned i = 0; i < helperPtr->eightByteCount; i++)
3118 LOG((LF_JIT, LL_EVERYTHING, " **** eightByte %d -- classType: %s, eightByteOffset: %d, eightByteSize: %d\n",
3119 i, GetSystemVClassificationTypeName(helperPtr->eightByteClassifications[i]), helperPtr->eightByteOffsets[i], helperPtr->eightByteSizes[i]));
3125 #endif // defined(FEATURE_UNIX_AMD64_STRUCT_PASSING_ITF)
3127 #if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
3128 //==========================================================================================
3129 void MethodTable::AllocateRegularStaticBoxes()
3135 PRECONDITION(!ContainsGenericVariables());
3136 PRECONDITION(HasBoxedRegularStatics());
3141 LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Instantiating static handles for %s\n", GetDebugClassName()));
3145 PTR_BYTE pStaticBase = GetGCStaticsBasePointer();
3147 GCPROTECT_BEGININTERIOR(pStaticBase);
3149 // In ngened case, we have cached array with boxed statics MTs. In JITed case, we have just the FieldDescs
3150 ClassCtorInfoEntry *pClassCtorInfoEntry = GetClassCtorInfoIfExists();
3151 if (pClassCtorInfoEntry != NULL)
3153 OBJECTREF* pStaticSlots = (OBJECTREF*)(pStaticBase + pClassCtorInfoEntry->firstBoxedStaticOffset);
3154 GCPROTECT_BEGININTERIOR(pStaticSlots);
3156 ArrayDPTR(RelativeFixupPointer<PTR_MethodTable>) ppMTs = GetLoaderModule()->GetZapModuleCtorInfo()->
3157 GetGCStaticMTs(pClassCtorInfoEntry->firstBoxedStaticMTIndex);
3159 DWORD numBoxedStatics = pClassCtorInfoEntry->numBoxedStatics;
3160 for (DWORD i = 0; i < numBoxedStatics; i++)
3162 #ifdef FEATURE_PREJIT
3163 Module::RestoreMethodTablePointer(&(ppMTs[i]), GetLoaderModule());
3165 MethodTable *pFieldMT = ppMTs[i].GetValue();
3169 LOG((LF_CLASSLOADER, LL_INFO10000, "\tInstantiating static of type %s\n", pFieldMT->GetDebugClassName()));
3170 OBJECTREF obj = AllocateStaticBox(pFieldMT, pClassCtorInfoEntry->hasFixedAddressVTStatics);
3172 SetObjectReference( &(pStaticSlots[i]), obj, GetAppDomain() );
3178 // We should never take this codepath in zapped images.
3179 _ASSERTE(!IsZapped());
3181 FieldDesc *pField = HasGenericsStaticsInfo() ?
3182 GetGenericsStaticFieldDescs() : (GetApproxFieldDescListRaw() + GetNumIntroducedInstanceFields());
3183 FieldDesc *pFieldEnd = pField + GetNumStaticFields();
3185 while (pField < pFieldEnd)
3187 _ASSERTE(pField->IsStatic());
3189 if (!pField->IsSpecialStatic() && pField->IsByValue())
3191 TypeHandle th = pField->GetFieldTypeHandleThrowing();
3192 MethodTable* pFieldMT = th.GetMethodTable();
3194 LOG((LF_CLASSLOADER, LL_INFO10000, "\tInstantiating static of type %s\n", pFieldMT->GetDebugClassName()));
3195 OBJECTREF obj = AllocateStaticBox(pFieldMT, HasFixedAddressVTStatics());
3197 SetObjectReference( (OBJECTREF*)(pStaticBase + pField->GetOffset()), obj, GetAppDomain() );
3206 //==========================================================================================
3207 OBJECTREF MethodTable::AllocateStaticBox(MethodTable* pFieldMT, BOOL fPinned, OBJECTHANDLE* pHandle)
3217 _ASSERTE(pFieldMT->IsValueType());
3219 // Activate any dependent modules if necessary
3220 pFieldMT->EnsureInstanceActive();
3222 OBJECTREF obj = AllocateObject(pFieldMT);
3224 // Pin the object if necessary
3227 LOG((LF_CLASSLOADER, LL_INFO10000, "\tSTATICS:Pinning static (VT fixed address attribute) of type %s\n", pFieldMT->GetDebugClassName()));
3228 OBJECTHANDLE oh = GetAppDomain()->CreatePinningHandle(obj);
3245 //==========================================================================================
3246 BOOL MethodTable::RunClassInitEx(OBJECTREF *pThrowable)
3253 PRECONDITION(IsFullyLoaded());
3254 PRECONDITION(IsProtectedByGCFrame(pThrowable));
3258 // A somewhat unusual function, can both return throwable and throw.
3259 // The difference is, we throw on restartable operations and just return throwable
3260 // on exceptions fatal for the .cctor
3261 // (Of course in the latter case the caller is supposed to throw pThrowable)
3262 // Doing the opposite ( i.e. throwing on fatal and returning on nonfatal)
3263 // would be more intuitive but it's more convenient the way it is
3267 // During the <clinit>, this thread must not be asynchronously
3268 // stopped or interrupted. That would leave the class unavailable
3269 // and is therefore a security hole. We don't have to worry about
3270 // multithreading, since we only manipulate the current thread's count.
3271 ThreadPreventAsyncHolder preventAsync;
3273 // If the static initialiser throws an exception that it doesn't catch, it has failed
3276 // Activate our module if necessary
3277 EnsureInstanceActive();
3279 STRESS_LOG1(LF_CLASSLOADER, LL_INFO1000, "RunClassInit: Calling class contructor for type %pT\n", this);
3281 MethodTable * pCanonMT = GetCanonicalMethodTable();
3283 // Call the code method without touching MethodDesc if possible
3284 PCODE pCctorCode = pCanonMT->GetSlot(pCanonMT->GetClassConstructorSlot());
3286 if (pCanonMT->IsSharedByGenericInstantiations())
3288 PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(pCctorCode);
3289 DECLARE_ARGHOLDER_ARRAY(args, 1);
3290 args[ARGNUM_0] = PTR_TO_ARGHOLDER(this);
3291 CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE;
3292 CALL_MANAGED_METHOD_NORET(args);
3296 PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(pCctorCode);
3297 DECLARE_ARGHOLDER_ARRAY(args, 0);
3298 CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE;
3299 CALL_MANAGED_METHOD_NORET(args);
3302 STRESS_LOG1(LF_CLASSLOADER, LL_INFO100000, "RunClassInit: Returned Successfully from class contructor for type %pT\n", this);
3308 // Exception set by parent
3309 // <TODO>@TODO: We should make this an ExceptionInInitializerError if the exception thrown is not
3310 // a subclass of Error</TODO>
3311 *pThrowable = GET_THROWABLE();
3312 _ASSERTE(fRet == FALSE);
3314 #ifdef FEATURE_CORRUPTING_EXCEPTIONS
3315 // If active thread state does not have a CorruptionSeverity set for the exception,
3316 // then set one up based upon the current exception code and/or the throwable.
3318 // When can we be here and current exception tracker may not have corruption severity set?
3319 // Incase of SO in managed code, SO is never seen by CLR's exception handler for managed code
3320 // and if this happens in cctor, we can end up here without the corruption severity set.
3321 Thread *pThread = GetThread();
3322 _ASSERTE(pThread != NULL);
3323 ThreadExceptionState *pCurTES = pThread->GetExceptionState();
3324 _ASSERTE(pCurTES != NULL);
3325 if (pCurTES->GetLastActiveExceptionCorruptionSeverity() == NotSet)
3327 if (CEHelper::IsProcessCorruptedStateException(GetCurrentExceptionCode()) ||
3328 CEHelper::IsProcessCorruptedStateException(*pThrowable))
3330 // Process Corrupting
3331 pCurTES->SetLastActiveExceptionCorruptionSeverity(ProcessCorrupting);
3332 LOG((LF_EH, LL_INFO100, "MethodTable::RunClassInitEx - Exception treated as ProcessCorrupting.\n"));
3337 pCurTES->SetLastActiveExceptionCorruptionSeverity(NotCorrupting);
3338 LOG((LF_EH, LL_INFO100, "MethodTable::RunClassInitEx - Exception treated as non-corrupting.\n"));
3343 LOG((LF_EH, LL_INFO100, "MethodTable::RunClassInitEx - Exception already has corruption severity set.\n"));
3345 #endif // FEATURE_CORRUPTING_EXCEPTIONS
3347 EX_END_CATCH(SwallowAllExceptions)
3352 //==========================================================================================
3353 void MethodTable::DoRunClassInitThrowing()
3366 // This is a fairly aggressive policy. Merely asking that the class be initialized is grounds for kicking you out.
3367 // Alternately, we could simply NOP out the class initialization. Since the aggressive policy is also the more secure
3368 // policy, keep this unless it proves intractable to remove all premature classinits in the system.
3372 pThread = GetThread();
3374 INTERIOR_STACK_PROBE_FOR(pThread, 8);
3376 AppDomain *pDomain = GetAppDomain();
3378 HRESULT hrResult = E_FAIL;
3379 const char *description;
3380 STRESS_LOG2(LF_CLASSLOADER, LL_INFO100000, "DoRunClassInit: Request to init %pT in appdomain %p\n", this, pDomain);
3383 // Take the global lock
3386 ListLock *_pLock = pDomain->GetClassInitLock();
3388 ListLockHolder pInitLock(_pLock);
3391 if (IsClassInited())
3395 // Handle cases where the .cctor has already tried to run but failed.
3401 // Some error occurred trying to init this class
3402 ListLockEntry* pEntry= (ListLockEntry *) _pLock->Find(this);
3403 _ASSERTE(pEntry!=NULL);
3404 _ASSERTE(pEntry->m_pLoaderAllocator == (GetDomain()->IsSharedDomain() ? pDomain->GetLoaderAllocator() : GetLoaderAllocator()));
3406 // If this isn't a TypeInitializationException, then its creation failed
3407 // somehow previously, so we should make one last attempt to create it. If
3408 // that fails, just throw the exception that was originally thrown.
3409 // Primarily, this deals with the problem that the exception is a
3410 // ThreadAbortException, because this must be executing on a different
3411 // thread. If in fact this thread is also aborting, then rethrowing the
3412 // other thread's exception will not do any worse.
3414 // If we need to create the type init exception object, we'll need to
3415 // GC protect these, so might as well create the structure now.
3417 OBJECTREF pInitException;
3418 OBJECTREF pNewInitException;
3419 OBJECTREF pThrowable;
3422 gc.pInitException = pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException);
3423 gc.pNewInitException = NULL;
3424 gc.pThrowable = NULL;
3426 GCPROTECT_BEGIN(gc);
3428 // We need to release this lock because CreateTypeInitializationExceptionObject and fetching the TypeLoad exception can cause
3429 // managed code to re-enter into this codepath, causing a locking order violation.
3430 pInitLock.Release();
3432 if (MscorlibBinder::GetException(kTypeInitializationException) != gc.pInitException->GetMethodTable())
3434 DefineFullyQualifiedNameForClassWOnStack();
3435 LPCWSTR wszName = GetFullyQualifiedNameForClassW(this);
3437 CreateTypeInitializationExceptionObject(wszName, &gc.pInitException, &gc.pNewInitException, &gc.pThrowable);
3439 LOADERHANDLE hOrigInitException = pEntry->m_hInitException;
3440 if (!CLRException::IsPreallocatedExceptionObject(pEntry->m_pLoaderAllocator->GetHandleValue(hOrigInitException)))
3442 // Now put the new init exception in the handle. If another thread beat us (because we released the
3443 // lock above), then we'll just let the extra init exception object get collected later.
3444 pEntry->m_pLoaderAllocator->CompareExchangeValueInHandle(pEntry->m_hInitException, gc.pNewInitException, gc.pInitException);
3446 // if the stored exception is a preallocated one we cannot store the new Exception object in it.
3447 // we'll attempt to create a new handle for the new TypeInitializationException object
3448 LOADERHANDLE hNewInitException = NULL;
3449 // CreateHandle can throw due to OOM. We need to catch this so that we make sure to set the
3450 // init error. Whatever exception was thrown will be rethrown below, so no worries.
3452 hNewInitException = pEntry->m_pLoaderAllocator->AllocateHandle(gc.pNewInitException);
3454 // If we failed to create the handle we'll just leave the originally alloc'd one in place.
3455 } EX_END_CATCH(SwallowAllExceptions);
3457 // if two threads are racing to set m_hInitException, clear the handle created by the loser
3458 if (hNewInitException != NULL &&
3459 InterlockedCompareExchangeT((&pEntry->m_hInitException), hNewInitException, hOrigInitException) != hOrigInitException)
3461 pEntry->m_pLoaderAllocator->ClearHandle(hNewInitException);
3466 gc.pThrowable = gc.pInitException;
3471 // Throw the saved exception. Since we may be rethrowing a previously cached exception, must clear the stack trace first.
3472 // Rethrowing a previously cached exception is distasteful but is required for appcompat with Everett.
3474 // (The IsException() is probably more appropriate as an assert but as this isn't a heavily tested code path,
3475 // I prefer to be defensive here.)
3476 if (IsException(gc.pThrowable->GetMethodTable()))
3478 ((EXCEPTIONREF)(gc.pThrowable))->ClearStackTraceForThrow();
3481 // <FEATURE_CORRUPTING_EXCEPTIONS>
3482 // Specify the corruption severity to be used to raise this exception in COMPlusThrow below.
3483 // This will ensure that when the exception is seen by the managed code personality routine,
3484 // it will setup the correct corruption severity in the exception tracker.
3485 // </FEATURE_CORRUPTING_EXCEPTIONS>
3487 COMPlusThrow(gc.pThrowable
3488 #ifdef FEATURE_CORRUPTING_EXCEPTIONS
3489 , pEntry->m_CorruptionSeverity
3490 #endif // FEATURE_CORRUPTING_EXCEPTIONS
3494 description = ".cctor lock";
3496 description = GetDebugClassName();
3501 //nontrivial holder, might take a lock in destructor
3502 ListLockEntryHolder pEntry(ListLockEntry::Find(pInitLock, this, description));
3504 ListLockEntryLockHolder pLock(pEntry, FALSE);
3506 // We have a list entry, we can release the global lock now
3507 pInitLock.Release();
3509 if (pLock.DeadlockAwareAcquire())
3511 if (pEntry->m_hrResultCode == S_FALSE)
3513 if (!NingenEnabled())
3515 if (HasBoxedRegularStatics())
3517 // First, instantiate any objects needed for value type statics
3518 AllocateRegularStaticBoxes();
3521 // Nobody has run the .cctor yet
3522 if (HasClassConstructor())
3525 OBJECTREF pInnerException;
3526 OBJECTREF pInitException;
3527 OBJECTREF pThrowable;
3529 gc.pInnerException = NULL;
3530 gc.pInitException = NULL;
3531 gc.pThrowable = NULL;
3532 GCPROTECT_BEGIN(gc);
3534 if (!RunClassInitEx(&gc.pInnerException))
3536 // The .cctor failed and we want to store the exception that resulted
3537 // in the entry. Increment the ref count to keep the entry alive for
3538 // subsequent attempts to run the .cctor.
3540 // For collectible types, register the entry for cleanup.
3541 if (GetLoaderAllocator()->IsCollectible())
3543 GetLoaderAllocator()->RegisterFailedTypeInitForCleanup(pEntry);
3546 _ASSERTE(g_pThreadAbortExceptionClass == MscorlibBinder::GetException(kThreadAbortException));
3548 if(gc.pInnerException->GetMethodTable() == g_pThreadAbortExceptionClass)
3550 gc.pThrowable = gc.pInnerException;
3551 gc.pInitException = gc.pInnerException;
3552 gc.pInnerException = NULL;
3556 DefineFullyQualifiedNameForClassWOnStack();
3557 LPCWSTR wszName = GetFullyQualifiedNameForClassW(this);
3559 // Note that this may not succeed due to problems creating the exception
3560 // object. On failure, it will first try to
3561 CreateTypeInitializationExceptionObject(
3562 wszName, &gc.pInnerException, &gc.pInitException, &gc.pThrowable);
3565 pEntry->m_pLoaderAllocator = GetDomain()->IsSharedDomain() ? pDomain->GetLoaderAllocator() : GetLoaderAllocator();
3567 // CreateHandle can throw due to OOM. We need to catch this so that we make sure to set the
3568 // init error. Whatever exception was thrown will be rethrown below, so no worries.
3570 // Save the exception object, and return to caller as well.
3571 pEntry->m_hInitException = pEntry->m_pLoaderAllocator->AllocateHandle(gc.pInitException);
3573 // If we failed to create the handle (due to OOM), we'll just store the preallocated OOM
3574 // handle here instead.
3575 pEntry->m_hInitException = pEntry->m_pLoaderAllocator->AllocateHandle(CLRException::GetPreallocatedOutOfMemoryException());
3576 } EX_END_CATCH(SwallowAllExceptions);
3578 pEntry->m_hrResultCode = E_FAIL;
3579 SetClassInitError();
3581 #ifdef FEATURE_CORRUPTING_EXCEPTIONS
3582 // Save the corruption severity of the exception so that if the type system
3583 // attempts to pick it up from its cache list and throw again, it should
3584 // treat the exception as corrupting, if applicable.
3585 pEntry->m_CorruptionSeverity = pThread->GetExceptionState()->GetLastActiveExceptionCorruptionSeverity();
3587 // We should be having a valid corruption severity at this point
3588 _ASSERTE(pEntry->m_CorruptionSeverity != NotSet);
3589 #endif // FEATURE_CORRUPTING_EXCEPTIONS
3591 COMPlusThrow(gc.pThrowable
3592 #ifdef FEATURE_CORRUPTING_EXCEPTIONS
3593 , pEntry->m_CorruptionSeverity
3594 #endif // FEATURE_CORRUPTING_EXCEPTIONS
3602 pEntry->m_hrResultCode = S_OK;
3604 // Set the initialization flags in the DLS and on domain-specific types.
3605 // Note we also set the flag for dynamic statics, which use the DynamicStatics part
3606 // of the DLS irrespective of whether the type is domain neutral or not.
3612 // Use previous result
3614 hrResult = pEntry->m_hrResultCode;
3615 if(FAILED(hrResult))
3617 // An exception may have occurred in the cctor. DoRunClassInit() should return FALSE in that
3619 _ASSERTE(pEntry->m_hInitException);
3620 _ASSERTE(pEntry->m_pLoaderAllocator == (GetDomain()->IsSharedDomain() ? pDomain->GetLoaderAllocator() : GetLoaderAllocator()));
3621 _ASSERTE(IsInitError());
3623 // Throw the saved exception. Since we are rethrowing a previously cached exception, must clear the stack trace first.
3624 // Rethrowing a previously cached exception is distasteful but is required for appcompat with Everett.
3626 // (The IsException() is probably more appropriate as an assert but as this isn't a heavily tested code path,
3627 // I prefer to be defensive here.)
3628 if (IsException(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException)->GetMethodTable()))
3630 ((EXCEPTIONREF)(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException)))->ClearStackTraceForThrow();
3632 COMPlusThrow(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException));
3639 // Notify any entries waiting on the current entry and wait for the required entries.
3642 // We need to take the global lock before we play with the list of entries.
3644 STRESS_LOG2(LF_CLASSLOADER, LL_INFO100000, "DoRunClassInit: returning SUCCESS for init %pT in appdomain %p\n", this, pDomain);
3645 // No need to set pThrowable in case of error it will already have been set.
3647 g_IBCLogger.LogMethodTableAccess(this);
3650 END_INTERIOR_STACK_PROBE;
3653 //==========================================================================================
3654 void MethodTable::CheckRunClassInitThrowing()
3661 INJECT_FAULT(COMPlusThrowOM());
3662 PRECONDITION(IsFullyLoaded());
3666 { // Debug-only code causes SO volation, so add exception.
3667 CONTRACT_VIOLATION(SOToleranceViolation);
3668 CONSISTENCY_CHECK(CheckActivated());
3671 // To find GC hole easier...
3674 if (IsClassPreInited())
3677 // Don't initialize shared generic instantiations (e.g. MyClass<__Canon>)
3678 if (IsSharedByGenericInstantiations())
3681 DomainLocalModule *pLocalModule = GetDomainLocalModule();
3682 _ASSERTE(pLocalModule);
3684 DWORD iClassIndex = GetClassIndex();
3686 // Check to see if we have already run the .cctor for this class.
3687 if (!pLocalModule->IsClassAllocated(this, iClassIndex))
3688 pLocalModule->PopulateClass(this);
3690 if (!pLocalModule->IsClassInitialized(this, iClassIndex))
3691 DoRunClassInitThrowing();
3694 //==========================================================================================
3695 void MethodTable::CheckRunClassInitAsIfConstructingThrowing()
3705 if (HasPreciseInitCctors())
3707 MethodTable *pMTCur = this;
3708 while (pMTCur != NULL)
3710 if (!pMTCur->GetClass()->IsBeforeFieldInit())
3711 pMTCur->CheckRunClassInitThrowing();
3713 pMTCur = pMTCur->GetParentMethodTable();
3718 //==========================================================================================
3719 OBJECTREF MethodTable::Allocate()
3729 CONSISTENCY_CHECK(IsFullyLoaded());
3731 EnsureInstanceActive();
3733 if (HasPreciseInitCctors())
3735 CheckRunClassInitAsIfConstructingThrowing();
3738 return AllocateObject(this);
3741 //==========================================================================================
3742 // box 'data' creating a new object and return it. This routine understands the special
3743 // handling needed for Nullable values.
3744 // see code:Nullable#NullableVerification
3746 OBJECTREF MethodTable::Box(void* data)
3753 PRECONDITION(IsValueType());
3759 GCPROTECT_BEGININTERIOR (data);
3763 // We should never box a type that contains stack pointers.
3764 COMPlusThrow(kInvalidOperationException, W("InvalidOperation_TypeCannotBeBoxed"));
3767 ref = FastBox(&data);
3772 OBJECTREF MethodTable::FastBox(void** data)
3779 PRECONDITION(IsValueType());
3783 // See code:Nullable#NullableArchitecture for more
3785 return Nullable::Box(*data, this);
3787 OBJECTREF ref = Allocate();
3788 CopyValueClass(ref->UnBox(), *data, this, ref->GetAppDomain());
3792 #if _TARGET_X86_ || _TARGET_AMD64_
3793 //==========================================================================================
3794 static void FastCallFinalize(Object *obj, PCODE funcPtr, BOOL fCriticalCall)
3796 STATIC_CONTRACT_THROWS;
3797 STATIC_CONTRACT_GC_TRIGGERS;
3798 STATIC_CONTRACT_MODE_COOPERATIVE;
3799 STATIC_CONTRACT_SO_INTOLERANT;
3801 BEGIN_CALL_TO_MANAGEDEX(fCriticalCall ? EEToManagedCriticalCall : EEToManagedDefault);
3803 #if defined(_TARGET_X86_)
3809 INDEBUG(nop) // Mark the fact that we can call managed code
3812 #else // _TARGET_X86_
3814 FastCallFinalizeWorker(obj, funcPtr);
3816 #endif // _TARGET_X86_
3818 END_CALL_TO_MANAGED();
3821 #endif // _TARGET_X86_ || _TARGET_AMD64_
3823 void CallFinalizerOnThreadObject(Object *obj)
3825 STATIC_CONTRACT_MODE_COOPERATIVE;
3827 THREADBASEREF refThis = (THREADBASEREF)ObjectToOBJECTREF(obj);
3828 Thread* thread = refThis->GetInternal();
3830 // Prevent multiple calls to Finalize
3831 // Objects can be resurrected after being finalized. However, there is no
3832 // race condition here. We always check whether an exposed thread object is
3833 // still attached to the internal Thread object, before proceeding.
3836 refThis->SetDelegate(NULL);
3838 // During process shutdown, we finalize even reachable objects. But if we break
3839 // the link between the System.Thread and the internal Thread object, the runtime
3840 // may not work correctly. In particular, we won't be able to transition between
3841 // contexts and domains to finalize other objects. Since the runtime doesn't
3842 // require that Threads finalize during shutdown, we need to disable this. If
3843 // we wait until phase 2 of shutdown finalization (when the EE is suspended and
3844 // will never resume) then we can simply skip the side effects of Thread
3846 if ((g_fEEShutDown & ShutDown_Finalize2) == 0)
3848 if (GetThread() != thread)
3850 refThis->ClearInternal();
3853 FastInterlockOr ((ULONG *)&thread->m_State, Thread::TS_Finalized);
3854 Thread::SetCleanupNeededForFinalizedThread();
3859 //==========================================================================================
3860 // From the GC finalizer thread, invoke the Finalize() method on an object.
3861 void MethodTable::CallFinalizer(Object *obj)
3868 PRECONDITION(obj->GetMethodTable()->HasFinalizer() ||
3869 obj->GetMethodTable()->IsTransparentProxy());
3873 // Never call any finalizers under ngen for determinism
3874 if (IsCompilationProcess())
3879 MethodTable *pMT = obj->GetMethodTable();
3882 // Check for precise init class constructors that have failed, if any have failed, then we didn't run the
3883 // constructor for the object, and running the finalizer for the object would violate the CLI spec by running
3884 // instance code without having successfully run the precise-init class constructor.
3885 if (pMT->HasPreciseInitCctors())
3887 MethodTable *pMTCur = pMT;
3890 if ((!pMTCur->GetClass()->IsBeforeFieldInit()) && pMTCur->IsInitError())
3892 // Precise init Type Initializer for type failed... do not run finalizer
3896 pMTCur = pMTCur->GetParentMethodTable();
3898 while (pMTCur != NULL);
3901 if (pMT == g_pThreadClass)
3903 // Finalizing Thread object requires ThreadStoreLock. It is expensive if
3904 // we keep taking ThreadStoreLock. This is very bad if we have high retiring
3905 // rate of Thread objects.
3906 // To avoid taking ThreadStoreLock multiple times, we mark Thread with TS_Finalized
3907 // and clean up a batch of them when we take ThreadStoreLock next time.
3909 // To avoid possible hierarchy requirement between critical finalizers, we call cleanup
3911 CallFinalizerOnThreadObject(obj);
3916 // Determine if the object has a critical or normal finalizer.
3917 BOOL fCriticalFinalizer = pMT->HasCriticalFinalizer();
3919 // There's no reason to actually set up a frame here. If we crawl out of the
3920 // Finalize() method on this thread, we will see FRAME_TOP which indicates
3921 // that the crawl should terminate. This is analogous to how KickOffThread()
3922 // starts new threads in the runtime.
3923 PCODE funcPtr = pMT->GetRestoredSlot(g_pObjectFinalizerMD->GetSlot());
3926 if (fCriticalFinalizer)
3928 STRESS_LOG2(LF_GCALLOC, LL_INFO100, "Finalizing CriticalFinalizer %pM in domain %d\n",
3929 pMT, GetAppDomain()->GetId().m_dwId);
3933 #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
3935 #ifdef DEBUGGING_SUPPORTED
3936 if (CORDebuggerTraceCall())
3937 g_pDebugInterface->TraceCall((const BYTE *) funcPtr);
3938 #endif // DEBUGGING_SUPPORTED
3940 FastCallFinalize(obj, funcPtr, fCriticalFinalizer);
3942 #else // defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
3944 PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(funcPtr);
3946 DECLARE_ARGHOLDER_ARRAY(args, 1);
3948 args[ARGNUM_0] = PTR_TO_ARGHOLDER(obj);
3950 if (fCriticalFinalizer)
3955 CALL_MANAGED_METHOD_NORET(args);
3957 #endif // (defined(_TARGET_X86_) && defined(_TARGET_AMD64_)
3960 if (fCriticalFinalizer)
3962 STRESS_LOG2(LF_GCALLOC, LL_INFO100, "Finalized CriticalFinalizer %pM in domain %d without exception\n",
3963 pMT, GetAppDomain()->GetId().m_dwId);
3968 //==========================================================================
3969 // If the MethodTable doesn't yet know the Exposed class that represents it via
3970 // Reflection, acquire that class now. Regardless, return it to the caller.
3971 //==========================================================================
3972 OBJECTREF MethodTable::GetManagedClassObject()
3974 CONTRACT(OBJECTREF) {
3979 INJECT_FAULT(COMPlusThrowOM());
3980 PRECONDITION(!IsTransparentProxy() && !IsArray()); // Arrays and remoted objects can't go through this path.
3981 POSTCONDITION(GetWriteableData()->m_hExposedClassObject != 0);
3987 // Force a GC here because GetManagedClassObject could trigger GC nondeterminsticaly
3988 GCStress<cfg_any, PulseGcTriggerPolicy>::MaybeTrigger();
3991 if (GetWriteableData()->m_hExposedClassObject == NULL)
3993 // Make sure that we have been restored
3996 if (IsTransparentProxy()) // Extra protection in a retail build against doing this on a transparent proxy.
3999 REFLECTCLASSBASEREF refClass = NULL;
4000 GCPROTECT_BEGIN(refClass);
4001 if (GetAssembly()->IsIntrospectionOnly())
4002 refClass = (REFLECTCLASSBASEREF) AllocateObject(MscorlibBinder::GetClass(CLASS__CLASS_INTROSPECTION_ONLY));
4004 refClass = (REFLECTCLASSBASEREF) AllocateObject(g_pRuntimeTypeClass);
4006 LoaderAllocator *pLoaderAllocator = GetLoaderAllocator();
4008 ((ReflectClassBaseObject*)OBJECTREFToObject(refClass))->SetType(TypeHandle(this));
4009 ((ReflectClassBaseObject*)OBJECTREFToObject(refClass))->SetKeepAlive(pLoaderAllocator->GetExposedObject());
4011 // Let all threads fight over who wins using InterlockedCompareExchange.
4012 // Only the winner can set m_ExposedClassObject from NULL.
4013 LOADERHANDLE exposedClassObjectHandle = pLoaderAllocator->AllocateHandle(refClass);
4015 if (FastInterlockCompareExchangePointer(&(EnsureWritablePages(GetWriteableDataForWrite())->m_hExposedClassObject), exposedClassObjectHandle, static_cast<LOADERHANDLE>(NULL)))
4017 pLoaderAllocator->ClearHandle(exposedClassObjectHandle);
4022 RETURN(GetManagedClassObjectIfExists());
4025 #endif //!DACCESS_COMPILE && !CROSSGEN_COMPILE
4027 //==========================================================================================
4028 // This needs to stay consistent with AllocateNewMT() and MethodTable::Save()
4030 // <TODO> protect this via some asserts as we've had one hard-to-track-down
4031 // bug already </TODO>
4033 void MethodTable::GetSavedExtent(TADDR *pStart, TADDR *pEnd)
4044 if (ContainsPointersOrCollectible())
4045 start = dac_cast<TADDR>(this) - CGCDesc::GetCGCDescFromMT(this)->GetSize();
4047 start = dac_cast<TADDR>(this);
4049 TADDR end = dac_cast<TADDR>(this) + GetEndOffsetOfOptionalMembers();
4051 _ASSERTE(start && end && (start < end));
4056 #ifdef FEATURE_NATIVE_IMAGE_GENERATION
4058 #ifndef DACCESS_COMPILE
4060 BOOL MethodTable::CanInternVtableChunk(DataImage *image, VtableIndirectionSlotIterator it)
4062 STANDARD_VM_CONTRACT;
4064 _ASSERTE(IsCompilationProcess());
4066 BOOL canBeSharedWith = TRUE;
4068 // We allow full sharing except that which would break MethodTable::Fixup -- when the slots are Fixup'd
4069 // we need to ensure that regardless of who is doing the Fixup the same target is decided on.
4070 // Note that if this requirement is not met, an assert will fire in ZapStoredStructure::Save
4072 if (GetFlag(enum_flag_NotInPZM))
4074 canBeSharedWith = FALSE;
4077 if (canBeSharedWith)
4079 for (DWORD slotNumber = it.GetStartSlot(); slotNumber < it.GetEndSlot(); slotNumber++)
4081 MethodDesc *pMD = GetMethodDescForSlot(slotNumber);
4082 _ASSERTE(pMD != NULL);
4083 pMD->CheckRestore();
4085 if (!image->CanEagerBindToMethodDesc(pMD))
4087 canBeSharedWith = FALSE;
4093 return canBeSharedWith;
4096 //==========================================================================================
4097 void MethodTable::PrepopulateDictionary(DataImage * image, BOOL nonExpansive)
4099 STANDARD_VM_CONTRACT;
4101 if (GetDictionary())
4103 // We can only save elements of the dictionary if we are sure of its
4104 // layout, which means we must be either tightly-knit to the EEClass
4105 // (i.e. be the owner of the EEClass) or else we can hard-bind to the EEClass.
4106 // There's no point in prepopulating the dictionary if we can't save the entries.
4108 // This corresponds to the canSaveSlots which we pass to the Dictionary::Fixup
4110 if (!IsCanonicalMethodTable() && image->CanEagerBindToMethodTable(GetCanonicalMethodTable()))
4112 LOG((LF_JIT, LL_INFO10000, "GENERICS: Prepopulating dictionary for MT %s\n", GetDebugClassName()));
4113 GetDictionary()->PrepopulateDictionary(NULL, this, nonExpansive);
4118 //==========================================================================================
4119 void ModuleCtorInfo::AddElement(MethodTable *pMethodTable)
4121 STANDARD_VM_CONTRACT;
4123 // Get the values for the new entry before we update the
4124 // cache in the Module
4126 // Expand the table if needed. No lock is needed because this is at NGEN time
4127 if (numElements >= numLastAllocated)
4129 _ASSERTE(numElements == numLastAllocated);
4131 RelativePointer<MethodTable *> *ppOldMTEntries = ppMT;
4134 #pragma warning(push)
4135 #pragma warning(disable:22011) // Suppress PREFast warning about integer overflows or underflows
4137 DWORD numNewAllocated = max(2 * numLastAllocated, MODULE_CTOR_ELEMENTS);
4139 #pragma warning(pop)
4142 ppMT = new RelativePointer<MethodTable *> [numNewAllocated];
4146 for (unsigned index = 0; index < numLastAllocated; ++index)
4148 ppMT[index].SetValueMaybeNull(ppOldMTEntries[index].GetValueMaybeNull());
4151 for (unsigned index = numLastAllocated; index < numNewAllocated; ++index)
4153 ppMT[index].SetValueMaybeNull(NULL);
4156 delete[] ppOldMTEntries;
4158 numLastAllocated = numNewAllocated;
4161 // Assign the new entry
4163 // Note the use of two "parallel" arrays. We do this to keep the workingset smaller since we
4164 // often search (in GetClassCtorInfoIfExists) for a methodtable pointer but never actually find it.
4166 ppMT[numElements].SetValue(pMethodTable);
4170 //==========================================================================================
4171 void MethodTable::Save(DataImage *image, DWORD profilingFlags)
4175 PRECONDITION(IsRestored_NoLogging());
4176 PRECONDITION(IsFullyLoaded());
4177 PRECONDITION(image->GetModule()->GetAssembly() ==
4178 GetAppDomain()->ToCompilationDomain()->GetTargetAssembly());
4181 LOG((LF_ZAP, LL_INFO10000, "MethodTable::Save %s (%p)\n", GetDebugClassName(), this));
4183 // Be careful about calling DictionaryLayout::Trim - strict conditions apply.
4184 // See note on that method.
4185 if (GetDictionary() &&
4186 GetClass()->GetDictionaryLayout() &&
4187 image->CanEagerBindToMethodTable(GetCanonicalMethodTable()))
4189 GetClass()->GetDictionaryLayout()->Trim();
4192 // Set the "restore" flags. They may not have been set yet.
4193 // We don't need the return value of this call.
4194 NeedsRestore(image);
4196 //check if this is actually in the PZM
4197 if (Module::GetPreferredZapModuleForMethodTable(this) != GetLoaderModule())
4199 _ASSERTE(!IsStringOrArray());
4200 SetFlag(enum_flag_NotInPZM);
4203 // Set the IsStructMarshallable Bit
4204 if (::IsStructMarshalable(this))
4206 SetStructMarshalable();
4211 GetSavedExtent(&start, &end);
4213 #ifdef FEATURE_COMINTEROP
4216 // Make sure our GUID is computed
4218 // Generic WinRT types can have their GUID computed only if the instantiation is WinRT-legal
4219 if (IsLegalNonArrayWinRTType())
4222 if (SUCCEEDED(GetGuidNoThrow(&dummy, TRUE, FALSE)))
4224 GuidInfo* pGuidInfo = GetGuidInfo();
4225 _ASSERTE(pGuidInfo != NULL);
4227 image->StoreStructure(pGuidInfo,
4229 DataImage::ITEM_GUID_INFO);
4231 Module *pModule = GetModule();
4232 if (pModule->CanCacheWinRTTypeByGuid(this))
4234 pModule->CacheWinRTTypeByGuid(this, pGuidInfo);
4239 GuidInfo** ppGuidInfo = GetGuidInfoPtr();
4244 #endif // FEATURE_COMINTEROP
4248 if (GetDebugClassName() != NULL && !image->IsStored(GetDebugClassName()))
4249 image->StoreStructure(debug_m_szClassName, (ULONG)(strlen(GetDebugClassName())+1),
4250 DataImage::ITEM_DEBUG,
4254 DataImage::ItemKind kindBasic = DataImage::ITEM_METHOD_TABLE;
4256 kindBasic = DataImage::ITEM_METHOD_TABLE_SPECIAL_WRITEABLE;
4258 ZapStoredStructure * pMTNode = image->StoreStructure((void*) start, (ULONG)(end - start), kindBasic);
4260 if ((void *)this != (void *)start)
4261 image->BindPointer(this, pMTNode, (BYTE *)this - (BYTE *)start);
4263 // Store the vtable chunks
4264 VtableIndirectionSlotIterator it = IterateVtableIndirectionSlots();
4267 if (!image->IsStored(it.GetIndirectionSlot()))
4269 if (CanInternVtableChunk(image, it))
4270 image->StoreInternedStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK);
4272 image->StoreStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK);
4276 // Tell the interning system that we have already shared this structure without its help
4277 image->NoteReusedStructure(it.GetIndirectionSlot());
4281 if (HasNonVirtualSlotsArray())
4283 image->StoreStructure(GetNonVirtualSlotsArray(), GetNonVirtualSlotsArraySize(), DataImage::ITEM_VTABLE_CHUNK);
4286 if (HasInterfaceMap())
4288 #ifdef FEATURE_COMINTEROP
4289 // Dynamic interface maps have an additional DWORD_PTR preceding the InterfaceInfo_t array
4290 if (HasDynamicInterfaceMap())
4292 ZapStoredStructure * pInterfaceMapNode;
4293 if (decltype(InterfaceInfo_t::m_pMethodTable)::isRelative)
4295 pInterfaceMapNode = image->StoreStructure(((DWORD_PTR *)GetInterfaceMap()) - 1,
4296 GetInterfaceMapSize(),
4297 DataImage::ITEM_INTERFACE_MAP);
4301 pInterfaceMapNode = image->StoreInternedStructure(((DWORD_PTR *)GetInterfaceMap()) - 1,
4302 GetInterfaceMapSize(),
4303 DataImage::ITEM_INTERFACE_MAP);
4305 image->BindPointer(GetInterfaceMap(), pInterfaceMapNode, sizeof(DWORD_PTR));
4308 #endif // FEATURE_COMINTEROP
4310 if (decltype(InterfaceInfo_t::m_pMethodTable)::isRelative)
4312 image->StoreStructure(GetInterfaceMap(), GetInterfaceMapSize(), DataImage::ITEM_INTERFACE_MAP);
4316 image->StoreInternedStructure(GetInterfaceMap(), GetInterfaceMapSize(), DataImage::ITEM_INTERFACE_MAP);
4320 SaveExtraInterfaceInfo(image);
4323 // If we have a dispatch map, save it.
4324 if (HasDispatchMapSlot())
4326 GetDispatchMap()->Save(image);
4329 if (HasPerInstInfo())
4331 ZapStoredStructure * pPerInstInfoNode;
4332 if (CanEagerBindToParentDictionaries(image, NULL))
4334 if (PerInstInfoElem_t::isRelative)
4336 pPerInstInfoNode = image->StoreStructure((BYTE *)GetPerInstInfo() - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo), DataImage::ITEM_DICTIONARY);
4340 pPerInstInfoNode = image->StoreInternedStructure((BYTE *)GetPerInstInfo() - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo), DataImage::ITEM_DICTIONARY);
4345 pPerInstInfoNode = image->StoreStructure((BYTE *)GetPerInstInfo() - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo), DataImage::ITEM_DICTIONARY_WRITEABLE);
4347 image->BindPointer(GetPerInstInfo(), pPerInstInfoNode, sizeof(GenericsDictInfo));
4350 Dictionary * pDictionary = GetDictionary();
4351 if (pDictionary != NULL)
4355 if (!IsCanonicalMethodTable())
4357 // CanEagerBindToMethodTable would not work for targeted patching here. The dictionary
4358 // layout is sensitive to compilation order that can be changed by TP compatible changes.
4359 BOOL canSaveSlots = (image->GetModule() == GetCanonicalMethodTable()->GetLoaderModule());
4361 fIsWriteable = pDictionary->IsWriteable(image, canSaveSlots,
4362 GetNumGenericArgs(),
4364 GetClass()->GetDictionaryLayout());
4368 fIsWriteable = FALSE;
4374 image->StoreInternedStructure(pDictionary, GetInstAndDictSize(), DataImage::ITEM_DICTIONARY);
4378 image->StoreStructure(pDictionary, GetInstAndDictSize(), DataImage::ITEM_DICTIONARY_WRITEABLE);
4382 WORD numStaticFields = GetClass()->GetNumStaticFields();
4384 if (!IsCanonicalMethodTable() && HasGenericsStaticsInfo() && numStaticFields != 0)
4386 FieldDesc * pGenericsFieldDescs = GetGenericsStaticFieldDescs();
4388 for (DWORD i = 0; i < numStaticFields; i++)
4390 FieldDesc *pFld = pGenericsFieldDescs + i;
4391 pFld->PrecomputeNameHash();
4394 ZapStoredStructure * pFDNode = image->StoreStructure(pGenericsFieldDescs, sizeof(FieldDesc) * numStaticFields,
4395 DataImage::ITEM_GENERICS_STATIC_FIELDDESCS);
4397 for (DWORD i = 0; i < numStaticFields; i++)
4399 FieldDesc *pFld = pGenericsFieldDescs + i;
4400 pFld->SaveContents(image);
4401 if (pFld != pGenericsFieldDescs)
4402 image->BindPointer(pFld, pFDNode, (BYTE *)pFld - (BYTE *)pGenericsFieldDescs);
4406 // Allocate a ModuleCtorInfo entry in the NGEN image if necessary
4407 if (HasBoxedRegularStatics())
4409 image->GetModule()->GetZapModuleCtorInfo()->AddElement(this);
4412 // MethodTable WriteableData
4415 PTR_Const_MethodTableWriteableData pWriteableData = GetWriteableData_NoLogging();
4416 _ASSERTE(pWriteableData != NULL);
4417 if (pWriteableData != NULL)
4419 pWriteableData->Save(image, this, profilingFlags);
4422 LOG((LF_ZAP, LL_INFO10000, "MethodTable::Save %s (%p) complete.\n", GetDebugClassName(), this));
4424 // Save the EEClass at the same time as the method table if this is the canonical method table
4425 if (IsCanonicalMethodTable())
4426 GetClass()->Save(image, this);
4427 } // MethodTable::Save
4429 //==========================================================================
4430 // The NeedsRestore Computation.
4432 // WARNING: The NeedsRestore predicate on MethodTable and EEClass
4433 // MUST be computable immediately after we have loaded a type.
4434 // It must NOT depend on any additions or changes made to the
4435 // MethodTable as a result of compiling code, or
4436 // later steps such as prepopulating dictionaries.
4437 //==========================================================================
4438 BOOL MethodTable::ComputeNeedsRestore(DataImage *image, TypeHandleList *pVisited)
4443 // See comment in ComputeNeedsRestoreWorker
4444 PRECONDITION(GetLoaderModule()->HasNativeImage() || GetLoaderModule() == GetAppDomain()->ToCompilationDomain()->GetTargetModule());
4448 _ASSERTE(GetAppDomain()->IsCompilationDomain()); // only used at ngen time!
4450 if (GetWriteableData()->IsNeedsRestoreCached())
4452 return GetWriteableData()->GetCachedNeedsRestore();
4455 // We may speculatively assume that any types we've visited on this run of
4456 // the ComputeNeedsRestore algorithm don't need a restore. If they
4457 // do need a restore then we will check that when we first visit that method
4459 if (TypeHandleList::Exists(pVisited, TypeHandle(this)))
4461 pVisited->MarkBrokenCycle(this);
4464 TypeHandleList newVisited(this, pVisited);
4466 BOOL needsRestore = ComputeNeedsRestoreWorker(image, &newVisited);
4468 // Cache the results of running the algorithm.
4469 // We can only cache the result if we have not speculatively assumed
4470 // that any types are not NeedsRestore
4471 if (!newVisited.HasBrokenCycleMark())
4473 GetWriteableDataForWrite()->SetCachedNeedsRestore(needsRestore);
4477 _ASSERTE(pVisited != NULL);
4479 return needsRestore;
4482 //==========================================================================================
4483 BOOL MethodTable::ComputeNeedsRestoreWorker(DataImage *image, TypeHandleList *pVisited)
4485 STANDARD_VM_CONTRACT;
4488 // You should only call ComputeNeedsRestoreWorker on things being saved into
4489 // the current LoaderModule - the NeedsRestore flag should have been computed
4490 // for all items from NGEN images, and we should never compute NeedsRestore
4491 // on anything that is not related to an NGEN image. If this fails then
4492 // there is probably a CanEagerBindTo check missing as we trace through a
4493 // pointer from one data structure to another.
4494 // Trace back on the call stack and work out where this condition first fails.
4496 Module* myModule = GetLoaderModule();
4497 AppDomain* myAppDomain = GetAppDomain();
4498 CompilationDomain* myCompilationDomain = myAppDomain->ToCompilationDomain();
4499 Module* myCompilationModule = myCompilationDomain->GetTargetModule();
4501 if (myModule != myCompilationModule)
4503 _ASSERTE(!"You should only call ComputeNeedsRestoreWorker on things being saved into the current LoaderModule");
4507 if (g_CorCompileVerboseLevel == CORCOMPILE_VERBOSE)
4509 DefineFullyQualifiedNameForClassW();
4510 LPCWSTR name = GetFullyQualifiedNameForClassW(this);
4511 WszOutputDebugString(W("MethodTable "));
4512 WszOutputDebugString(name);
4513 WszOutputDebugString(W(" needs restore? "));
4515 if (g_CorCompileVerboseLevel >= CORCOMPILE_STATS && GetModule()->GetNgenStats())
4516 GetModule()->GetNgenStats()->MethodTableRestoreNumReasons[TotalMethodTables]++;
4518 #define UPDATE_RESTORE_REASON(ARG) \
4519 if (g_CorCompileVerboseLevel == CORCOMPILE_VERBOSE) \
4520 { WszOutputDebugString(W("Yes, ")); WszOutputDebugString(W(#ARG "\n")); } \
4521 if (g_CorCompileVerboseLevel >= CORCOMPILE_STATS && GetModule()->GetNgenStats()) \
4522 GetModule()->GetNgenStats()->MethodTableRestoreNumReasons[ARG]++;
4524 // The special method table for IL stubs has to be prerestored. Restore is not able to handle it
4525 // because of it does not have a token. In particular, this is a problem for /profiling native images.
4526 if (this == image->GetModule()->GetILStubCache()->GetStubMethodTable())
4531 // When profiling, we always want to perform the restore.
4532 if (GetAppDomain()->ToCompilationDomain()->m_fForceProfiling)
4534 UPDATE_RESTORE_REASON(ProfilingEnabled);
4538 if (DependsOnEquivalentOrForwardedStructs())
4540 UPDATE_RESTORE_REASON(ComImportStructDependenciesNeedRestore);
4544 if (!IsCanonicalMethodTable() && !image->CanPrerestoreEagerBindToMethodTable(GetCanonicalMethodTable(), pVisited))
4546 UPDATE_RESTORE_REASON(CanNotPreRestoreHardBindToCanonicalMethodTable);
4550 if (!image->CanEagerBindToModule(GetModule()))
4552 UPDATE_RESTORE_REASON(CrossAssembly);
4556 if (GetParentMethodTable())
4558 if (!image->CanPrerestoreEagerBindToMethodTable(GetParentMethodTable(), pVisited))
4560 UPDATE_RESTORE_REASON(CanNotPreRestoreHardBindToParentMethodTable);
4565 // Check per-inst pointers-to-dictionaries.
4566 if (!CanEagerBindToParentDictionaries(image, pVisited))
4568 UPDATE_RESTORE_REASON(CanNotHardBindToInstanceMethodTableChain);
4572 // Now check if the dictionary (if any) owned by this methodtable needs a restore.
4573 if (GetDictionary())
4575 if (GetDictionary()->ComputeNeedsRestore(image, pVisited, GetNumGenericArgs()))
4577 UPDATE_RESTORE_REASON(GenericsDictionaryNeedsRestore);
4582 // The interface chain is traversed without doing CheckRestore's. Thus
4583 // if any of the types in the inherited interfaces hierarchy need a restore
4584 // or are cross-module pointers then this methodtable will also need a restore.
4585 InterfaceMapIterator it = IterateInterfaceMap();
4588 if (!image->CanPrerestoreEagerBindToMethodTable(it.GetInterface(), pVisited))
4590 UPDATE_RESTORE_REASON(InterfaceIsGeneric);
4595 if (NeedsCrossModuleGenericsStaticsInfo())
4597 UPDATE_RESTORE_REASON(CrossModuleGenericsStatics);
4603 if(!image->CanPrerestoreEagerBindToTypeHandle(GetApproxArrayElementTypeHandle(), pVisited))
4605 UPDATE_RESTORE_REASON(ArrayElement);
4610 if (g_CorCompileVerboseLevel == CORCOMPILE_VERBOSE)
4612 WszOutputDebugString(W("No\n"));
4617 //==========================================================================================
4618 BOOL MethodTable::CanEagerBindToParentDictionaries(DataImage *image, TypeHandleList *pVisited)
4620 STANDARD_VM_CONTRACT;
4622 MethodTable *pChain = GetParentMethodTable();
4623 while (pChain != NULL)
4625 // This is for the case were the method table contains a pointer to
4626 // an inherited dictionary, e.g. given the case D : C, C : B<int>
4627 // where B<int> is in another module then D contains a pointer to the
4628 // dictionary for B<int>. Note that in this case we might still be
4629 // able to hadbind to C.
4630 if (pChain->HasInstantiation())
4632 if (!image->CanEagerBindToMethodTable(pChain, FALSE, pVisited) ||
4633 !image->CanHardBindToZapModule(pChain->GetLoaderModule()))
4638 pChain = pChain->GetParentMethodTable();
4643 //==========================================================================================
4644 BOOL MethodTable::NeedsCrossModuleGenericsStaticsInfo()
4646 STANDARD_VM_CONTRACT;
4648 return HasGenericsStaticsInfo() && !ContainsGenericVariables() && !IsSharedByGenericInstantiations() &&
4649 (Module::GetPreferredZapModuleForMethodTable(this) != GetLoaderModule());
4652 //==========================================================================================
4653 BOOL MethodTable::IsWriteable()
4655 STANDARD_VM_CONTRACT;
4657 #ifdef FEATURE_COMINTEROP
4658 // Dynamic expansion of interface map writes into method table
4659 // (see code:MethodTable::AddDynamicInterface)
4660 if (HasDynamicInterfaceMap())
4663 // CCW template is created lazily and when that happens, the
4664 // pointer is written directly into the method table.
4665 if (HasCCWTemplate())
4668 // RCW per-type data is created lazily at run-time.
4669 if (HasRCWPerTypeData())
4676 //==========================================================================================
4677 // This is used when non-canonical (i.e. duplicated) method tables
4678 // attempt to bind to items logically belonging to an EEClass or MethodTable.
4679 // i.e. the contract map in the EEClass and the generic dictionary stored in the canonical
4682 // We want to check if we can hard bind to the containing structure before
4683 // deciding to hardbind to the inside of it. This is because we may not be able
4684 // to hardbind to all EEClass and/or MethodTables even if they live in a hradbindable
4685 // target module. Thus we want to call CanEagerBindToMethodTable
4686 // to check we can hardbind to the containing structure.
4688 void HardBindOrClearDictionaryPointer(DataImage *image, MethodTable *pMT, void * p, SSIZE_T offset, bool isRelative)
4690 WRAPPER_NO_CONTRACT;
4692 if (image->CanEagerBindToMethodTable(pMT) &&
4693 image->CanHardBindToZapModule(pMT->GetLoaderModule()))
4697 image->FixupRelativePointerField(p, offset);
4701 image->FixupPointerField(p, offset);
4706 image->ZeroPointerField(p, offset);
4710 //==========================================================================================
4711 void MethodTable::Fixup(DataImage *image)
4716 PRECONDITION(IsFullyLoaded());
4720 LOG((LF_ZAP, LL_INFO10000, "MethodTable::Fixup %s\n", GetDebugClassName()));
4722 if (GetWriteableData()->IsFixedUp())
4725 BOOL needsRestore = NeedsRestore(image);
4726 LOG((LF_ZAP, LL_INFO10000, "MethodTable::Fixup %s (%p), needsRestore=%d\n", GetDebugClassName(), this, needsRestore));
4728 BOOL isCanonical = IsCanonicalMethodTable();
4730 Module *pZapModule = image->GetModule();
4732 MethodTable *pNewMT = (MethodTable *) image->GetImagePointer(this);
4734 // For canonical method tables, the pointer to the EEClass is never encoded as a fixup
4735 // even if this method table is not in its preferred zap module, i.e. the two are
4737 if (IsCanonicalMethodTable())
4739 // Pointer to EEClass
4740 image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pEEClass);
4745 // Encode m_pEEClassOrCanonMT
4747 MethodTable * pCanonMT = GetCanonicalMethodTable();
4749 ZapNode * pImport = NULL;
4750 if (image->CanEagerBindToMethodTable(pCanonMT))
4752 if (image->CanHardBindToZapModule(pCanonMT->GetLoaderModule()))
4754 // Pointer to canonical methodtable
4755 image->FixupPlainOrRelativeField(this, &MethodTable::m_pCanonMT, pCanonMT, UNION_METHODTABLE);
4759 // Pointer to lazy bound indirection cell to canonical methodtable
4760 pImport = image->GetTypeHandleImport(pCanonMT);
4765 // Pointer to eager bound indirection cell to canonical methodtable
4766 _ASSERTE(pCanonMT->IsTypicalTypeDefinition() ||
4767 !pCanonMT->ContainsGenericVariables());
4768 pImport = image->GetTypeHandleImport(pCanonMT);
4771 if (pImport != NULL)
4773 image->FixupPlainOrRelativeFieldToNode(this, &MethodTable::m_pCanonMT, pImport, UNION_INDIRECTION);
4777 image->FixupField(this, offsetof(MethodTable, m_pLoaderModule), pZapModule, 0, IMAGE_REL_BASED_RELPTR);
4780 image->FixupPointerField(this, offsetof(MethodTable, debug_m_szClassName));
4783 MethodTable * pParentMT = GetParentMethodTable();
4784 _ASSERTE(!pNewMT->m_pParentMethodTable.IsIndirectPtrMaybeNull());
4786 ZapRelocationType relocType;
4787 if (decltype(MethodTable::m_pParentMethodTable)::isRelative)
4789 relocType = IMAGE_REL_BASED_RELPTR;
4793 relocType = IMAGE_REL_BASED_PTR;
4796 if (pParentMT != NULL)
4799 // Encode m_pParentMethodTable
4801 ZapNode * pImport = NULL;
4802 if (image->CanEagerBindToMethodTable(pParentMT))
4804 if (image->CanHardBindToZapModule(pParentMT->GetLoaderModule()))
4806 _ASSERTE(!m_pParentMethodTable.IsIndirectPtr());
4807 image->FixupField(this, offsetof(MethodTable, m_pParentMethodTable), pParentMT, 0, relocType);
4811 pImport = image->GetTypeHandleImport(pParentMT);
4816 if (!pParentMT->IsCanonicalMethodTable())
4819 IMDInternalImport *pInternalImport = GetModule()->GetMDImport();
4822 pInternalImport->GetTypeDefProps(GetCl(),
4826 _ASSERTE(TypeFromToken(crExtends) == mdtTypeSpec);
4829 // Use unique cell for now since we are first going to set the parent method table to
4830 // approx one first, and then to the exact one later. This would mess up the shared cell.
4831 // It would be nice to clean it up to use the shared cell - we should set the parent method table
4832 // just once at the end.
4833 pImport = image->GetTypeHandleImport(pParentMT, this /* pUniqueId */);
4837 pImport = image->GetTypeHandleImport(pParentMT);
4841 if (pImport != NULL)
4843 image->FixupFieldToNode(this, offsetof(MethodTable, m_pParentMethodTable), pImport, FIXUP_POINTER_INDIRECTION, relocType);
4847 if (HasNonVirtualSlotsArray())
4849 TADDR ppNonVirtualSlots = GetNonVirtualSlotsPtr();
4850 PREFIX_ASSUME(ppNonVirtualSlots != NULL);
4851 image->FixupRelativePointerField(this, (BYTE *)ppNonVirtualSlots - (BYTE *)this);
4854 if (HasInterfaceMap())
4856 image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pInterfaceMap);
4858 FixupExtraInterfaceInfo(image);
4861 _ASSERTE(GetWriteableData());
4862 image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pWriteableData);
4863 m_pWriteableData.GetValue()->Fixup(image, this, needsRestore);
4865 #ifdef FEATURE_COMINTEROP
4868 GuidInfo **ppGuidInfo = GetGuidInfoPtr();
4869 if (*ppGuidInfo != NULL)
4871 image->FixupPointerField(this, (BYTE *)ppGuidInfo - (BYTE *)this);
4875 image->ZeroPointerField(this, (BYTE *)ppGuidInfo - (BYTE *)this);
4879 if (HasCCWTemplate())
4881 ComCallWrapperTemplate **ppTemplate = GetCCWTemplatePtr();
4882 image->ZeroPointerField(this, (BYTE *)ppTemplate - (BYTE *)this);
4885 if (HasRCWPerTypeData())
4887 // it would be nice to save these but the impact on mscorlib.ni size is prohibitive
4888 RCWPerTypeData **ppData = GetRCWPerTypeDataPtr();
4889 image->ZeroPointerField(this, (BYTE *)ppData - (BYTE *)this);
4891 #endif // FEATURE_COMINTEROP
4898 _ASSERTE((pNewMT->GetFlag(enum_flag_IsZapped) == 0));
4899 pNewMT->SetFlag(enum_flag_IsZapped);
4901 _ASSERTE((pNewMT->GetFlag(enum_flag_IsPreRestored) == 0));
4903 pNewMT->SetFlag(enum_flag_IsPreRestored);
4907 // If the canonical method table lives in a different loader module
4908 // then just zero out the entries and copy them across from the canonical
4909 // vtable on restore.
4911 // Note the canonical method table will be the same as the current method table
4912 // if the method table is not a generic instantiation.
4914 if (HasDispatchMapSlot())
4916 TADDR pSlot = GetMultipurposeSlotPtr(enum_flag_HasDispatchMapSlot, c_DispatchMapSlotOffsets);
4917 DispatchMap * pDispatchMap = RelativePointer<PTR_DispatchMap>::GetValueAtPtr(pSlot);
4918 image->FixupField(this, pSlot - (TADDR)this, pDispatchMap, 0, IMAGE_REL_BASED_RelativePointer);
4919 pDispatchMap->Fixup(image);
4922 if (HasModuleOverride())
4924 image->FixupModulePointer(this, GetModuleOverridePtr());
4928 VtableIndirectionSlotIterator it = IterateVtableIndirectionSlots();
4931 if (VTableIndir_t::isRelative)
4933 image->FixupRelativePointerField(this, it.GetOffsetFromMethodTable());
4937 image->FixupPointerField(this, it.GetOffsetFromMethodTable());
4942 unsigned numVTableSlots = GetNumVtableSlots();
4943 for (unsigned slotNumber = 0; slotNumber < numVTableSlots; slotNumber++)
4946 // Find the method desc from the slot.
4948 MethodDesc *pMD = GetMethodDescForSlot(slotNumber);
4949 _ASSERTE(pMD != NULL);
4950 pMD->CheckRestore();
4955 if (slotNumber < GetNumVirtuals())
4957 // Virtual slots live in chunks pointed to by vtable indirections
4959 slotBase = (PVOID) GetVtableIndirections()[GetIndexOfVtableIndirection(slotNumber)].GetValueMaybeNull();
4960 slotOffset = GetIndexAfterVtableIndirection(slotNumber) * sizeof(PCODE);
4962 else if (HasSingleNonVirtualSlot())
4964 // Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member,
4965 // except when there is only one in which case it lives in the optional member itself
4967 _ASSERTE(slotNumber == GetNumVirtuals());
4968 slotBase = (PVOID) this;
4969 slotOffset = (BYTE *)GetSlotPtr(slotNumber) - (BYTE *)this;
4973 // Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member
4975 _ASSERTE(HasNonVirtualSlotsArray());
4976 slotBase = (PVOID) GetNonVirtualSlotsArray();
4977 slotOffset = (slotNumber - GetNumVirtuals()) * sizeof(PCODE);
4980 // Attempt to make the slot point directly at the prejitted code.
4981 // Note that changes to this logic may require or enable an update to CanInternVtableChunk.
4982 // If a necessary update is not made, an assert will fire in ZapStoredStructure::Save.
4984 if (pMD->GetMethodTable() == this)
4986 ZapRelocationType relocType;
4987 if (slotNumber >= GetNumVirtuals())
4988 relocType = IMAGE_REL_BASED_RelativePointer;
4990 relocType = IMAGE_REL_BASED_PTR;
4992 pMD->FixupSlot(image, slotBase, slotOffset, relocType);
4999 // Static method should be in the owning methodtable only.
5000 _ASSERTE(!pMD->IsStatic());
5002 MethodTable *pSourceMT = isCanonical
5003 ? GetParentMethodTable()
5004 : GetCanonicalMethodTable();
5006 // It must be inherited from the parent or copied from the canonical
5007 _ASSERTE(pSourceMT->GetMethodDescForSlot(slotNumber) == pMD);
5010 if (image->CanEagerBindToMethodDesc(pMD) && pMD->GetLoaderModule() == pZapModule)
5012 pMD->FixupSlot(image, slotBase, slotOffset);
5016 if (!pMD->IsGenericMethodDefinition())
5018 ZapNode * importThunk = image->GetVirtualImportThunk(pMD->GetMethodTable(), pMD, slotNumber);
5019 // On ARM, make sure that the address to the virtual thunk that we write into the
5020 // vtable "chunk" has the Thumb bit set.
5021 image->FixupFieldToNode(slotBase, slotOffset, importThunk ARM_ARG(THUMB_CODE));
5025 // Virtual generic methods don't/can't use their vtable slot
5026 image->ZeroPointerField(slotBase, slotOffset);
5033 // Fixup Interface map
5036 InterfaceMapIterator it = IterateInterfaceMap();
5039 image->FixupMethodTablePointer(GetInterfaceMap(), &it.GetInterfaceInfo()->m_pMethodTable);
5044 image->HardBindTypeHandlePointer(this, offsetof(MethodTable, m_ElementTypeHnd));
5048 // Fixup per-inst pointers for this method table
5051 if (HasPerInstInfo())
5053 // Fixup the pointer to the per-inst table
5054 image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pPerInstInfo);
5056 for (MethodTable *pChain = this; pChain != NULL; pChain = pChain->GetParentMethodTable())
5058 if (pChain->HasInstantiation())
5060 DWORD dictNum = pChain->GetNumDicts()-1;
5062 // If we can't hardbind then the value will be copied down from
5063 // the parent upon restore.
5065 // We special-case the dictionary for this method table because we must always
5066 // hard bind to it even if it's not in its preferred zap module
5067 size_t sizeDict = sizeof(PerInstInfoElem_t);
5071 if (PerInstInfoElem_t::isRelative)
5073 image->FixupRelativePointerField(GetPerInstInfo(), dictNum * sizeDict);
5077 image->FixupPointerField(GetPerInstInfo(), dictNum * sizeDict);
5082 HardBindOrClearDictionaryPointer(image, pChain, GetPerInstInfo(), dictNum * sizeDict, PerInstInfoElem_t::isRelative);
5088 // Fixup instantiation+dictionary for this method table (if any)
5090 if (GetDictionary())
5092 LOG((LF_JIT, LL_INFO10000, "GENERICS: Fixup dictionary for MT %s\n", GetDebugClassName()));
5094 // CanEagerBindToMethodTable would not work for targeted patching here. The dictionary
5095 // layout is sensitive to compilation order that can be changed by TP compatible changes.
5096 BOOL canSaveSlots = !IsCanonicalMethodTable() && (image->GetModule() == GetCanonicalMethodTable()->GetLoaderModule());
5098 // See comment on Dictionary::Fixup
5099 GetDictionary()->Fixup(image,
5102 GetNumGenericArgs(),
5104 GetClass()->GetDictionaryLayout());
5107 // Fixup per-inst statics info
5108 if (HasGenericsStaticsInfo())
5110 GenericsStaticsInfo *pInfo = GetGenericsStaticsInfo();
5112 image->FixupRelativePointerField(this, (BYTE *)&pInfo->m_pFieldDescs - (BYTE *)this);
5115 for (DWORD i = 0; i < GetClass()->GetNumStaticFields(); i++)
5117 FieldDesc *pFld = GetGenericsStaticFieldDescs() + i;
5122 if (NeedsCrossModuleGenericsStaticsInfo())
5124 MethodTableWriteableData * pNewWriteableData = (MethodTableWriteableData *)image->GetImagePointer(m_pWriteableData.GetValue());
5125 CrossModuleGenericsStaticsInfo * pNewCrossModuleGenericsStaticsInfo = pNewWriteableData->GetCrossModuleGenericsStaticsInfo();
5127 pNewCrossModuleGenericsStaticsInfo->m_DynamicTypeID = pInfo->m_DynamicTypeID;
5129 image->ZeroPointerField(m_pWriteableData.GetValue(), sizeof(MethodTableWriteableData) + offsetof(CrossModuleGenericsStaticsInfo, m_pModuleForStatics));
5131 pNewMT->SetFlag(enum_flag_StaticsMask_IfGenericsThenCrossModule);
5136 _ASSERTE(!NeedsCrossModuleGenericsStaticsInfo());
5140 LOG((LF_ZAP, LL_INFO10000, "MethodTable::Fixup %s (%p) complete\n", GetDebugClassName(), this));
5142 // If this method table is canonical (one-to-one with EEClass) then fix up the EEClass also
5144 GetClass()->Fixup(image, this);
5146 // Mark method table as fixed-up
5147 GetWriteableDataForWrite()->SetFixedUp();
5149 } // MethodTable::Fixup
5151 //==========================================================================================
5152 void MethodTableWriteableData::Save(DataImage *image, MethodTable *pMT, DWORD profilingFlags) const
5154 STANDARD_VM_CONTRACT;
5156 SIZE_T size = sizeof(MethodTableWriteableData);
5158 // MethodTableWriteableData is followed by optional CrossModuleGenericsStaticsInfo in NGen images
5159 if (pMT->NeedsCrossModuleGenericsStaticsInfo())
5160 size += sizeof(CrossModuleGenericsStaticsInfo);
5162 DataImage::ItemKind kindWriteable = DataImage::ITEM_METHOD_TABLE_DATA_COLD_WRITEABLE;
5163 if ((profilingFlags & (1 << WriteMethodTableWriteableData)) != 0)
5164 kindWriteable = DataImage::ITEM_METHOD_TABLE_DATA_HOT_WRITEABLE;
5166 ZapStoredStructure * pNode = image->StoreStructure(NULL, size, kindWriteable);
5167 image->BindPointer(this, pNode, 0);
5168 image->CopyData(pNode, this, sizeof(MethodTableWriteableData));
5171 //==========================================================================================
5172 void MethodTableWriteableData::Fixup(DataImage *image, MethodTable *pMT, BOOL needsRestore)
5174 STANDARD_VM_CONTRACT;
5176 image->ZeroField(this, offsetof(MethodTableWriteableData, m_hExposedClassObject), sizeof(m_hExposedClassObject));
5178 MethodTableWriteableData *pNewNgenPrivateMT = (MethodTableWriteableData*) image->GetImagePointer(this);
5179 _ASSERTE(pNewNgenPrivateMT != NULL);
5181 pNewNgenPrivateMT->m_dwFlags &= ~(enum_flag_RemotingConfigChecked |
5182 enum_flag_CriticalTypePrepared);
5185 pNewNgenPrivateMT->m_dwFlags |= (enum_flag_UnrestoredTypeKey |
5186 enum_flag_Unrestored |
5187 enum_flag_HasApproxParent |
5188 enum_flag_IsNotFullyLoaded);
5191 pNewNgenPrivateMT->m_dwLastVerifedGCCnt = (DWORD)-1;
5195 #endif // !DACCESS_COMPILE
5197 #endif // FEATURE_NATIVE_IMAGE_GENERATION
5199 #ifdef FEATURE_PREJIT
5201 //==========================================================================================
5202 void MethodTable::CheckRestore()
5206 if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS;
5207 if (FORBIDGC_LOADER_USE_ENABLED()) GC_NOTRIGGER; else GC_TRIGGERS;
5211 if (!IsFullyLoaded())
5213 ClassLoader::EnsureLoaded(this);
5214 _ASSERTE(IsFullyLoaded());
5217 g_IBCLogger.LogMethodTableAccess(this);
5220 #else // !FEATURE_PREJIT
5221 //==========================================================================================
5222 void MethodTable::CheckRestore()
5224 LIMITED_METHOD_CONTRACT;
5226 #endif // !FEATURE_PREJIT
5229 #ifndef DACCESS_COMPILE
5231 BOOL SatisfiesClassConstraints(TypeHandle instanceTypeHnd, TypeHandle typicalTypeHnd,
5232 const InstantiationContext *pInstContext);
5234 static VOID DoAccessibilityCheck(MethodTable *pAskingMT, MethodTable *pTargetMT, UINT resIDWhy, BOOL checkTargetTypeTransparency)
5243 StaticAccessCheckContext accessContext(NULL, pAskingMT);
5245 if (!ClassLoader::CanAccessClass(&accessContext,
5246 pTargetMT, //the desired class
5247 pTargetMT->GetAssembly(), //the desired class's assembly
5248 *AccessCheckOptions::s_pNormalAccessChecks,
5249 checkTargetTypeTransparency
5252 SString displayName;
5253 pAskingMT->GetAssembly()->GetDisplayName(displayName);
5256 // Error string is either E_ACCESSDENIED which requires the type name of the target, vs
5257 // a more normal TypeLoadException which displays the requesting type.
5258 _ASSERTE((resIDWhy == (UINT)E_ACCESSDENIED) || (resIDWhy == (UINT)IDS_CLASSLOAD_INTERFACE_NO_ACCESS));
5259 TypeString::AppendType(targetName, TypeHandle((resIDWhy == (UINT)E_ACCESSDENIED) ? pTargetMT : pAskingMT));
5261 COMPlusThrow(kTypeLoadException, resIDWhy, targetName.GetUnicode(), displayName.GetUnicode());
5266 VOID DoAccessibilityCheckForConstraint(MethodTable *pAskingMT, TypeHandle thConstraint, UINT resIDWhy)
5275 if (thConstraint.IsTypeDesc())
5277 TypeDesc *pTypeDesc = thConstraint.AsTypeDesc();
5279 if (pTypeDesc->IsGenericVariable())
5281 // since the metadata respresents a generic type param constraint as an index into
5282 // the declaring type's list of generic params, it is structurally impossible
5283 // to express a violation this way. So there's no check to be done here.
5286 if (pTypeDesc->HasTypeParam())
5288 DoAccessibilityCheckForConstraint(pAskingMT, pTypeDesc->GetTypeParam(), resIDWhy);
5292 COMPlusThrow(kTypeLoadException, E_ACCESSDENIED);
5298 DoAccessibilityCheck(pAskingMT, thConstraint.GetMethodTable(), resIDWhy, FALSE);
5303 VOID DoAccessibilityCheckForConstraints(MethodTable *pAskingMT, TypeVarTypeDesc *pTyVar, UINT resIDWhy)
5312 DWORD numConstraints;
5313 TypeHandle *pthConstraints = pTyVar->GetCachedConstraints(&numConstraints);
5314 for (DWORD cidx = 0; cidx < numConstraints; cidx++)
5316 TypeHandle thConstraint = pthConstraints[cidx];
5318 DoAccessibilityCheckForConstraint(pAskingMT, thConstraint, resIDWhy);
5323 // Recursive worker that pumps the transitive closure of a type's dependencies to the specified target level.
5324 // Dependencies include:
5328 // - canonical type, for non-canonical instantiations
5329 // - typical type, for non-typical instantiations
5333 // pVisited - used to prevent endless recursion in the case of cyclic dependencies
5335 // level - target level to pump to - must be CLASS_DEPENDENCIES_LOADED or CLASS_LOADED
5337 // if CLASS_DEPENDENCIES_LOADED, all transitive dependencies are resolved to their
5340 // if CLASS_LOADED, all type-safety checks are done on the type and all its transitive
5341 // dependencies. Note that for the CLASS_LOADED case, some types may be left
5342 // on the pending list rather that pushed to CLASS_LOADED in the case of cyclic
5343 // dependencies - the root caller must handle this.
5345 // pfBailed - if we or one of our depedencies bails early due to cyclic dependencies, we
5346 // must set *pfBailed to TRUE. Otherwise, we must *leave it unchanged* (thus, the
5347 // boolean acts as a cumulative OR.)
5349 // pPending - if one of our dependencies bailed, the type cannot yet be promoted to CLASS_LOADED
5350 // as the dependencies will be checked later and may fail a security check then.
5351 // Instead, DoFullyLoad() will add the type to the pending list - the root caller
5352 // is responsible for promoting the type after the full transitive closure has been
5353 // walked. Note that it would be just as correct to always defer to the pending list -
5354 // however, that is a little less performant.
5358 // Closure of locals necessary for implementing CheckForEquivalenceAndFullyLoadType.
5359 // Used so that we can have one valuetype walking algorithm used for type equivalence walking of the parameters of the method.
5360 struct DoFullyLoadLocals
5362 DoFullyLoadLocals(DFLPendingList *pPendingParam, ClassLoadLevel levelParam, MethodTable *pMT, Generics::RecursionGraph *pVisited) :
5363 newVisited(pVisited, TypeHandle(pMT)),
5364 pPending(pPendingParam),
5367 #ifdef FEATURE_COMINTEROP
5368 , fHasEquivalentStructParameter(FALSE)
5370 , fHasTypeForwarderDependentStructParameter(FALSE)
5371 , fDependsOnEquivalentOrForwardedStructs(FALSE)
5373 LIMITED_METHOD_CONTRACT;
5376 Generics::RecursionGraph newVisited;
5377 DFLPendingList * const pPending;
5378 const ClassLoadLevel level;
5380 #ifdef FEATURE_COMINTEROP
5381 BOOL fHasEquivalentStructParameter;
5383 BOOL fHasTypeForwarderDependentStructParameter;
5384 BOOL fDependsOnEquivalentOrForwardedStructs;
5387 #if defined(FEATURE_TYPEEQUIVALENCE) && !defined(DACCESS_COMPILE)
5388 static void CheckForEquivalenceAndFullyLoadType(Module *pModule, mdToken token, Module *pDefModule, mdToken defToken, const SigParser *ptr, SigTypeContext *pTypeContext, void *pData)
5398 SigPointer sigPtr(*ptr);
5400 DoFullyLoadLocals *pLocals = (DoFullyLoadLocals *)pData;
5402 if (IsTypeDefEquivalent(defToken, pDefModule))
5404 TypeHandle th = sigPtr.GetTypeHandleThrowing(pModule, pTypeContext, ClassLoader::LoadTypes, (ClassLoadLevel)(pLocals->level - 1));
5405 CONSISTENCY_CHECK(!th.IsNull());
5407 th.DoFullyLoad(&pLocals->newVisited, pLocals->level, pLocals->pPending, &pLocals->fBailed, NULL);
5408 pLocals->fDependsOnEquivalentOrForwardedStructs = TRUE;
5409 pLocals->fHasEquivalentStructParameter = TRUE;
5413 #endif // defined(FEATURE_TYPEEQUIVALENCE) && !defined(DACCESS_COMPILE)
5415 struct CheckForTypeForwardedTypeRefParameterLocals
5418 BOOL * pfTypeForwarderFound;
5421 // Callback for code:WalkValueTypeTypeDefOrRefs of type code:PFN_WalkValueTypeTypeDefOrRefs
5422 static void CheckForTypeForwardedTypeRef(
5423 mdToken tkTypeDefOrRef,
5426 STANDARD_VM_CONTRACT;
5428 CheckForTypeForwardedTypeRefParameterLocals * pLocals = (CheckForTypeForwardedTypeRefParameterLocals *)pData;
5430 // If a type forwarder was found, return - we're done
5431 if ((pLocals->pfTypeForwarderFound != NULL) && (*(pLocals->pfTypeForwarderFound)))
5434 // Only type ref's are interesting
5435 if (TypeFromToken(tkTypeDefOrRef) == mdtTypeRef)
5437 Module * pDummyModule;
5439 ClassLoader::ResolveTokenToTypeDefThrowing(
5445 pLocals->pfTypeForwarderFound);
5449 typedef void (* PFN_WalkValueTypeTypeDefOrRefs)(mdToken tkTypeDefOrRef, void * pData);
5451 // Call 'function' for ValueType in the signature.
5452 void WalkValueTypeTypeDefOrRefs(
5453 const SigParser * pSig,
5454 PFN_WalkValueTypeTypeDefOrRefs function,
5457 STANDARD_VM_CONTRACT;
5459 SigParser sig(*pSig);
5462 IfFailThrow(sig.GetElemType(&typ));
5466 case ELEMENT_TYPE_VALUETYPE:
5468 IfFailThrow(sig.GetToken(&token));
5469 function(token, pData);
5472 case ELEMENT_TYPE_GENERICINST:
5473 // Process and skip generic type
5474 WalkValueTypeTypeDefOrRefs(&sig, function, pData);
5475 IfFailThrow(sig.SkipExactlyOne());
5477 // Get number of parameters
5479 IfFailThrow(sig.GetData(&argCnt));
5480 while (argCnt-- != 0)
5481 { // Process and skip generic parameter
5482 WalkValueTypeTypeDefOrRefs(&sig, function, pData);
5483 IfFailThrow(sig.SkipExactlyOne());
5491 // Callback for code:MethodDesc::WalkValueTypeParameters (of type code:WalkValueTypeParameterFnPtr)
5492 static void CheckForTypeForwardedTypeRefParameter(
5495 Module * pDefModule,
5497 const SigParser *ptr,
5498 SigTypeContext * pTypeContext,
5501 STANDARD_VM_CONTRACT;
5503 DoFullyLoadLocals * pLocals = (DoFullyLoadLocals *)pData;
5505 // If a type forwarder was found, return - we're done
5506 if (pLocals->fHasTypeForwarderDependentStructParameter)
5509 CheckForTypeForwardedTypeRefParameterLocals locals;
5510 locals.pModule = pModule;
5511 locals.pfTypeForwarderFound = &pLocals->fHasTypeForwarderDependentStructParameter; // By not passing NULL here, we determine if there is a type forwarder involved.
5513 WalkValueTypeTypeDefOrRefs(ptr, CheckForTypeForwardedTypeRef, &locals);
5515 if (pLocals->fHasTypeForwarderDependentStructParameter)
5516 pLocals->fDependsOnEquivalentOrForwardedStructs = TRUE;
5519 // Callback for code:MethodDesc::WalkValueTypeParameters (of type code:WalkValueTypeParameterFnPtr)
5520 static void LoadTypeDefOrRefAssembly(
5523 Module * pDefModule,
5525 const SigParser *ptr,
5526 SigTypeContext * pTypeContext,
5529 STANDARD_VM_CONTRACT;
5531 DoFullyLoadLocals * pLocals = (DoFullyLoadLocals *)pData;
5533 CheckForTypeForwardedTypeRefParameterLocals locals;
5534 locals.pModule = pModule;
5535 locals.pfTypeForwarderFound = NULL; // By passing NULL here, we simply resolve the token to TypeDef.
5537 WalkValueTypeTypeDefOrRefs(ptr, CheckForTypeForwardedTypeRef, &locals);
5540 #endif //!DACCESS_COMPILE
5542 void MethodTable::DoFullyLoad(Generics::RecursionGraph * const pVisited, const ClassLoadLevel level, DFLPendingList * const pPending,
5543 BOOL * const pfBailed, const InstantiationContext * const pInstContext)
5545 STANDARD_VM_CONTRACT;
5547 _ASSERTE(level == CLASS_LOADED || level == CLASS_DEPENDENCIES_LOADED);
5548 _ASSERTE(pfBailed != NULL);
5549 _ASSERTE(!(level == CLASS_LOADED && pPending == NULL));
5552 #ifndef DACCESS_COMPILE
5554 if (Generics::RecursionGraph::HasSeenType(pVisited, TypeHandle(this)))
5560 if (GetLoadLevel() >= level)
5565 if (level == CLASS_LOADED)
5567 UINT numTH = pPending->Count();
5568 TypeHandle *pTypeHndPending = pPending->Table();
5569 for (UINT idxPending = 0; idxPending < numTH; idxPending++)
5571 if (pTypeHndPending[idxPending] == this)
5580 BEGIN_SO_INTOLERANT_CODE(GetThread());
5581 // First ensure that we're loaded to just below CLASS_DEPENDENCIES_LOADED
5582 ClassLoader::EnsureLoaded(this, (ClassLoadLevel) (level-1));
5584 CONSISTENCY_CHECK(IsRestored_NoLogging());
5585 CONSISTENCY_CHECK(!HasApproxParent());
5588 DoFullyLoadLocals locals(pPending, level, this, pVisited);
5590 bool fNeedsSanityChecks = !IsZapped(); // Validation has been performed for NGened classes already
5592 #ifdef FEATURE_READYTORUN
5593 if (fNeedsSanityChecks)
5595 Module * pModule = GetModule();
5597 // No sanity checks for ready-to-run compiled images if possible
5598 if (pModule->IsReadyToRun() && pModule->GetReadyToRunInfo()->SkipTypeValidation())
5599 fNeedsSanityChecks = false;
5603 bool fNeedAccessChecks = (level == CLASS_LOADED) &&
5604 fNeedsSanityChecks &&
5605 IsTypicalTypeDefinition();
5607 TypeHandle typicalTypeHnd;
5609 if (!IsZapped()) // Validation has been performed for NGened classes already
5611 // Fully load the typical instantiation. Make sure that this is done before loading other dependencies
5612 // as the recursive generics detection algorithm needs to examine typical instantiations of the types
5614 if (!IsTypicalTypeDefinition())
5616 typicalTypeHnd = ClassLoader::LoadTypeDefThrowing(GetModule(), GetCl(),
5617 ClassLoader::ThrowIfNotFound, ClassLoader::PermitUninstDefOrRef, tdNoTypes,
5618 (ClassLoadLevel) (level - 1));
5619 CONSISTENCY_CHECK(!typicalTypeHnd.IsNull());
5620 typicalTypeHnd.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5622 else if (level == CLASS_DEPENDENCIES_LOADED && HasInstantiation())
5624 // This is a typical instantiation of a generic type. When attaining CLASS_DEPENDENCIES_LOADED, the
5625 // recursive inheritance graph (ECMA part.II Section 9.2) will be constructed and checked for "expanding
5626 // cycles" to detect infinite recursion, e.g. A<T> : B<A<A<T>>>.
5628 // The dependencies loaded by this method (parent type, implemented interfaces, generic arguments)
5629 // ensure that we will generate the finite instantiation closure as defined in ECMA. This load level
5630 // is not being attained under lock so it's not possible to use TypeVarTypeDesc to represent graph
5631 // nodes because multiple threads trying to fully load types from the closure at the same time would
5632 // interfere with each other. In addition, the graph is only used for loading and can be discarded
5633 // when the closure is fully loaded (TypeVarTypeDesc need to stay).
5635 // The graph is represented by Generics::RecursionGraph instances organized in a linked list with
5636 // each of them holding part of the graph. They live on the stack and are cleaned up automatically
5637 // before returning from DoFullyLoad.
5639 if (locals.newVisited.CheckForIllegalRecursion())
5641 // An expanding cycle was detected, this type is part of a closure that is defined recursively.
5642 IMDInternalImport* pInternalImport = GetModule()->GetMDImport();
5643 GetModule()->GetAssembly()->ThrowTypeLoadException(pInternalImport, GetCl(), IDS_CLASSLOAD_GENERICTYPE_RECURSIVE);
5648 // Fully load the parent
5649 MethodTable *pParentMT = GetParentMethodTable();
5653 pParentMT->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5655 if (fNeedAccessChecks)
5657 if (!IsComObjectType()) //RCW's are special - they are manufactured by the runtime and derive from the non-public type System.__ComObject
5659 // A transparenct type should not be allowed to derive from a critical type.
5660 // However since this has never been enforced before we have many classes that
5661 // violate this rule. Enforcing it now will be a breaking change.
5662 DoAccessibilityCheck(this, pParentMT, E_ACCESSDENIED, /* checkTargetTypeTransparency*/ FALSE);
5667 // Fully load the interfaces
5668 MethodTable::InterfaceMapIterator it = IterateInterfaceMap();
5671 it.GetInterface()->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5673 if (fNeedAccessChecks)
5675 if (IsInterfaceDeclaredOnClass(it.GetIndex())) // only test directly implemented interfaces (it's
5676 // legal for an inherited interface to be private.)
5678 // A transparenct type should not be allowed to implement a critical interface.
5679 // However since this has never been enforced before we have many classes that
5680 // violate this rule. Enforcing it now will be a breaking change.
5681 DoAccessibilityCheck(this, it.GetInterface(), IDS_CLASSLOAD_INTERFACE_NO_ACCESS, /* checkTargetTypeTransparency*/ FALSE);
5686 // Fully load the generic arguments
5687 Instantiation inst = GetInstantiation();
5688 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
5690 inst[i].DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5693 // Fully load the canonical methodtable
5694 if (!IsCanonicalMethodTable())
5696 GetCanonicalMethodTable()->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, NULL);
5699 if (fNeedsSanityChecks)
5701 // Fully load the exact field types for value type fields
5702 // Note that MethodTableBuilder::InitializeFieldDescs() loads the type of the
5703 // field only upto level CLASS_LOAD_APPROXPARENTS.
5704 FieldDesc *pField = GetApproxFieldDescListRaw();
5705 FieldDesc *pFieldEnd = pField + GetNumStaticFields() + GetNumIntroducedInstanceFields();
5707 while (pField < pFieldEnd)
5709 g_IBCLogger.LogFieldDescsAccess(pField);
5711 if (pField->GetFieldType() == ELEMENT_TYPE_VALUETYPE)
5713 TypeHandle th = pField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level - 1));
5714 CONSISTENCY_CHECK(!th.IsNull());
5716 th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5718 if (fNeedAccessChecks)
5720 DoAccessibilityCheck(this, th.GetMethodTable(), E_ACCESSDENIED, FALSE);
5727 // Fully load the exact field types for generic value type fields
5728 if (HasGenericsStaticsInfo())
5730 FieldDesc *pGenStaticField = GetGenericsStaticFieldDescs();
5731 FieldDesc *pGenStaticFieldEnd = pGenStaticField + GetNumStaticFields();
5732 while (pGenStaticField < pGenStaticFieldEnd)
5734 if (pGenStaticField->GetFieldType() == ELEMENT_TYPE_VALUETYPE)
5736 TypeHandle th = pGenStaticField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level - 1));
5737 CONSISTENCY_CHECK(!th.IsNull());
5739 th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5741 // The accessibility check is not necessary for generic fields. The generic fields are copy
5742 // of the regular fields, the only difference is that they have the exact type.
5749 #ifdef FEATURE_NATIVE_IMAGE_GENERATION
5750 // Fully load the types of fields associated with a field marshaler when ngenning
5751 if (HasLayout() && GetAppDomain()->IsCompilationDomain() && !IsZapped())
5753 FieldMarshaler* pFM = this->GetLayoutInfo()->GetFieldMarshalers();
5754 UINT numReferenceFields = this->GetLayoutInfo()->GetNumCTMFields();
5756 while (numReferenceFields--)
5759 FieldDesc *pMarshalerField = pFM->GetFieldDesc();
5761 // If the fielddesc pointer here is a token tagged pointer, then the field marshaler that we are
5762 // working with will not need to be saved into this ngen image. And as that was the reason that we
5763 // needed to load this type, thus we will not need to fully load the type associated with this field desc.
5765 if (!CORCOMPILE_IS_POINTER_TAGGED(pMarshalerField))
5767 TypeHandle th = pMarshalerField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level-1));
5768 CONSISTENCY_CHECK(!th.IsNull());
5770 th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext);
5772 // The accessibility check is not used here to prevent functional differences between ngen and non-ngen scenarios.
5773 ((BYTE*&)pFM) += MAXFIELDMARSHALERSIZE;
5776 #endif //FEATURE_NATIVE_IMAGE_GENERATION
5778 // Fully load exact parameter types for value type parameters opted into equivalence. This is required in case GC is
5779 // triggered during prestub. GC needs to know where references are on the stack and if the parameter (as read from
5780 // the method signature) is a structure, it relies on the loaded type to get the layout information from. For ordinary
5781 // structures we are guaranteed to have loaded the type before entering prestub - the caller must have loaded it.
5782 // However due to type equivalence, the caller may work with a different type than what's in the method signature.
5784 // We deal with situation by eagerly loading types that may cause these problems, i.e. value types in signatures of
5785 // methods introduced by this type. To avoid the perf hit for scenarios without type equivalence, we only preload
5786 // structures that marked as type equivalent. In the no-PIA world
5787 // these structures are called "local types" and are usually generated automatically by the compiler. Note that there
5788 // is a related logic in code:CompareTypeDefsForEquivalence that declares two tokens corresponding to structures as
5789 // equivalent based on an extensive set of equivalency checks..
5791 // To address this situation for NGENed types and methods, we prevent pre-restoring them - see code:ComputeNeedsRestoreWorker
5792 // for details. That forces them to go through the final stages of loading at run-time and hit the same code below.
5794 if ((level == CLASS_LOADED)
5795 && (GetCl() != mdTypeDefNil)
5796 && !ContainsGenericVariables()
5798 || DependsOnEquivalentOrForwardedStructs()
5800 || TRUE // Always load types in debug builds so that we calculate fDependsOnEquivalentOrForwardedStructs all of the time
5805 MethodTable::IntroducedMethodIterator itMethods(this, FALSE);
5806 for (; itMethods.IsValid(); itMethods.Next())
5808 MethodDesc * pMD = itMethods.GetMethodDesc();
5810 if (IsCompilationProcess())
5812 locals.fHasTypeForwarderDependentStructParameter = FALSE;
5815 pMD->WalkValueTypeParameters(this, CheckForTypeForwardedTypeRefParameter, &locals);
5820 EX_END_CATCH(RethrowTerminalExceptions);
5822 // This marks the class as needing restore.
5823 if (locals.fHasTypeForwarderDependentStructParameter && !pMD->IsZapped())
5824 pMD->SetHasForwardedValuetypeParameter();
5826 else if (pMD->IsZapped() && pMD->HasForwardedValuetypeParameter())
5828 pMD->WalkValueTypeParameters(this, LoadTypeDefOrRefAssembly, NULL);
5829 locals.fDependsOnEquivalentOrForwardedStructs = TRUE;
5832 #ifdef FEATURE_TYPEEQUIVALENCE
5833 if (!pMD->DoesNotHaveEquivalentValuetypeParameters() && pMD->IsVirtual())
5835 locals.fHasEquivalentStructParameter = FALSE;
5836 pMD->WalkValueTypeParameters(this, CheckForEquivalenceAndFullyLoadType, &locals);
5837 if (!locals.fHasEquivalentStructParameter && !IsZapped())
5838 pMD->SetDoesNotHaveEquivalentValuetypeParameters();
5841 #ifdef FEATURE_PREJIT
5842 if (!IsZapped() && pMD->IsVirtual() && !IsCompilationProcess() )
5844 pMD->PrepareForUseAsADependencyOfANativeImage();
5847 #endif //FEATURE_TYPEEQUIVALENCE
5851 _ASSERTE(!IsZapped() || !IsCanonicalMethodTable() || (level != CLASS_LOADED) || ((!!locals.fDependsOnEquivalentOrForwardedStructs) == (!!DependsOnEquivalentOrForwardedStructs())));
5852 if (locals.fDependsOnEquivalentOrForwardedStructs)
5856 // if this type declares a method that has an equivalent or type forwarded structure as a parameter type,
5857 // make sure we come here and pre-load these structure types in NGENed cases as well
5858 SetDependsOnEquivalentOrForwardedStructs();
5862 // The rules for constraint cycles are same as rules for acccess checks
5863 if (fNeedAccessChecks)
5865 // Check for cyclical class constraints
5867 Instantiation formalParams = GetInstantiation();
5869 for (DWORD i = 0; i < formalParams.GetNumArgs(); i++)
5871 BOOL Bounded(TypeVarTypeDesc *tyvar, DWORD depth);
5873 TypeVarTypeDesc *pTyVar = formalParams[i].AsGenericVariable();
5874 pTyVar->LoadConstraints(CLASS_DEPENDENCIES_LOADED);
5875 if (!Bounded(pTyVar, formalParams.GetNumArgs()))
5877 COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_VAR_CONSTRAINTS);
5880 DoAccessibilityCheckForConstraints(this, pTyVar, E_ACCESSDENIED);
5884 // Check for cyclical method constraints
5886 if (GetCl() != mdTypeDefNil) // Make sure this is actually a metadata type!
5888 MethodTable::IntroducedMethodIterator itMethods(this, FALSE);
5889 for (; itMethods.IsValid(); itMethods.Next())
5891 MethodDesc * pMD = itMethods.GetMethodDesc();
5893 if (pMD->IsGenericMethodDefinition() && pMD->IsTypicalMethodDefinition())
5895 BOOL fHasCircularClassConstraints = TRUE;
5896 BOOL fHasCircularMethodConstraints = TRUE;
5898 pMD->LoadConstraintsForTypicalMethodDefinition(&fHasCircularClassConstraints, &fHasCircularMethodConstraints, CLASS_DEPENDENCIES_LOADED);
5900 if (fHasCircularClassConstraints)
5902 COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_VAR_CONSTRAINTS);
5904 if (fHasCircularMethodConstraints)
5906 COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_MVAR_CONSTRAINTS);
5917 if (LoggingOn(LF_CLASSLOADER, LL_INFO10000))
5920 TypeString::AppendTypeDebug(name, this);
5921 LOG((LF_CLASSLOADER, LL_INFO10000, "PHASEDLOAD: Completed full dependency load of type %S\n", name.GetUnicode()));
5927 case CLASS_DEPENDENCIES_LOADED:
5928 SetIsDependenciesLoaded();
5930 #if defined(FEATURE_COMINTEROP) && !defined(DACCESS_COMPILE)
5931 if (WinRTSupported() && g_fEEStarted && !ContainsIntrospectionOnlyTypes())
5933 _ASSERTE(GetAppDomain() != NULL);
5935 AppDomain* pAppDomain = GetAppDomain();
5936 if (pAppDomain->CanCacheWinRTTypeByGuid(this))
5938 pAppDomain->CacheWinRTTypeByGuid(this);
5941 #endif // FEATURE_COMINTEROP && !DACCESS_COMPILE
5946 if (!IsZapped() && // Constraint checks have been performed for NGened classes already
5947 !IsTypicalTypeDefinition() &&
5948 !IsSharedByGenericInstantiations())
5950 TypeHandle thThis = TypeHandle(this);
5952 // If we got here, we about to mark a generic instantiation as fully loaded. Before we do so,
5953 // check to see if has constraints that aren't being satisfied.
5954 SatisfiesClassConstraints(thThis, typicalTypeHnd, pInstContext);
5960 // We couldn't complete security checks on some dependency because he is already being processed by one of our callers.
5961 // Do not mark this class fully loaded yet. Put him on the pending list and he will be marked fully loaded when
5962 // everything unwinds.
5966 TypeHandle *pTHPending = pPending->AppendThrowing();
5967 *pTHPending = TypeHandle(this);
5971 // Finally, mark this method table as fully loaded
5977 _ASSERTE(!"Can't get here.");
5982 if (level >= CLASS_DEPENDENCIES_LOADED && IsArray())
5984 // The array type should be loaded, if template method table is loaded
5985 // See also: ArrayBase::SetArrayMethodTable, ArrayBase::SetArrayMethodTableForLargeObject
5986 TypeHandle th = ClassLoader::LoadArrayTypeThrowing(GetApproxArrayElementTypeHandle(),
5987 GetInternalCorElementType(),
5989 ClassLoader::LoadTypes,
5991 _ASSERTE(th.IsTypeDesc() && th.IsArray());
5992 _ASSERTE(!(level == CLASS_LOADED && !th.IsFullyLoaded()));
5995 END_SO_INTOLERANT_CODE;
5997 #endif //!DACCESS_COMPILE
5998 } //MethodTable::DoFullyLoad
6001 #ifndef DACCESS_COMPILE
6003 #ifdef FEATURE_PREJIT
6005 // For a MethodTable in a native image, decode sufficient encoded pointers
6006 // that the TypeKey for this type is recoverable.
6008 // For instantiated generic types, we need the generic type arguments,
6009 // the EEClass pointer, and its Module pointer.
6010 // (For non-generic types, the EEClass and Module are always hard bound).
6012 // The process is applied recursively e.g. consider C<D<string>[]>.
6013 // It is guaranteed to terminate because types cannot contain cycles in their structure.
6015 // Also note that no lock is required; the process of restoring this information is idempotent.
6016 // (Note the atomic action at the end though)
6018 void MethodTable::DoRestoreTypeKey()
6027 // If we have an indirection cell then restore the m_pCanonMT and its module pointer
6029 if (union_getLowBits(m_pCanonMT.GetValue()) == UNION_INDIRECTION)
6031 Module::RestoreMethodTablePointerRaw((MethodTable **)(union_getPointer(m_pCanonMT.GetValue())),
6032 GetLoaderModule(), CLASS_LOAD_UNRESTORED);
6035 MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable();
6036 if (pMTForModule->HasModuleOverride())
6038 Module::RestoreModulePointer(pMTForModule->GetModuleOverridePtr(), pMTForModule->GetLoaderModule());
6044 // Restore array element type handle
6046 Module::RestoreTypeHandlePointerRaw(GetApproxArrayElementTypeHandlePtr(),
6047 GetLoaderModule(), CLASS_LOAD_UNRESTORED);
6050 // Next restore the instantiation and recurse
6051 Instantiation inst = GetInstantiation();
6052 for (DWORD j = 0; j < inst.GetNumArgs(); j++)
6054 Module::RestoreTypeHandlePointer(&inst.GetRawArgs()[j], GetLoaderModule(), CLASS_LOAD_UNRESTORED);
6057 FastInterlockAnd(&(EnsureWritablePages(GetWriteableDataForWrite())->m_dwFlags), ~MethodTableWriteableData::enum_flag_UnrestoredTypeKey);
6060 //==========================================================================================
6061 // For a MethodTable in a native image, apply Restore actions
6062 // * Decode any encoded pointers
6063 // * Instantiate static handles
6064 // * Propagate Restore to EEClass
6065 // For array method tables, Restore MUST BE IDEMPOTENT as it can be entered from multiple threads
6066 // For other classes, restore cannot be entered twice because the loader maintains locks
6068 // When you actually restore the MethodTable for a generic type, the generic
6069 // dictionary is restored. That means:
6070 // * Parent slots in the PerInstInfo are restored by this method eagerly. They are copied down from the
6071 // parent in code:ClassLoader.LoadExactParentAndInterfacesTransitively
6072 // * Instantiation parameters in the dictionary are restored eagerly when the type is restored. These are
6073 // either hard bound pointers, or tagged tokens (fixups).
6074 // * All other dictionary entries are either hard bound pointers or they are NULL (they are cleared when we
6075 // freeze the Ngen image). They are *never* tagged tokens.
6076 void MethodTable::Restore()
6082 PRECONDITION(IsZapped());
6083 PRECONDITION(!IsRestored_NoLogging());
6084 PRECONDITION(!HasUnrestoredTypeKey());
6088 g_IBCLogger.LogMethodTableAccess(this);
6090 STRESS_LOG1(LF_ZAP, LL_INFO10000, "MethodTable::Restore: Restoring type %pT\n", this);
6091 LOG((LF_ZAP, LL_INFO10000,
6092 "Restoring methodtable %s at " FMT_ADDR ".\n", GetDebugClassName(), DBG_ADDR(this)));
6094 // Class pointer should be restored already (in DoRestoreTypeKey)
6095 CONSISTENCY_CHECK(IsClassPointerValid());
6097 // If this isn't the canonical method table itself, then restore the canonical method table
6098 // We will load the canonical method table to level EXACTPARENTS in LoadExactParents
6099 if (!IsCanonicalMethodTable())
6101 ClassLoader::EnsureLoaded(GetCanonicalMethodTable(), CLASS_LOAD_APPROXPARENTS);
6105 // Restore parent method table
6107 Module::RestoreMethodTablePointer(&m_pParentMethodTable, GetLoaderModule(), CLASS_LOAD_APPROXPARENTS);
6110 // Restore interface classes
6112 InterfaceMapIterator it = IterateInterfaceMap();
6115 // Just make sure that approximate interface is loaded. LoadExactParents fill in the exact interface later.
6116 MethodTable * pIftMT;
6117 pIftMT = it.GetInterfaceInfo()->GetApproxMethodTable(GetLoaderModule());
6118 _ASSERTE(pIftMT != NULL);
6121 if (HasCrossModuleGenericStaticsInfo())
6123 MethodTableWriteableData * pWriteableData = GetWriteableDataForWrite();
6124 CrossModuleGenericsStaticsInfo * pInfo = pWriteableData->GetCrossModuleGenericsStaticsInfo();
6126 EnsureWritablePages(pWriteableData, sizeof(MethodTableWriteableData) + sizeof(CrossModuleGenericsStaticsInfo));
6128 if (IsDomainNeutral())
6130 // If we are domain neutral, we have to use constituent of the instantiation to store
6131 // statics. We need to ensure that we can create DomainModule in all domains
6132 // that this instantiations may get activated in. PZM is good approximation of such constituent.
6133 Module * pModuleForStatics = Module::GetPreferredZapModuleForMethodTable(this);
6135 pInfo->m_pModuleForStatics = pModuleForStatics;
6136 pInfo->m_DynamicTypeID = pModuleForStatics->AllocateDynamicEntry(this);
6140 pInfo->m_pModuleForStatics = GetLoaderModule();
6144 LOG((LF_ZAP, LL_INFO10000,
6145 "Restored methodtable %s at " FMT_ADDR ".\n", GetDebugClassName(), DBG_ADDR(this)));
6147 // This has to be last!
6150 #endif // FEATURE_PREJIT
6152 #ifdef FEATURE_COMINTEROP
6154 //==========================================================================================
6155 BOOL MethodTable::IsExtensibleRCW()
6157 WRAPPER_NO_CONTRACT;
6158 _ASSERTE(GetClass());
6159 return IsComObjectType() && !GetClass()->IsComImport();
6162 //==========================================================================================
6163 OBJECTHANDLE MethodTable::GetOHDelegate()
6165 WRAPPER_NO_CONTRACT;
6166 _ASSERTE(GetClass());
6167 return GetClass()->GetOHDelegate();
6170 //==========================================================================================
6171 void MethodTable::SetOHDelegate (OBJECTHANDLE _ohDelegate)
6173 LIMITED_METHOD_CONTRACT;
6174 _ASSERTE(GetClass());
6175 g_IBCLogger.LogEEClassCOWTableAccess(this);
6176 GetClass_NoLogging()->SetOHDelegate(_ohDelegate);
6179 //==========================================================================================
6180 // Helper to skip over COM class in the hierarchy
6181 MethodTable* MethodTable::GetComPlusParentMethodTable()
6191 MethodTable* pParent = GetParentMethodTable();
6193 if (pParent && pParent->IsComImport())
6195 if (pParent->IsProjectedFromWinRT())
6197 // skip all Com Import classes
6200 pParent = pParent->GetParentMethodTable();
6201 _ASSERTE(pParent != NULL);
6202 }while(pParent->IsComImport());
6204 // Now we have either System.__ComObject or WindowsRuntime.RuntimeClass
6205 if (pParent != g_pBaseCOMObject)
6212 // Skip the single ComImport class we expect
6213 _ASSERTE(pParent->GetParentMethodTable() != NULL);
6214 pParent = pParent->GetParentMethodTable();
6216 _ASSERTE(!pParent->IsComImport());
6218 // Skip over System.__ComObject, expect System.MarshalByRefObject
6219 pParent=pParent->GetParentMethodTable();
6220 _ASSERTE(pParent != NULL);
6221 _ASSERTE(pParent->GetParentMethodTable() != NULL);
6222 _ASSERTE(pParent->GetParentMethodTable() == g_pObjectClass);
6228 BOOL MethodTable::IsWinRTObjectType()
6230 LIMITED_METHOD_CONTRACT;
6232 // Try to determine if this object represents a WindowsRuntime object - i.e. is either
6233 // ProjectedFromWinRT or derived from a class that is
6235 if (!IsComObjectType())
6238 // Ideally we'd compute this once in BuildMethodTable and track it with another
6239 // flag, but we're now out of bits on m_dwFlags, and this is used very rarely
6240 // so for now we'll just recompute it when necessary.
6241 MethodTable* pMT = this;
6244 if (pMT->IsProjectedFromWinRT())
6246 // Found a WinRT COM object
6249 if (pMT->IsComImport())
6251 // Found a class that is actually imported from COM but not WinRT
6252 // this is definitely a non-WinRT COM object
6255 pMT = pMT->GetParentMethodTable();
6256 }while(pMT != NULL);
6261 #endif // FEATURE_COMINTEROP
6263 #endif // !DACCESS_COMPILE
6265 //==========================================================================================
6266 // Return a pointer to the dictionary for an instantiated type
6267 // Return NULL if not instantiated
6268 PTR_Dictionary MethodTable::GetDictionary()
6270 LIMITED_METHOD_DAC_CONTRACT;
6272 if (HasInstantiation())
6274 // The instantiation for this class is stored in the type slots table
6275 // *after* any inherited slots
6276 TADDR base = dac_cast<TADDR>(&(GetPerInstInfo()[GetNumDicts()-1]));
6277 return PerInstInfoElem_t::GetValueMaybeNullAtPtr(base);
6285 //==========================================================================================
6286 // As above, but assert if an instantiated type is not restored
6287 Instantiation MethodTable::GetInstantiation()
6289 LIMITED_METHOD_CONTRACT;
6291 if (HasInstantiation())
6293 PTR_GenericsDictInfo pDictInfo = GetGenericsDictInfo();
6294 TADDR base = dac_cast<TADDR>(&(GetPerInstInfo()[pDictInfo->m_wNumDicts-1]));
6295 return Instantiation(PerInstInfoElem_t::GetValueMaybeNullAtPtr(base)->GetInstantiation(), pDictInfo->m_wNumTyPars);
6299 return Instantiation();
6303 //==========================================================================================
6304 // Obtain instantiation from an instantiated type or a pointer to the
6305 // element type of an array
6306 Instantiation MethodTable::GetClassOrArrayInstantiation()
6308 LIMITED_METHOD_CONTRACT;
6311 return GetArrayInstantiation();
6314 return GetInstantiation();
6318 //==========================================================================================
6319 Instantiation MethodTable::GetArrayInstantiation()
6321 LIMITED_METHOD_CONTRACT;
6323 _ASSERTE(IsArray());
6324 return Instantiation((TypeHandle *)&m_ElementTypeHnd, 1);
6327 //==========================================================================================
6328 CorElementType MethodTable::GetInternalCorElementType()
6330 LIMITED_METHOD_CONTRACT;
6333 // This should not touch the EEClass, at least not in the
6334 // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE.
6336 g_IBCLogger.LogMethodTableAccess(this);
6340 switch (GetFlag(enum_flag_Category_ElementTypeMask))
6342 case enum_flag_Category_Array:
6343 ret = ELEMENT_TYPE_ARRAY;
6346 case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray:
6347 ret = ELEMENT_TYPE_SZARRAY;
6350 case enum_flag_Category_ValueType:
6351 ret = ELEMENT_TYPE_VALUETYPE;
6354 case enum_flag_Category_PrimitiveValueType:
6355 // This path should only be taken for the builtin mscorlib types
6356 // and primitive valuetypes
6357 ret = GetClass()->GetInternalCorElementType();
6358 _ASSERTE((ret != ELEMENT_TYPE_CLASS) &&
6359 (ret != ELEMENT_TYPE_VALUETYPE));
6363 ret = ELEMENT_TYPE_CLASS;
6367 // DAC may be targetting a dump; dumps do not guarantee you can retrieve the EEClass from
6368 // the MethodTable so this is not expected to work in a DAC build.
6369 #if defined(_DEBUG) && !defined(DACCESS_COMPILE)
6370 if (IsRestored_NoLogging())
6372 PTR_EEClass pClass = GetClass_NoLogging();
6373 if (ret != pClass->GetInternalCorElementType())
6375 _ASSERTE(!"Mismatched results in MethodTable::GetInternalCorElementType");
6378 #endif // defined(_DEBUG) && !defined(DACCESS_COMPILE)
6382 //==========================================================================================
6383 CorElementType MethodTable::GetVerifierCorElementType()
6385 LIMITED_METHOD_CONTRACT;
6388 // This should not touch the EEClass, at least not in the
6389 // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE.
6391 g_IBCLogger.LogMethodTableAccess(this);
6395 switch (GetFlag(enum_flag_Category_ElementTypeMask))
6397 case enum_flag_Category_Array:
6398 ret = ELEMENT_TYPE_ARRAY;
6401 case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray:
6402 ret = ELEMENT_TYPE_SZARRAY;
6405 case enum_flag_Category_ValueType:
6406 ret = ELEMENT_TYPE_VALUETYPE;
6409 case enum_flag_Category_PrimitiveValueType:
6411 // This is the only difference from MethodTable::GetInternalCorElementType()
6413 if (IsTruePrimitive() || IsEnum())
6414 ret = GetClass()->GetInternalCorElementType();
6416 ret = ELEMENT_TYPE_VALUETYPE;
6420 ret = ELEMENT_TYPE_CLASS;
6427 //==========================================================================================
6428 CorElementType MethodTable::GetSignatureCorElementType()
6430 LIMITED_METHOD_CONTRACT;
6433 // This should not touch the EEClass, at least not in the
6434 // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE.
6436 g_IBCLogger.LogMethodTableAccess(this);
6440 switch (GetFlag(enum_flag_Category_ElementTypeMask))
6442 case enum_flag_Category_Array:
6443 ret = ELEMENT_TYPE_ARRAY;
6446 case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray:
6447 ret = ELEMENT_TYPE_SZARRAY;
6450 case enum_flag_Category_ValueType:
6451 ret = ELEMENT_TYPE_VALUETYPE;
6454 case enum_flag_Category_PrimitiveValueType:
6456 // This is the only difference from MethodTable::GetInternalCorElementType()
6458 if (IsTruePrimitive())
6459 ret = GetClass()->GetInternalCorElementType();
6461 ret = ELEMENT_TYPE_VALUETYPE;
6465 ret = ELEMENT_TYPE_CLASS;
6472 #ifndef DACCESS_COMPILE
6474 //==========================================================================================
6475 void MethodTable::SetInternalCorElementType (CorElementType _NormType)
6477 WRAPPER_NO_CONTRACT;
6481 case ELEMENT_TYPE_CLASS:
6482 _ASSERTE(!IsArray());
6485 case ELEMENT_TYPE_VALUETYPE:
6486 SetFlag(enum_flag_Category_ValueType);
6487 _ASSERTE(GetFlag(enum_flag_Category_Mask) == enum_flag_Category_ValueType);
6490 SetFlag(enum_flag_Category_PrimitiveValueType);
6491 _ASSERTE(GetFlag(enum_flag_Category_Mask) == enum_flag_Category_PrimitiveValueType);
6495 GetClass_NoLogging()->SetInternalCorElementType(_NormType);
6496 _ASSERTE(GetInternalCorElementType() == _NormType);
6499 #endif // !DACCESS_COMPILE
6501 #ifdef FEATURE_COMINTEROP
6502 #ifndef DACCESS_COMPILE
6504 #ifndef CROSSGEN_COMPILE
6505 BOOL MethodTable::IsLegalWinRTType(OBJECTREF *poref)
6512 PRECONDITION(IsProtectedByGCFrame(poref));
6513 PRECONDITION(CheckPointer(poref));
6514 PRECONDITION((*poref) != NULL);
6520 BASEARRAYREF arrayRef = (BASEARRAYREF)(*poref);
6522 // WinRT array must be one-dimensional array with 0 lower-bound
6523 if (arrayRef->GetRank() == 1 && arrayRef->GetLowerBoundsPtr()[0] == 0)
6525 MethodTable *pElementMT = ((BASEARRAYREF)(*poref))->GetArrayElementTypeHandle().GetMethodTable();
6527 // Element must be a legal WinRT type and not an array
6528 if (!pElementMT->IsArray() && pElementMT->IsLegalNonArrayWinRTType())
6536 // Non-Array version of IsLegalNonArrayWinRTType
6537 return IsLegalNonArrayWinRTType();
6540 #endif //#ifndef CROSSGEN_COMPILE
6542 BOOL MethodTable::IsLegalNonArrayWinRTType()
6549 PRECONDITION(!IsArray()); // arrays are not fully described by MethodTable
6553 if (WinRTTypeNameConverter::IsWinRTPrimitiveType(this))
6556 // Attributes are not legal
6557 MethodTable *pParentMT = GetParentMethodTable();
6558 if (pParentMT == MscorlibBinder::GetExistingClass(CLASS__ATTRIBUTE))
6563 bool fIsRedirected = false;
6564 if (!IsProjectedFromWinRT() && !IsExportedToWinRT())
6566 // If the type is not primitive and not coming from .winmd, it can still be legal if
6567 // it's one of the redirected types (e.g. IEnumerable<T>).
6568 if (!WinRTTypeNameConverter::IsRedirectedType(this))
6571 fIsRedirected = true;
6579 ApproxFieldDescIterator fieldIterator(this, ApproxFieldDescIterator::INSTANCE_FIELDS);
6580 for (FieldDesc *pFD = fieldIterator.Next(); pFD != NULL; pFD = fieldIterator.Next())
6582 TypeHandle thField = pFD->GetFieldTypeHandleThrowing(CLASS_LOAD_EXACTPARENTS);
6584 if (thField.IsTypeDesc())
6587 MethodTable *pFieldMT = thField.GetMethodTable();
6589 // the only allowed reference types are System.String and types projected from WinRT value types
6590 if (!pFieldMT->IsValueType() && !pFieldMT->IsString())
6592 WinMDAdapter::RedirectedTypeIndex index;
6593 if (!WinRTTypeNameConverter::ResolveRedirectedType(pFieldMT, &index))
6596 WinMDAdapter::WinMDTypeKind typeKind;
6597 WinMDAdapter::GetRedirectedTypeInfo(index, NULL, NULL, NULL, NULL, NULL, &typeKind);
6598 if (typeKind != WinMDAdapter::WinMDTypeKind_Struct && typeKind != WinMDAdapter::WinMDTypeKind_Enum)
6602 if (!pFieldMT->IsLegalNonArrayWinRTType())
6608 if (IsInterface() || IsDelegate() || (IsValueType() && fIsRedirected))
6610 // interfaces, delegates, and redirected structures can be generic - check the instantiation
6611 if (HasInstantiation())
6613 Instantiation inst = GetInstantiation();
6614 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
6616 // arrays are not allowed as generic arguments
6617 if (inst[i].IsArrayType())
6620 if (inst[i].IsTypeDesc())
6623 if (!inst[i].AsMethodTable()->IsLegalNonArrayWinRTType())
6630 // generic structures and runtime clases are not supported
6631 if (HasInstantiation())
6638 //==========================================================================================
6639 // Returns the default WinRT interface if this is a WinRT class, NULL otherwise.
6640 MethodTable *MethodTable::GetDefaultWinRTInterface()
6650 if (!IsProjectedFromWinRT() && !IsExportedToWinRT())
6656 // System.Runtime.InteropServices.WindowsRuntime.RuntimeClass is weird
6657 // It is ProjectedFromWinRT but isn't really a WinRT class
6658 if (this == g_pBaseRuntimeClass)
6661 WinRTClassFactory *pFactory = ::GetComClassFactory(this)->AsWinRTClassFactory();
6662 return pFactory->GetDefaultInterface();
6665 #endif // !DACCESS_COMPILE
6666 #endif // FEATURE_COMINTEROP
6668 #ifdef FEATURE_COMINTEROP
6669 #ifndef DACCESS_COMPILE
6671 WORD GetEquivalentMethodSlot(MethodTable * pOldMT, MethodTable * pNewMT, WORD wMTslot, BOOL *pfFound)
6678 MethodDesc * pMDRet = NULL;
6681 // Get the COM vtable slot corresponding to the given MT slot
6683 if (pOldMT->IsSparseForCOMInterop())
6685 wVTslot = pOldMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(wMTslot);
6692 // If the other MT is not sparse, we can return the COM slot directly
6693 if (!pNewMT->IsSparseForCOMInterop())
6695 if (wVTslot < pNewMT->GetNumVirtuals())
6701 // Otherwise we iterate over all virtuals in the other MT trying to find a match
6702 for (WORD wSlot = 0; wSlot < pNewMT->GetNumVirtuals(); wSlot++)
6704 if (wVTslot == pNewMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(wSlot))
6711 _ASSERTE(!*pfFound);
6714 #endif // #ifdef DACCESS_COMPILE
6715 #endif // #ifdef FEATURE_COMINTEROP
6717 //==========================================================================================
6719 MethodTable::FindEncodedMapDispatchEntry(
6722 DispatchMapEntry * pEntry)
6725 // NOTE: LookupDispatchMapType may or may not throw. Currently, it
6726 // should never throw because lazy interface restore is disabled.
6730 PRECONDITION(CheckPointer(pEntry));
6731 PRECONDITION(typeID != TYPE_ID_THIS_CLASS);
6734 CONSISTENCY_CHECK(HasDispatchMap());
6736 MethodTable * dispatchTokenType = GetThread()->GetDomain()->LookupType(typeID);
6738 // Search for an exact type match.
6740 DispatchMap::EncodedMapIterator it(this);
6741 for (; it.IsValid(); it.Next())
6743 DispatchMapEntry * pCurEntry = it.Entry();
6744 if (pCurEntry->GetSlotNumber() == slotNumber)
6746 MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID());
6747 if (pCurEntryType == dispatchTokenType)
6749 *pEntry = *pCurEntry;
6756 // Repeat the search if any variance is involved, allowing a CanCastTo match. (We do
6757 // this in a separate pass because we want to avoid touching the type
6758 // to see if it has variance or not)
6760 // NOTE: CERs are not guaranteed for interfaces with co- and contra-variance involved.
6761 if (dispatchTokenType->HasVariance() || dispatchTokenType->HasTypeEquivalence())
6763 DispatchMap::EncodedMapIterator it(this);
6764 for (; it.IsValid(); it.Next())
6766 DispatchMapEntry * pCurEntry = it.Entry();
6767 if (pCurEntry->GetSlotNumber() == slotNumber)
6769 #ifndef DACCESS_COMPILE
6770 MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID());
6771 //@TODO: This is currently not guaranteed to work without throwing,
6772 //@TODO: even with lazy interface restore disabled.
6773 if (dispatchTokenType->HasVariance() &&
6774 pCurEntryType->CanCastByVarianceToInterfaceOrDelegate(dispatchTokenType, NULL))
6776 *pEntry = *pCurEntry;
6780 if (dispatchTokenType->HasInstantiation() && dispatchTokenType->HasTypeEquivalence())
6782 if (dispatchTokenType->IsEquivalentTo(pCurEntryType))
6784 *pEntry = *pCurEntry;
6788 #endif // !DACCESS_COMPILE
6790 #if !defined(DACCESS_COMPILE) && defined(FEATURE_TYPEEQUIVALENCE)
6791 if (this->HasTypeEquivalence() &&
6792 !dispatchTokenType->HasInstantiation() &&
6793 dispatchTokenType->HasTypeEquivalence() &&
6794 dispatchTokenType->GetClass()->IsEquivalentType())
6796 _ASSERTE(dispatchTokenType->IsInterface());
6797 MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID());
6799 if (pCurEntryType->IsEquivalentTo(dispatchTokenType))
6801 MethodDesc * pMD = dispatchTokenType->GetMethodDescForSlot(slotNumber);
6802 _ASSERTE(FitsIn<WORD>(slotNumber));
6803 BOOL fNewSlotFound = FALSE;
6804 DWORD newSlot = GetEquivalentMethodSlot(
6807 static_cast<WORD>(slotNumber),
6809 if (fNewSlotFound && (newSlot == pCurEntry->GetSlotNumber()))
6811 MethodDesc * pNewMD = pCurEntryType->GetMethodDescForSlot(newSlot);
6814 MetaSig msignew(pNewMD);
6816 if (MetaSig::CompareMethodSigs(msig, msignew, FALSE))
6818 *pEntry = *pCurEntry;
6828 } // MethodTable::FindEncodedMapDispatchEntry
6830 //==========================================================================================
6831 BOOL MethodTable::FindDispatchEntryForCurrentType(UINT32 typeID,
6833 DispatchMapEntry *pEntry)
6839 PRECONDITION(CheckPointer(pEntry));
6840 PRECONDITION(typeID != TYPE_ID_THIS_CLASS);
6845 if (HasDispatchMap())
6847 fRes = FindEncodedMapDispatchEntry(
6848 typeID, slotNumber, pEntry);
6854 //==========================================================================================
6855 BOOL MethodTable::FindDispatchEntry(UINT32 typeID,
6857 DispatchMapEntry *pEntry)
6864 POSTCONDITION(!RETVAL || pEntry->IsValid());
6865 PRECONDITION(typeID != TYPE_ID_THIS_CLASS);
6868 // Start at the current type and work up the inheritance chain
6869 MethodTable *pCurMT = this;
6870 UINT32 iCurInheritanceChainDelta = 0;
6871 while (pCurMT != NULL)
6873 g_IBCLogger.LogMethodTableAccess(pCurMT);
6874 if (pCurMT->FindDispatchEntryForCurrentType(
6875 typeID, slotNumber, pEntry))
6879 pCurMT = pCurMT->GetParentMethodTable();
6880 iCurInheritanceChainDelta++;
6885 //==========================================================================================
6887 // 1. Typed (interface) contract
6888 // a. To non-virtual implementation (NYI). Just
6889 // return the DispatchSlot as the implementation
6890 // b. Mapped virtually to virtual slot on 'this'. Need to
6891 // further resolve the new 'this' virtual slot.
6892 // 2. 'this' contract
6893 // a. To non-virtual implementation. Return the DispatchSlot
6894 // as the implementation.
6895 // b. Mapped virtually to another virtual slot. Need to further
6896 // resolve the new slot on 'this'.
6898 MethodTable::FindDispatchImpl(
6901 DispatchSlot * pImplSlot)
6908 PRECONDITION(CheckPointer(pImplSlot));
6909 POSTCONDITION(!RETVAL || !pImplSlot->IsNull() || IsComObjectType());
6912 LOG((LF_LOADER, LL_INFO10000, "SD: MT::FindDispatchImpl: searching %s.\n", GetClass()->GetDebugClassName()));
6914 ///////////////////////////////////
6915 // 1. Typed (interface) contract
6917 INDEBUG(MethodTable *dbg_pMTTok = NULL; dbg_pMTTok = this;)
6918 DispatchMapEntry declEntry;
6919 DispatchMapEntry implEntry;
6921 #ifndef DACCESS_COMPILE
6922 if (typeID != TYPE_ID_THIS_CLASS)
6924 INDEBUG(dbg_pMTTok = GetThread()->GetDomain()->LookupType(typeID));
6926 if (!FindDispatchEntry(typeID, slotNumber, &e))
6928 // A call to an array thru IList<T> (or IEnumerable<T> or ICollection<T>) has to be handled specially.
6929 // These interfaces are "magic" (mostly due to working set concerned - they are created on demand internally
6930 // even though semantically, these are static interfaces.)
6932 // NOTE: CERs are not currently supported with generic array interfaces.
6935 // At this, we know that we're trying to cast an array to an interface and that the normal static lookup failed.
6937 // FindDispatchImpl assumes that the cast is legal so we should be able to assume now that it is a valid
6938 // IList<T> call thru an array.
6940 // Get the MT of IList<T> or IReadOnlyList<T>
6941 MethodTable *pIfcMT = GetThread()->GetDomain()->LookupType(typeID);
6943 // Quick sanity check
6944 if (!(pIfcMT->HasInstantiation()))
6946 _ASSERTE(!"Should not have gotten here. If you did, it's probably because multiple interface instantiation hasn't been checked in yet. This code only works on top of that.");
6950 // Get the type of T (as in IList<T>)
6951 TypeHandle theT = pIfcMT->GetInstantiation()[0];
6953 // Figure out which method of IList<T> the caller requested.
6954 MethodDesc * pIfcMD = pIfcMT->GetMethodDescForSlot(slotNumber);
6956 // Retrieve the corresponding method of SZArrayHelper. This is the guy that will actually execute.
6957 // This method will be an instantiation of a generic method. I.e. if the caller requested
6958 // IList<T>.Meth(), he will actually be diverted to SZArrayHelper.Meth<T>().
6959 MethodDesc * pActualImplementor = GetActualImplementationForArrayGenericIListOrIReadOnlyListMethod(pIfcMD, theT);
6961 // Now, construct a DispatchSlot to return in *pImplSlot
6962 DispatchSlot ds(pActualImplementor->GetMethodEntryPoint());
6964 if (pImplSlot != NULL)
6973 // This contract is not implemented by this class or any parent class.
6977 /////////////////////////////////
6978 // 1.1. Update the typeID and slotNumber so that the full search can commense below
6979 typeID = TYPE_ID_THIS_CLASS;
6980 slotNumber = e.GetTargetSlotNumber();
6982 #endif // !DACCESS_COMPILE
6984 //////////////////////////////////
6985 // 2. 'this' contract
6987 // Just grab the target out of the vtable
6988 *pImplSlot = GetRestoredSlot(slotNumber);
6990 // Successfully determined the target for the given target
6994 //==========================================================================================
6995 DispatchSlot MethodTable::FindDispatchSlot(UINT32 typeID, UINT32 slotNumber)
6997 WRAPPER_NO_CONTRACT;
6998 STATIC_CONTRACT_SO_TOLERANT;
6999 DispatchSlot implSlot(NULL);
7000 FindDispatchImpl(typeID, slotNumber, &implSlot);
7004 //==========================================================================================
7005 DispatchSlot MethodTable::FindDispatchSlot(DispatchToken tok)
7015 return FindDispatchSlot(tok.GetTypeID(), tok.GetSlotNumber());
7018 #ifndef DACCESS_COMPILE
7020 //==========================================================================================
7021 DispatchSlot MethodTable::FindDispatchSlotForInterfaceMD(MethodDesc *pMD)
7023 WRAPPER_NO_CONTRACT;
7024 CONSISTENCY_CHECK(CheckPointer(pMD));
7025 CONSISTENCY_CHECK(pMD->IsInterface());
7026 return FindDispatchSlotForInterfaceMD(TypeHandle(pMD->GetMethodTable()), pMD);
7029 //==========================================================================================
7030 DispatchSlot MethodTable::FindDispatchSlotForInterfaceMD(TypeHandle ownerType, MethodDesc *pMD)
7032 WRAPPER_NO_CONTRACT;
7033 CONSISTENCY_CHECK(!ownerType.IsNull());
7034 CONSISTENCY_CHECK(CheckPointer(pMD));
7035 CONSISTENCY_CHECK(pMD->IsInterface());
7036 return FindDispatchSlot(ownerType.GetMethodTable()->GetTypeID(), pMD->GetSlot());
7039 //==========================================================================================
7040 // This is used for reverse methodimpl lookups by ComPlusMethodCall MDs.
7041 // This assumes the following:
7042 // The methodimpl is for an interfaceToken->slotNumber
7043 // There is ONLY ONE such mapping for this slot number
7044 // The mapping exists in this type, not a parent type.
7045 MethodDesc * MethodTable::ReverseInterfaceMDLookup(UINT32 slotNumber)
7051 DispatchMap::Iterator it(this);
7052 for (; it.IsValid(); it.Next())
7054 if (it.Entry()->GetTargetSlotNumber() == slotNumber)
7056 DispatchMapTypeID typeID = it.Entry()->GetTypeID();
7057 _ASSERTE(!typeID.IsThisClass());
7058 UINT32 slotNum = it.Entry()->GetSlotNumber();
7059 MethodTable * pMTItf = LookupDispatchMapType(typeID);
7060 CONSISTENCY_CHECK(CheckPointer(pMTItf));
7062 MethodDesc *pCanonMD = pMTItf->GetMethodDescForSlot((DWORD)slotNum);
7063 return MethodDesc::FindOrCreateAssociatedMethodDesc(
7066 FALSE, // forceBoxedEntryPoint
7067 Instantiation(), // methodInst
7068 FALSE, // allowInstParam
7069 TRUE); // forceRemotableMethod
7075 //==========================================================================================
7076 UINT32 MethodTable::GetTypeID()
7083 PTR_MethodTable pMT = PTR_MethodTable(this);
7085 return GetDomain()->GetTypeID(pMT);
7088 //==========================================================================================
7089 UINT32 MethodTable::LookupTypeID()
7099 PTR_MethodTable pMT = PTR_MethodTable(this);
7101 return GetDomain()->LookupTypeID(pMT);
7104 //==========================================================================================
7105 BOOL MethodTable::ImplementsInterfaceWithSameSlotsAsParent(MethodTable *pItfMT, MethodTable *pParentMT)
7111 PRECONDITION(!IsInterface() && !pParentMT->IsInterface());
7112 PRECONDITION(pItfMT->IsInterface());
7115 MethodTable *pMT = this;
7118 DispatchMap::EncodedMapIterator it(pMT);
7119 for (; it.IsValid(); it.Next())
7121 DispatchMapEntry *pCurEntry = it.Entry();
7122 if (LookupDispatchMapType(pCurEntry->GetTypeID()) == pItfMT)
7124 // this class and its parents up to pParentMT must have no mappings for the interface
7129 pMT = pMT->GetParentMethodTable();
7130 _ASSERTE(pMT != NULL);
7132 while (pMT != pParentMT);
7137 //==========================================================================================
7138 BOOL MethodTable::HasSameInterfaceImplementationAsParent(MethodTable *pItfMT, MethodTable *pParentMT)
7144 PRECONDITION(!IsInterface() && !pParentMT->IsInterface());
7145 PRECONDITION(pItfMT->IsInterface());
7148 if (!ImplementsInterfaceWithSameSlotsAsParent(pItfMT, pParentMT))
7150 // if the slots are not same, this class reimplements the interface
7154 // The target slots are the same, but they can still be overriden. We'll iterate
7155 // the dispatch map beginning with pParentMT up the hierarchy and for each pItfMT
7156 // entry check the target slot contents (pParentMT vs. this class). A mismatch
7157 // means that there is an override. We'll keep track of source (interface) slots
7158 // we have seen so that we can ignore entries higher in the hierarchy that are no
7159 // longer in effect at pParentMT level.
7162 WORD wSeenSlots = 0;
7163 WORD wTotalSlots = pItfMT->GetNumVtableSlots();
7165 MethodTable *pMT = pParentMT;
7168 DispatchMap::EncodedMapIterator it(pMT);
7169 for (; it.IsValid(); it.Next())
7171 DispatchMapEntry *pCurEntry = it.Entry();
7172 if (LookupDispatchMapType(pCurEntry->GetTypeID()) == pItfMT)
7174 UINT32 ifaceSlot = pCurEntry->GetSlotNumber();
7175 if (!bitMask.TestBit(ifaceSlot))
7177 bitMask.SetBit(ifaceSlot);
7179 UINT32 targetSlot = pCurEntry->GetTargetSlotNumber();
7180 if (GetRestoredSlot(targetSlot) != pParentMT->GetRestoredSlot(targetSlot))
7182 // the target slot is overriden
7186 if (++wSeenSlots == wTotalSlots)
7188 // we've resolved all slots, no reason to continue
7194 pMT = pMT->GetParentMethodTable();
7196 while (pMT != NULL);
7201 #endif // !DACCESS_COMPILE
7203 //==========================================================================================
7204 MethodTable * MethodTable::LookupDispatchMapType(DispatchMapTypeID typeID)
7211 _ASSERTE(!typeID.IsThisClass());
7213 InterfaceMapIterator intIt = IterateInterfaceMapFrom(typeID.GetInterfaceNum());
7214 return intIt.GetInterface();
7217 //==========================================================================================
7218 MethodDesc * MethodTable::GetIntroducingMethodDesc(DWORD slotNumber)
7229 MethodDesc * pCurrentMD = GetMethodDescForSlot(slotNumber);
7230 DWORD dwSlot = pCurrentMD->GetSlot();
7231 MethodDesc * pIntroducingMD = NULL;
7233 MethodTable * pParentType = GetParentMethodTable();
7234 MethodTable * pPrevParentType = NULL;
7236 // Find this method in the parent.
7237 // If it does exist in the parent, it would be at the same vtable slot.
7238 while ((pParentType != NULL) &&
7239 (dwSlot < pParentType->GetNumVirtuals()))
7241 pPrevParentType = pParentType;
7242 pParentType = pParentType->GetParentMethodTable();
7245 if (pPrevParentType != NULL)
7247 pIntroducingMD = pPrevParentType->GetMethodDescForSlot(dwSlot);
7250 return pIntroducingMD;
7253 //==========================================================================================
7254 // There is a case where a method declared in a type can be explicitly
7255 // overridden by a methodImpl on another method within the same type. In
7256 // this case, we need to call the methodImpl target, and this will map
7257 // things appropriately for us.
7258 MethodDesc * MethodTable::MapMethodDeclToMethodImpl(MethodDesc * pMDDecl)
7260 STATIC_CONTRACT_THROWS;
7261 STATIC_CONTRACT_GC_TRIGGERS;
7263 MethodTable * pMT = pMDDecl->GetMethodTable();
7266 // Fast negative case check
7269 // If it's not virtual, then it could not have been methodImpl'd.
7270 if (!pMDDecl->IsVirtual() ||
7271 // Is it a non-virtual call to the instantiating stub
7272 (pMT->IsValueType() && !pMDDecl->IsUnboxingStub()))
7277 MethodDesc * pMDImpl = pMT->GetParallelMethodDesc(pMDDecl);
7279 // If the method is instantiated, then we need to resolve to the corresponding
7280 // instantiated MD for the new slot number.
7281 if (pMDDecl->HasMethodInstantiation())
7283 if (pMDDecl->GetSlot() != pMDImpl->GetSlot())
7285 if (!pMDDecl->IsGenericMethodDefinition())
7287 #ifndef DACCESS_COMPILE
7288 pMDImpl = pMDDecl->FindOrCreateAssociatedMethodDesc(
7291 pMDDecl->IsUnboxingStub(),
7292 pMDDecl->GetMethodInstantiation(),
7293 pMDDecl->IsInstantiatingStub());
7301 // Since the generic method definition is always in the actual
7302 // slot for the method table, and since the slot numbers for
7303 // the Decl and Impl MDs are the same, then the call to
7304 // FindOrCreateAssociatedMethodDesc would just result in the
7305 // same pMDDecl being returned. In this case, we can skip all
7311 CONSISTENCY_CHECK(CheckPointer(pMDImpl));
7312 CONSISTENCY_CHECK(!pMDImpl->IsGenericMethodDefinition());
7314 } // MethodTable::MapMethodDeclToMethodImpl
7317 //==========================================================================================
7318 HRESULT MethodTable::GetGuidNoThrow(GUID *pGuid, BOOL bGenerateIfNotFound, BOOL bClassic /*= TRUE*/)
7330 GetGuid(pGuid, bGenerateIfNotFound, bClassic);
7332 EX_CATCH_HRESULT(hr);
7334 // ensure we return a failure hr when pGuid is not filled in
7335 if (SUCCEEDED(hr) && (*pGuid == GUID_NULL))
7341 //==========================================================================================
7342 // Returns the GUID of this MethodTable.
7343 // If metadata does not specify GUID for the type, GUID_NULL is returned (if bGenerateIfNotFound
7344 // is FALSE) or a GUID is auto-generated on the fly from the name and members of the type
7345 // (bGenerateIfNotFound is TRUE).
7347 // Redirected WinRT types may have two GUIDs, the "classic" one which matches the return value
7348 // of Type.Guid, and the new one which is the GUID of the WinRT type to which it is redirected.
7349 // The bClassic parameter controls which one is returned from this method. Note that the parameter
7350 // is ignored for genuine WinRT types, i.e. types loaded from .winmd files, those always return
7353 void MethodTable::GetGuid(GUID *pGuid, BOOL bGenerateIfNotFound, BOOL bClassic /*=TRUE*/)
7363 #ifdef DACCESS_COMPILE
7365 _ASSERTE(pGuid != NULL);
7366 PTR_GuidInfo pGuidInfo = (bClassic ? GetClass()->GetGuidInfo() : GetGuidInfo());
7367 if (pGuidInfo != NULL)
7368 *pGuid = pGuidInfo->m_Guid;
7372 #else // DACCESS_COMPILE
7374 SIZE_T cchName = 0; // Length of the name (possibly after decoration).
7375 SIZE_T cbCur; // Current offset.
7376 LPCWSTR szName = NULL; // Name to turn to a guid.
7377 CQuickArray<BYTE> rName; // Buffer to accumulate signatures.
7378 BOOL bGenerated = FALSE; // A flag indicating if we generated the GUID from name.
7380 _ASSERTE(pGuid != NULL);
7382 // Use the per-EEClass GuidInfo if we are asked for the "classic" non-WinRT GUID of non-WinRT type
7383 GuidInfo *pInfo = ((bClassic && !IsProjectedFromWinRT()) ? GetClass()->GetGuidInfo() : GetGuidInfo());
7385 // First check to see if we have already cached the guid for this type.
7386 // We currently only cache guids on interfaces and WinRT delegates.
7387 // In classic mode, though, ensure we don't retrieve the GuidInfo for redirected interfaces
7388 if ((IsInterface() || IsWinRTDelegate()) && pInfo != NULL
7389 && (!bClassic || !SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeRedirected)))
7391 if (pInfo->m_bGeneratedFromName)
7393 // If the GUID was generated from the name then only return it
7394 // if bGenerateIfNotFound is set.
7395 if (bGenerateIfNotFound)
7396 *pGuid = pInfo->m_Guid;
7402 *pGuid = pInfo->m_Guid;
7407 #ifdef FEATURE_COMINTEROP
7408 if ((SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeProjected))
7410 && SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeRedirected)
7411 && IsLegalNonArrayWinRTType()))
7413 // Closed generic WinRT interfaces/delegates have their GUID computed
7414 // based on the "PIID" in metadata and the instantiation.
7415 // Note that we explicitly do this computation for redirected mscorlib
7416 // interfaces only if !bClassic, so typeof(Enumerable<T>).GUID
7417 // for example still returns the same result as pre-v4.5 runtimes.
7418 // ComputeGuidForGenericType() may throw for generics nested beyond 64 levels.
7419 WinRTGuidGenerator::ComputeGuidForGenericType(this, pGuid);
7421 // This GUID is per-instantiation so make sure that the cache
7422 // where we are going to keep it is per-instantiation as well.
7423 _ASSERTE(IsCanonicalMethodTable() || HasGuidInfo());
7426 #endif // FEATURE_COMINTEROP
7427 if (GetClass()->HasNoGuid())
7433 // If there is a GUID in the metadata then return that.
7434 IfFailThrow(GetMDImport()->GetItemGuid(GetCl(), pGuid));
7436 if (*pGuid == GUID_NULL)
7438 // Remember that we didn't find the GUID, so we can skip looking during
7439 // future checks. (Note that this is a very important optimization in the
7441 g_IBCLogger.LogEEClassCOWTableAccess(this);
7442 GetClass_NoLogging()->SetHasNoGuid();
7446 if (*pGuid == GUID_NULL && bGenerateIfNotFound)
7448 // For interfaces, concatenate the signatures of the methods and fields.
7449 if (!IsNilToken(GetCl()) && IsInterface())
7451 // Retrieve the stringized interface definition.
7452 cbCur = GetStringizedItfDef(TypeHandle(this), rName);
7454 // Pad up to a whole WCHAR.
7455 if (cbCur % sizeof(WCHAR))
7457 SIZE_T cbDelta = sizeof(WCHAR) - (cbCur % sizeof(WCHAR));
7458 rName.ReSizeThrows(cbCur + cbDelta);
7459 memset(rName.Ptr() + cbCur, 0, cbDelta);
7463 // Point to the new buffer.
7464 cchName = cbCur / sizeof(WCHAR);
7465 szName = reinterpret_cast<LPWSTR>(rName.Ptr());
7469 // Get the name of the class.
7470 DefineFullyQualifiedNameForClassW();
7471 szName = GetFullyQualifiedNameForClassNestedAwareW(this);
7474 cchName = wcslen(szName);
7476 // Enlarge buffer for class name.
7477 cbCur = cchName * sizeof(WCHAR);
7478 rName.ReSizeThrows(cbCur + sizeof(WCHAR));
7479 wcscpy_s(reinterpret_cast<LPWSTR>(rName.Ptr()), cchName + 1, szName);
7481 // Add the assembly guid string to the class name.
7482 ULONG cbCurOUT = (ULONG)cbCur;
7483 IfFailThrow(GetStringizedTypeLibGuidForAssembly(GetAssembly(), rName, (ULONG)cbCur, &cbCurOUT));
7484 cbCur = (SIZE_T) cbCurOUT;
7486 // Pad to a whole WCHAR.
7487 if (cbCur % sizeof(WCHAR))
7489 rName.ReSizeThrows(cbCur + sizeof(WCHAR)-(cbCur%sizeof(WCHAR)));
7490 while (cbCur % sizeof(WCHAR))
7494 // Point to the new buffer.
7495 szName = reinterpret_cast<LPWSTR>(rName.Ptr());
7496 cchName = cbCur / sizeof(WCHAR);
7497 // Dont' want to have to pad.
7498 _ASSERTE((sizeof(GUID) % sizeof(WCHAR)) == 0);
7501 // Generate guid from name.
7502 CorGuidFromNameW(pGuid, szName, cchName);
7504 // Remeber we generated the guid from the type name.
7508 // Cache the guid in the type, if not already cached.
7509 // We currently only do this for interfaces.
7510 // Also, in classic mode do NOT cache GUID for redirected interfaces.
7511 if ((IsInterface() || IsWinRTDelegate()) && (pInfo == NULL) && (*pGuid != GUID_NULL)
7512 #ifdef FEATURE_COMINTEROP
7514 && SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeRedirected)
7515 && IsLegalNonArrayWinRTType())
7516 #endif // FEATURE_COMINTEROP
7519 AllocMemTracker amTracker;
7520 BOOL bStoreGuidInfoOnEEClass = false;
7521 PTR_LoaderAllocator pLoaderAllocator;
7523 #if FEATURE_COMINTEROP
7524 if ((bClassic && !IsProjectedFromWinRT()) || !HasGuidInfo())
7526 bStoreGuidInfoOnEEClass = true;
7529 // We will always store the GuidInfo on the methodTable.
7530 bStoreGuidInfoOnEEClass = true;
7532 if(bStoreGuidInfoOnEEClass)
7534 // Since the GUIDInfo will be stored on the EEClass,
7535 // the memory should be allocated on the loaderAllocator of the class.
7536 // The definining module and the loaded module could be different in some scenarios.
7537 // For example - in case of shared generic instantiations
7538 // a shared generic i.e. System.__Canon which would be loaded in shared domain
7539 // but the this->GetLoaderAllocator will be the loader allocator for the definining
7540 // module which can get unloaded anytime.
7541 _ASSERTE(GetClass());
7542 _ASSERTE(GetClass()->GetMethodTable());
7543 pLoaderAllocator = GetClass()->GetMethodTable()->GetLoaderAllocator();
7547 pLoaderAllocator = GetLoaderAllocator();
7550 _ASSERTE(pLoaderAllocator);
7552 // Allocate the guid information.
7553 pInfo = (GuidInfo *)amTracker.Track(
7554 pLoaderAllocator->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(GuidInfo))));
7555 pInfo->m_Guid = *pGuid;
7556 pInfo->m_bGeneratedFromName = bGenerated;
7558 // Set in in the interface method table.
7559 if (bClassic && !IsProjectedFromWinRT())
7561 // Set the per-EEClass GuidInfo if we are asked for the "classic" non-WinRT GUID.
7562 // The MethodTable may be NGENed and read-only - and there's no point in saving
7563 // classic GUIDs in non-WinRT MethodTables anyway.
7564 _ASSERTE(bStoreGuidInfoOnEEClass);
7565 GetClass()->SetGuidInfo(pInfo);
7569 #if FEATURE_COMINTEROP
7570 _ASSERTE(bStoreGuidInfoOnEEClass || HasGuidInfo());
7572 _ASSERTE(bStoreGuidInfoOnEEClass);
7577 amTracker.SuppressRelease();
7579 #endif // !DACCESS_COMPILE
7583 //==========================================================================================
7584 MethodDesc* MethodTable::GetMethodDescForSlotAddress(PCODE addr, BOOL fSpeculative /*=FALSE*/)
7586 CONTRACT(MethodDesc *)
7591 POSTCONDITION(CheckPointer(RETVAL, NULL_NOT_OK));
7592 POSTCONDITION(RETVAL->m_pDebugMethodTable.IsNull() || // We must be in BuildMethdTableThrowing()
7593 RETVAL->SanityCheck());
7597 // If we see shared fcall implementation as an argument to this
7598 // function, it means that a vtable slot for the shared fcall
7599 // got backpatched when it shouldn't have. The reason we can't
7600 // backpatch this method is that it is an FCall that has many
7601 // MethodDescs for one implementation. If we backpatch delegate
7602 // constructors, this function will not be able to recover the
7603 // MethodDesc for the method.
7605 _ASSERTE_IMPL(!ECall::IsSharedFCallImpl(addr) &&
7606 "someone backpatched shared fcall implementation -- "
7607 "see comment in code");
7609 MethodDesc* pMethodDesc = ExecutionManager::GetCodeMethodDesc(addr);
7610 if (NULL != pMethodDesc)
7615 #ifdef FEATURE_INTERPRETER
7616 // I don't really know why this helps. Figure it out.
7617 #ifndef DACCESS_COMPILE
7618 // If we didn't find it above, try as an Interpretation stub...
7619 pMethodDesc = Interpreter::InterpretationStubToMethodInfo(addr);
7621 if (NULL != pMethodDesc)
7626 #endif // FEATURE_INTERPRETER
7629 pMethodDesc = ECall::MapTargetBackToMethod(addr);
7630 if (pMethodDesc != 0)
7635 pMethodDesc = MethodDesc::GetMethodDescFromStubAddr(addr, fSpeculative);
7639 RETURN(pMethodDesc);
7642 //==========================================================================================
7644 BOOL MethodTable::ComputeContainsGenericVariables(Instantiation inst)
7655 for (DWORD j = 0; j < inst.GetNumArgs(); j++)
7657 if (inst[j].ContainsGenericVariables())
7665 //==========================================================================================
7666 BOOL MethodTable::SanityCheck()
7668 LIMITED_METHOD_CONTRACT;
7671 // strings have component size2, all other non-arrays should have 0
7672 _ASSERTE((GetComponentSize() <= 2) || IsArray());
7674 if (m_pEEClass.IsNull())
7676 if (IsAsyncPinType())
7686 EEClass * pClass = GetClass();
7687 MethodTable * pCanonMT = pClass->GetMethodTable();
7689 // Let's try to make sure we have a valid EEClass pointer.
7690 if (pCanonMT == NULL)
7693 if (GetNumGenericArgs() != 0)
7694 return (pCanonMT->GetClass() == pClass);
7696 return (pCanonMT == this) || IsArray() || IsTransparentProxy();
7699 //==========================================================================================
7701 // Structs containing GC pointers whose size is at most this are always stack-allocated.
7702 const unsigned MaxStructBytesForLocalVarRetBuffBytes = 2 * sizeof(void*); // 4 pointer-widths.
7704 BOOL MethodTable::IsStructRequiringStackAllocRetBuf()
7706 LIMITED_METHOD_DAC_CONTRACT;
7708 // Disable this optimization. It has limited value (only kicks in on x86, and only for less common structs),
7709 // causes bugs and introduces odd ABI differences not compatible with ReadyToRun.
7713 //==========================================================================================
7714 unsigned MethodTable::GetTypeDefRid()
7716 LIMITED_METHOD_DAC_CONTRACT;
7718 g_IBCLogger.LogMethodTableAccess(this);
7719 return GetTypeDefRid_NoLogging();
7722 //==========================================================================================
7723 unsigned MethodTable::GetTypeDefRid_NoLogging()
7725 LIMITED_METHOD_DAC_CONTRACT;
7727 WORD token = m_wToken;
7729 if (token == METHODTABLE_TOKEN_OVERFLOW)
7730 return (unsigned)*GetTokenOverflowPtr();
7735 //==========================================================================================
7736 void MethodTable::SetCl(mdTypeDef token)
7738 LIMITED_METHOD_CONTRACT;
7740 unsigned rid = RidFromToken(token);
7741 if (rid >= METHODTABLE_TOKEN_OVERFLOW)
7743 m_wToken = METHODTABLE_TOKEN_OVERFLOW;
7744 *GetTokenOverflowPtr() = rid;
7748 _ASSERTE(FitsIn<U2>(rid));
7749 m_wToken = (WORD)rid;
7752 _ASSERTE(GetCl() == token);
7755 //==========================================================================================
7756 MethodDesc * MethodTable::GetClassConstructor()
7766 return GetMethodDescForSlot(GetClassConstructorSlot());
7769 //==========================================================================================
7770 DWORD MethodTable::HasFixedAddressVTStatics()
7772 LIMITED_METHOD_CONTRACT;
7774 return GetClass()->HasFixedAddressVTStatics();
7777 //==========================================================================================
7778 WORD MethodTable::GetNumHandleRegularStatics()
7780 LIMITED_METHOD_CONTRACT;
7782 return GetClass()->GetNumHandleRegularStatics();
7785 //==========================================================================================
7786 WORD MethodTable::GetNumBoxedRegularStatics()
7788 LIMITED_METHOD_CONTRACT;
7790 return GetClass()->GetNumBoxedRegularStatics();
7793 //==========================================================================================
7794 WORD MethodTable::GetNumBoxedThreadStatics ()
7796 LIMITED_METHOD_CONTRACT;
7798 return GetClass()->GetNumBoxedThreadStatics();
7801 //==========================================================================================
7802 ClassCtorInfoEntry* MethodTable::GetClassCtorInfoIfExists()
7804 LIMITED_METHOD_CONTRACT;
7809 g_IBCLogger.LogCCtorInfoReadAccess(this);
7811 if (HasBoxedRegularStatics())
7813 ModuleCtorInfo *pModuleCtorInfo = GetZapModule()->GetZapModuleCtorInfo();
7814 DPTR(RelativePointer<PTR_MethodTable>) ppMT = pModuleCtorInfo->ppMT;
7815 PTR_DWORD hotHashOffsets = pModuleCtorInfo->hotHashOffsets;
7816 PTR_DWORD coldHashOffsets = pModuleCtorInfo->coldHashOffsets;
7818 if (pModuleCtorInfo->numHotHashes)
7820 DWORD hash = pModuleCtorInfo->GenerateHash(PTR_MethodTable(this), ModuleCtorInfo::HOT);
7821 _ASSERTE(hash < pModuleCtorInfo->numHotHashes);
7823 for (DWORD i = hotHashOffsets[hash]; i != hotHashOffsets[hash + 1]; i++)
7825 _ASSERTE(!ppMT[i].IsNull());
7826 if (dac_cast<TADDR>(pModuleCtorInfo->GetMT(i)) == dac_cast<TADDR>(this))
7828 return pModuleCtorInfo->cctorInfoHot + i;
7833 if (pModuleCtorInfo->numColdHashes)
7835 DWORD hash = pModuleCtorInfo->GenerateHash(PTR_MethodTable(this), ModuleCtorInfo::COLD);
7836 _ASSERTE(hash < pModuleCtorInfo->numColdHashes);
7838 for (DWORD i = coldHashOffsets[hash]; i != coldHashOffsets[hash + 1]; i++)
7840 _ASSERTE(!ppMT[i].IsNull());
7841 if (dac_cast<TADDR>(pModuleCtorInfo->GetMT(i)) == dac_cast<TADDR>(this))
7843 return pModuleCtorInfo->cctorInfoCold + (i - pModuleCtorInfo->numElementsHot);
7853 //==========================================================================================
7854 // Returns true if pointer to the parent method table has been initialized/restored already.
7855 BOOL MethodTable::IsParentMethodTablePointerValid()
7857 LIMITED_METHOD_CONTRACT;
7860 // workaround: Type loader accesses partially initialized datastructures that interferes with IBC logging.
7861 // Once type loader is fixed to do not access partially initialized datastructures, this can go away.
7862 if (!GetWriteableData_NoLogging()->IsParentMethodTablePointerValid())
7865 TADDR base = dac_cast<TADDR>(this) + offsetof(MethodTable, m_pParentMethodTable);
7866 return !m_pParentMethodTable.IsTagged(base);
7871 //---------------------------------------------------------------------------------------
7873 // Ascends the parent class chain of "this", until a MethodTable is found whose typeDef
7874 // matches that of the specified pWhichParent. Why is this useful? See
7875 // code:MethodTable::GetInstantiationOfParentClass below and
7876 // code:Generics::GetExactInstantiationsOfMethodAndItsClassFromCallInformation for use
7880 // pWhichParent - MethodTable whose typeDef we're trying to match as we go up
7881 // "this"'s parent chain.
7884 // If a matching parent MethodTable is found, it is returned. Else, NULL is
7888 MethodTable * MethodTable::GetMethodTableMatchingParentClass(MethodTable * pWhichParent)
7895 PRECONDITION(CheckPointer(pWhichParent));
7896 PRECONDITION(IsRestored_NoLogging());
7897 PRECONDITION(pWhichParent->IsRestored_NoLogging());
7901 MethodTable *pMethodTableSearch = this;
7903 #ifdef DACCESS_COMPILE
7904 unsigned parentCount = 0;
7905 MethodTable *pOldMethodTable = NULL;
7906 #endif // DACCESS_COMPILE
7908 while (pMethodTableSearch != NULL)
7910 #ifdef DACCESS_COMPILE
7911 if (pMethodTableSearch == pOldMethodTable ||
7916 pOldMethodTable = pMethodTableSearch;
7918 #endif // DACCESS_COMPILE
7920 if (pMethodTableSearch->HasSameTypeDefAs(pWhichParent))
7922 return pMethodTableSearch;
7925 pMethodTableSearch = pMethodTableSearch->GetParentMethodTable();
7932 //==========================================================================================
7933 // Given D<T> : C<List<T>> and a type handle D<string> we sometimes
7934 // need to find the corresponding type handle
7935 // C<List<string>> (C may also be some type
7936 // further up the inheritance hierarchy). GetInstantiationOfParentClass
7937 // helps us do this by getting the corresponding instantiation of C, i.e.
7940 // pWhichParent: this is used identify which parent type we're interested in.
7941 // It must be a canonical EEClass, e.g. for C<ref>. This is used as a token for
7942 // C<List<T>>. This method can also be called with the minimal methodtable used
7943 // for dynamic methods. In that case, we need to return an empty instantiation.
7945 // Note this only works for parent classes, not parent interfaces.
7946 Instantiation MethodTable::GetInstantiationOfParentClass(MethodTable *pWhichParent)
7952 PRECONDITION(CheckPointer(pWhichParent));
7953 PRECONDITION(IsRestored_NoLogging());
7954 PRECONDITION(pWhichParent->IsRestored_NoLogging());
7959 MethodTable * pMatchingParent = GetMethodTableMatchingParentClass(pWhichParent);
7960 if (pMatchingParent != NULL)
7962 return pMatchingParent->GetInstantiation();
7965 // The parameter should always be a parent class or the dynamic method
7966 // class. Since there is no bit on the dynamicclass methodtable to indicate
7967 // that it is the dynamic method methodtable, we simply check the debug name
7968 // This is good enough for an assert.
7969 _ASSERTE(strcmp(pWhichParent->GetDebugClassName(), "dynamicClass") == 0);
7970 return Instantiation();
7973 #ifndef DACCESS_COMPILE
7975 #ifdef FEATURE_COMINTEROP
7978 // This is for COM Interop backwards compatibility
7981 //==========================================================================================
7982 // Returns the data pointer if present, NULL otherwise
7983 InteropMethodTableData *MethodTable::LookupComInteropData()
7985 WRAPPER_NO_CONTRACT;
7986 return GetDomain()->LookupComInteropData(this);
7989 //==========================================================================================
7990 // Returns TRUE if successfully inserted, FALSE if this would be a duplicate entry
7991 BOOL MethodTable::InsertComInteropData(InteropMethodTableData *pData)
7993 WRAPPER_NO_CONTRACT;
7994 return GetDomain()->InsertComInteropData(this, pData);
7997 //==========================================================================================
7998 InteropMethodTableData *MethodTable::CreateComInteropData(AllocMemTracker *pamTracker)
8002 PRECONDITION(GetParentMethodTable() == NULL || GetParentMethodTable()->LookupComInteropData() != NULL);
8005 ClassCompat::MethodTableBuilder builder(this);
8007 InteropMethodTableData *pData = builder.BuildInteropVTable(pamTracker);
8012 //==========================================================================================
8013 InteropMethodTableData *MethodTable::GetComInteropData()
8020 InteropMethodTableData *pData = LookupComInteropData();
8026 // Make sure that the parent's interop data has been created
8027 MethodTable *pParentMT = GetParentMethodTable();
8029 pParentMT->GetComInteropData();
8031 AllocMemTracker amTracker;
8033 pData = CreateComInteropData(&amTracker);
8034 if (InsertComInteropData(pData))
8036 amTracker.SuppressRelease();
8040 pData = LookupComInteropData();
8048 #endif // FEATURE_COMINTEROP
8050 //==========================================================================================
8051 ULONG MethodTable::MethodData::Release()
8053 LIMITED_METHOD_CONTRACT;
8054 //@TODO: Must adjust this to use an alternate allocator so that we don't
8055 //@TODO: potentially cause deadlocks on the debug thread.
8056 SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE;
8057 ULONG cRef = (ULONG) InterlockedDecrement((LONG*)&m_cRef);
8064 //==========================================================================================
8066 MethodTable::MethodData::ProcessMap(
8067 const DispatchMapTypeID * rgTypeIDs,
8070 UINT32 iCurrentChainDepth,
8071 MethodDataEntry * rgWorkingData)
8073 LIMITED_METHOD_CONTRACT;
8075 for (DispatchMap::EncodedMapIterator it(pMT); it.IsValid(); it.Next())
8077 for (UINT32 nTypeIDIndex = 0; nTypeIDIndex < cTypeIDs; nTypeIDIndex++)
8079 if (it.Entry()->GetTypeID() == rgTypeIDs[nTypeIDIndex])
8081 UINT32 curSlot = it.Entry()->GetSlotNumber();
8082 // If we're processing an interface, or it's for a virtual, or it's for a non-virtual
8083 // for the most derived type, we want to process the entry. In other words, we
8084 // want to ignore non-virtuals for parent classes.
8085 if ((curSlot < pMT->GetNumVirtuals()) || (iCurrentChainDepth == 0))
8087 MethodDataEntry * pCurEntry = &rgWorkingData[curSlot];
8088 if (!pCurEntry->IsDeclInit() && !pCurEntry->IsImplInit())
8090 pCurEntry->SetImplData(it.Entry()->GetTargetSlotNumber());
8096 } // MethodTable::MethodData::ProcessMap
8098 //==========================================================================================
8099 UINT32 MethodTable::MethodDataObject::GetObjectSize(MethodTable *pMT)
8101 WRAPPER_NO_CONTRACT;
8102 UINT32 cb = sizeof(MethodTable::MethodDataObject);
8103 cb += pMT->GetCanonicalMethodTable()->GetNumMethods() * sizeof(MethodDataObjectEntry);
8107 //==========================================================================================
8108 // This will fill in all the MethodEntry slots present in the current MethodTable
8109 void MethodTable::MethodDataObject::Init(MethodTable *pMT, MethodData *pParentData)
8113 WRAPPER(GC_TRIGGERS);
8114 PRECONDITION(CheckPointer(pMT));
8115 PRECONDITION(CheckPointer(pParentData, NULL_OK));
8116 PRECONDITION(!pMT->IsInterface());
8117 PRECONDITION(pParentData == NULL ||
8118 (pMT->ParentEquals(pParentData->GetDeclMethodTable()) &&
8119 pMT->ParentEquals(pParentData->GetImplMethodTable())));
8123 m_iNextChainDepth = 0;
8124 m_containsMethodImpl = FALSE;
8126 ZeroMemory(GetEntryData(), sizeof(MethodDataObjectEntry) * GetNumMethods());
8127 } // MethodTable::MethodDataObject::Init
8129 //==========================================================================================
8130 BOOL MethodTable::MethodDataObject::PopulateNextLevel()
8132 LIMITED_METHOD_CONTRACT;
8134 // Get the chain depth to next decode.
8135 UINT32 iChainDepth = GetNextChainDepth();
8137 // If the chain depth is MAX_CHAIN_DEPTH, then we've already parsed every parent.
8138 if (iChainDepth == MAX_CHAIN_DEPTH) {
8141 // Now move up the chain to the target.
8142 MethodTable *pMTCur = m_pMT;
8143 for (UINT32 i = 0; pMTCur != NULL && i < iChainDepth; i++) {
8144 pMTCur = pMTCur->GetParentMethodTable();
8147 // If we reached the end, then we're done.
8148 if (pMTCur == NULL) {
8149 SetNextChainDepth(MAX_CHAIN_DEPTH);
8153 FillEntryDataForAncestor(pMTCur);
8155 SetNextChainDepth(iChainDepth + 1);
8158 } // MethodTable::MethodDataObject::PopulateNextLevel
8160 //==========================================================================================
8161 void MethodTable::MethodDataObject::FillEntryDataForAncestor(MethodTable * pMT)
8163 LIMITED_METHOD_CONTRACT;
8165 // Since we traverse ancestors from lowest in the inheritance hierarchy
8166 // to highest, the first method we come across for a slot is normally
8167 // both the declaring and implementing method desc.
8169 // However if this slot is the target of a methodImpl, pMD is not
8170 // necessarily either. Rather than track this on a per-slot basis,
8171 // we conservatively avoid filling out virtual methods once we
8172 // have found that this inheritance chain contains a methodImpl.
8174 // Note that there may be a methodImpl higher in the inheritance chain
8175 // that we have not seen yet, and so we will fill out virtual methods
8176 // until we reach that level. We are safe doing that because the slots
8177 // we fill have been introduced/overridden by a subclass and so take
8178 // precedence over any inherited methodImpl.
8180 // Before we fill the entry data, find if the current ancestor has any methodImpls
8182 if (pMT->GetClass()->ContainsMethodImpls())
8183 m_containsMethodImpl = TRUE;
8185 if (m_containsMethodImpl && pMT != m_pMT)
8188 unsigned nVirtuals = pMT->GetNumVirtuals();
8190 MethodTable::IntroducedMethodIterator it(pMT, FALSE);
8191 for (; it.IsValid(); it.Next())
8193 MethodDesc * pMD = it.GetMethodDesc();
8194 g_IBCLogger.LogMethodDescAccess(pMD);
8196 unsigned slot = pMD->GetSlot();
8197 if (slot == MethodTable::NO_SLOT)
8200 // We want to fill all methods introduced by the actual type we're gathering
8201 // data for, and the virtual methods of the parent and above
8204 if (m_containsMethodImpl && slot < nVirtuals)
8209 if (slot >= nVirtuals)
8213 MethodDataObjectEntry * pEntry = GetEntry(slot);
8215 if (pEntry->GetDeclMethodDesc() == NULL)
8217 pEntry->SetDeclMethodDesc(pMD);
8220 if (pEntry->GetImplMethodDesc() == NULL)
8222 pEntry->SetImplMethodDesc(pMD);
8225 } // MethodTable::MethodDataObject::FillEntryDataForAncestor
8227 //==========================================================================================
8228 MethodDesc * MethodTable::MethodDataObject::GetDeclMethodDesc(UINT32 slotNumber)
8230 WRAPPER_NO_CONTRACT;
8231 _ASSERTE(slotNumber < GetNumMethods());
8233 MethodDataObjectEntry * pEntry = GetEntry(slotNumber);
8235 // Fill the entries one level of inheritance at a time,
8236 // stopping when we have filled the MD we are looking for.
8237 while (!pEntry->GetDeclMethodDesc() && PopulateNextLevel());
8239 MethodDesc * pMDRet = pEntry->GetDeclMethodDesc();
8242 pMDRet = GetImplMethodDesc(slotNumber)->GetDeclMethodDesc(slotNumber);
8243 _ASSERTE(CheckPointer(pMDRet));
8244 pEntry->SetDeclMethodDesc(pMDRet);
8248 _ASSERTE(pMDRet == GetImplMethodDesc(slotNumber)->GetDeclMethodDesc(slotNumber));
8253 //==========================================================================================
8254 DispatchSlot MethodTable::MethodDataObject::GetImplSlot(UINT32 slotNumber)
8256 WRAPPER_NO_CONTRACT;
8257 _ASSERTE(slotNumber < GetNumMethods());
8258 return DispatchSlot(m_pMT->GetRestoredSlot(slotNumber));
8261 //==========================================================================================
8262 UINT32 MethodTable::MethodDataObject::GetImplSlotNumber(UINT32 slotNumber)
8264 WRAPPER_NO_CONTRACT;
8265 _ASSERTE(slotNumber < GetNumMethods());
8269 //==========================================================================================
8270 MethodDesc *MethodTable::MethodDataObject::GetImplMethodDesc(UINT32 slotNumber)
8280 _ASSERTE(slotNumber < GetNumMethods());
8281 MethodDataObjectEntry *pEntry = GetEntry(slotNumber);
8283 // Fill the entries one level of inheritance at a time,
8284 // stopping when we have filled the MD we are looking for.
8285 while (!pEntry->GetImplMethodDesc() && PopulateNextLevel());
8287 MethodDesc *pMDRet = pEntry->GetImplMethodDesc();
8291 _ASSERTE(slotNumber < GetNumVirtuals());
8292 pMDRet = m_pMT->GetMethodDescForSlot(slotNumber);
8293 _ASSERTE(CheckPointer(pMDRet));
8294 pEntry->SetImplMethodDesc(pMDRet);
8298 _ASSERTE(slotNumber >= GetNumVirtuals() || pMDRet == m_pMT->GetMethodDescForSlot(slotNumber));
8304 //==========================================================================================
8305 void MethodTable::MethodDataObject::InvalidateCachedVirtualSlot(UINT32 slotNumber)
8307 WRAPPER_NO_CONTRACT;
8308 _ASSERTE(slotNumber < GetNumVirtuals());
8310 MethodDataObjectEntry *pEntry = GetEntry(slotNumber);
8311 pEntry->SetImplMethodDesc(NULL);
8314 //==========================================================================================
8315 MethodDesc *MethodTable::MethodDataInterface::GetDeclMethodDesc(UINT32 slotNumber)
8317 WRAPPER_NO_CONTRACT;
8318 return m_pMT->GetMethodDescForSlot(slotNumber);
8321 //==========================================================================================
8322 MethodDesc *MethodTable::MethodDataInterface::GetImplMethodDesc(UINT32 slotNumber)
8324 WRAPPER_NO_CONTRACT;
8325 return MethodTable::MethodDataInterface::GetDeclMethodDesc(slotNumber);
8328 //==========================================================================================
8329 void MethodTable::MethodDataInterface::InvalidateCachedVirtualSlot(UINT32 slotNumber)
8331 LIMITED_METHOD_CONTRACT;
8333 // MethodDataInterface does not store any cached MethodDesc values
8337 //==========================================================================================
8338 UINT32 MethodTable::MethodDataInterfaceImpl::GetObjectSize(MethodTable *pMTDecl)
8340 WRAPPER_NO_CONTRACT;
8341 UINT32 cb = sizeof(MethodDataInterfaceImpl);
8342 cb += pMTDecl->GetNumMethods() * sizeof(MethodDataEntry);
8346 //==========================================================================================
8347 // This will fill in all the MethodEntry slots present in the current MethodTable
8349 MethodTable::MethodDataInterfaceImpl::Init(
8350 const DispatchMapTypeID * rgDeclTypeIDs,
8351 UINT32 cDeclTypeIDs,
8357 WRAPPER(GC_TRIGGERS);
8358 PRECONDITION(CheckPointer(pDecl));
8359 PRECONDITION(CheckPointer(pImpl));
8360 PRECONDITION(pDecl->GetDeclMethodTable()->IsInterface());
8361 PRECONDITION(!pImpl->GetDeclMethodTable()->IsInterface());
8362 PRECONDITION(pDecl->GetDeclMethodTable() == pDecl->GetImplMethodTable());
8363 PRECONDITION(pImpl->GetDeclMethodTable() == pImpl->GetImplMethodTable());
8364 PRECONDITION(pDecl != pImpl);
8367 // Store and AddRef the decl and impl data.
8373 m_iNextChainDepth = 0;
8374 // Need side effects of the calls, but not the result.
8375 /* MethodTable *pDeclMT = */ pDecl->GetDeclMethodTable();
8376 /* MethodTable *pImplMT = */ pImpl->GetImplMethodTable();
8377 m_rgDeclTypeIDs = rgDeclTypeIDs;
8378 m_cDeclTypeIDs = cDeclTypeIDs;
8380 // Initialize each entry.
8381 for (UINT32 i = 0; i < GetNumMethods(); i++) {
8382 // Initialize the entry
8383 GetEntry(i)->Init();
8385 } // MethodTable::MethodDataInterfaceImpl::Init
8387 //==========================================================================================
8388 MethodTable::MethodDataInterfaceImpl::MethodDataInterfaceImpl(
8389 const DispatchMapTypeID * rgDeclTypeIDs,
8390 UINT32 cDeclTypeIDs,
8394 WRAPPER_NO_CONTRACT;
8395 Init(rgDeclTypeIDs, cDeclTypeIDs, pDecl, pImpl);
8398 //==========================================================================================
8399 MethodTable::MethodDataInterfaceImpl::~MethodDataInterfaceImpl()
8401 WRAPPER_NO_CONTRACT;
8402 CONSISTENCY_CHECK(CheckPointer(m_pDecl));
8403 CONSISTENCY_CHECK(CheckPointer(m_pImpl));
8408 //==========================================================================================
8410 MethodTable::MethodDataInterfaceImpl::PopulateNextLevel()
8412 LIMITED_METHOD_CONTRACT;
8414 // Get the chain depth to next decode.
8415 UINT32 iChainDepth = GetNextChainDepth();
8417 // If the chain depth is MAX_CHAIN_DEPTH, then we've already parsed every parent.
8418 if (iChainDepth == MAX_CHAIN_DEPTH) {
8422 // Now move up the chain to the target.
8423 MethodTable *pMTCur = m_pImpl->GetImplMethodTable();
8424 for (UINT32 i = 0; pMTCur != NULL && i < iChainDepth; i++) {
8425 pMTCur = pMTCur->GetParentMethodTable();
8428 // If we reached the end, then we're done.
8429 if (pMTCur == NULL) {
8430 SetNextChainDepth(MAX_CHAIN_DEPTH);
8434 if (m_cDeclTypeIDs != 0)
8435 { // We got the TypeIDs from TypeLoader, use them
8436 ProcessMap(m_rgDeclTypeIDs, m_cDeclTypeIDs, pMTCur, iChainDepth, GetEntryData());
8439 { // We should decode all interface duplicates of code:m_pDecl
8440 MethodTable * pDeclMT = m_pDecl->GetImplMethodTable();
8441 INDEBUG(BOOL dbg_fInterfaceFound = FALSE);
8443 // Call code:ProcessMap for every (duplicate) occurence of interface code:pDeclMT in the interface
8444 // map of code:m_pImpl
8445 MethodTable::InterfaceMapIterator it = m_pImpl->GetImplMethodTable()->IterateInterfaceMap();
8448 if (pDeclMT == it.GetInterface())
8449 { // We found the interface
8450 INDEBUG(dbg_fInterfaceFound = TRUE);
8451 DispatchMapTypeID declTypeID = DispatchMapTypeID::InterfaceClassID(it.GetIndex());
8453 ProcessMap(&declTypeID, 1, pMTCur, iChainDepth, GetEntryData());
8456 // The interface code:m_Decl should be found at least once in the interface map of code:m_pImpl,
8457 // otherwise someone passed wrong information
8458 _ASSERTE(dbg_fInterfaceFound);
8461 SetNextChainDepth(iChainDepth + 1);
8464 } // MethodTable::MethodDataInterfaceImpl::PopulateNextLevel
8466 //==========================================================================================
8467 UINT32 MethodTable::MethodDataInterfaceImpl::MapToImplSlotNumber(UINT32 slotNumber)
8469 LIMITED_METHOD_CONTRACT;
8471 _ASSERTE(slotNumber < GetNumMethods());
8473 MethodDataEntry *pEntry = GetEntry(slotNumber);
8474 while (!pEntry->IsImplInit() && PopulateNextLevel()) {}
8475 if (pEntry->IsImplInit()) {
8476 return pEntry->GetImplSlotNum();
8479 return INVALID_SLOT_NUMBER;
8483 //==========================================================================================
8484 DispatchSlot MethodTable::MethodDataInterfaceImpl::GetImplSlot(UINT32 slotNumber)
8486 WRAPPER_NO_CONTRACT;
8487 UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber);
8488 if (implSlotNumber == INVALID_SLOT_NUMBER) {
8489 return DispatchSlot(NULL);
8491 return m_pImpl->GetImplSlot(implSlotNumber);
8494 //==========================================================================================
8495 UINT32 MethodTable::MethodDataInterfaceImpl::GetImplSlotNumber(UINT32 slotNumber)
8497 WRAPPER_NO_CONTRACT;
8498 return MapToImplSlotNumber(slotNumber);
8501 //==========================================================================================
8502 MethodDesc *MethodTable::MethodDataInterfaceImpl::GetImplMethodDesc(UINT32 slotNumber)
8504 WRAPPER_NO_CONTRACT;
8505 UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber);
8506 if (implSlotNumber == INVALID_SLOT_NUMBER) {
8509 return m_pImpl->GetImplMethodDesc(MapToImplSlotNumber(slotNumber));
8512 //==========================================================================================
8513 void MethodTable::MethodDataInterfaceImpl::InvalidateCachedVirtualSlot(UINT32 slotNumber)
8515 WRAPPER_NO_CONTRACT;
8516 UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber);
8517 if (implSlotNumber == INVALID_SLOT_NUMBER) {
8520 return m_pImpl->InvalidateCachedVirtualSlot(MapToImplSlotNumber(slotNumber));
8523 //==========================================================================================
8524 void MethodTable::CheckInitMethodDataCache()
8531 if (s_pMethodDataCache == NULL)
8533 UINT32 cb = MethodDataCache::GetObjectSize(8);
8534 NewHolder<BYTE> hb(new BYTE[cb]);
8535 MethodDataCache *pCache = new (hb.GetValue()) MethodDataCache(8);
8536 if (InterlockedCompareExchangeT(
8537 &s_pMethodDataCache, pCache, NULL) == NULL)
8539 hb.SuppressRelease();
8541 // If somebody beat us, return and allow the holders to take care of cleanup.
8549 //==========================================================================================
8550 void MethodTable::ClearMethodDataCache()
8552 LIMITED_METHOD_CONTRACT;
8553 if (s_pMethodDataCache != NULL) {
8554 s_pMethodDataCache->Clear();
8558 //==========================================================================================
8559 MethodTable::MethodData *MethodTable::FindMethodDataHelper(MethodTable *pMTDecl, MethodTable *pMTImpl)
8564 CONSISTENCY_CHECK(s_fUseMethodDataCache);
8567 return s_pMethodDataCache->Find(pMTDecl, pMTImpl);
8570 //==========================================================================================
8571 MethodTable::MethodData *MethodTable::FindParentMethodDataHelper(MethodTable *pMT)
8580 MethodData *pData = NULL;
8581 if (s_fUseMethodDataCache && s_fUseParentMethodData) {
8582 if (!pMT->IsInterface()) {
8583 //@todo : this won't be correct for non-shared code
8584 MethodTable *pMTParent = pMT->GetParentMethodTable();
8585 if (pMTParent != NULL) {
8586 pData = FindMethodDataHelper(pMTParent, pMTParent);
8593 //==========================================================================================
8594 // This method does not cache the resulting MethodData object in the global MethodDataCache.
8595 // The TypeIDs (rgDeclTypeIDs with cDeclTypeIDs items) have to be sorted.
8596 MethodTable::MethodData *
8597 MethodTable::GetMethodDataHelper(
8598 const DispatchMapTypeID * rgDeclTypeIDs,
8599 UINT32 cDeclTypeIDs,
8600 MethodTable * pMTDecl,
8601 MethodTable * pMTImpl)
8605 WRAPPER(GC_TRIGGERS);
8606 PRECONDITION(CheckPointer(pMTDecl));
8607 PRECONDITION(CheckPointer(pMTImpl));
8610 //@TODO: Must adjust this to use an alternate allocator so that we don't
8611 //@TODO: potentially cause deadlocks on the debug thread.
8612 SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE;
8614 CONSISTENCY_CHECK(pMTDecl->IsInterface() && !pMTImpl->IsInterface());
8617 // Check that rgDeclTypeIDs are sorted, are valid interface indexes and reference only pMTDecl interface
8619 InterfaceInfo_t * rgImplInterfaceMap = pMTImpl->GetInterfaceMap();
8620 UINT32 cImplInterfaceMap = pMTImpl->GetNumInterfaces();
8621 // Verify that all types referenced by code:rgDeclTypeIDs are code:pMTDecl (declared interface)
8622 for (UINT32 nDeclTypeIDIndex = 0; nDeclTypeIDIndex < cDeclTypeIDs; nDeclTypeIDIndex++)
8624 if (nDeclTypeIDIndex > 0)
8625 { // Verify that interface indexes are sorted
8626 _ASSERTE(rgDeclTypeIDs[nDeclTypeIDIndex - 1].GetInterfaceNum() < rgDeclTypeIDs[nDeclTypeIDIndex].GetInterfaceNum());
8628 UINT32 nInterfaceIndex = rgDeclTypeIDs[nDeclTypeIDIndex].GetInterfaceNum();
8629 _ASSERTE(nInterfaceIndex <= cImplInterfaceMap);
8631 OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS);
8632 _ASSERTE(rgImplInterfaceMap[nInterfaceIndex].GetApproxMethodTable(pMTImpl->GetLoaderModule())->HasSameTypeDefAs(pMTDecl));
8638 // Can't cache, since this is a custom method used in BuildMethodTable
8639 MethodDataWrapper hDecl(GetMethodData(pMTDecl, FALSE));
8640 MethodDataWrapper hImpl(GetMethodData(pMTImpl, FALSE));
8642 UINT32 cb = MethodDataInterfaceImpl::GetObjectSize(pMTDecl);
8643 NewHolder<BYTE> pb(new BYTE[cb]);
8644 MethodDataInterfaceImpl * pData = new (pb.GetValue()) MethodDataInterfaceImpl(rgDeclTypeIDs, cDeclTypeIDs, hDecl, hImpl);
8645 pb.SuppressRelease();
8648 } // MethodTable::GetMethodDataHelper
8650 //==========================================================================================
8651 // The fCanCache argument determines if the resulting MethodData object can
8652 // be added to the global MethodDataCache. This is used when requesting a
8653 // MethodData object for a type currently being built.
8654 MethodTable::MethodData *MethodTable::GetMethodDataHelper(MethodTable *pMTDecl,
8655 MethodTable *pMTImpl,
8660 WRAPPER(GC_TRIGGERS);
8661 PRECONDITION(CheckPointer(pMTDecl));
8662 PRECONDITION(CheckPointer(pMTImpl));
8663 PRECONDITION(pMTDecl == pMTImpl ||
8664 (pMTDecl->IsInterface() && !pMTImpl->IsInterface()));
8667 //@TODO: Must adjust this to use an alternate allocator so that we don't
8668 //@TODO: potentially cause deadlocks on the debug thread.
8669 SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE;
8671 if (s_fUseMethodDataCache) {
8672 MethodData *pData = FindMethodDataHelper(pMTDecl, pMTImpl);
8673 if (pData != NULL) {
8678 // If we get here, there are no entries in the cache.
8679 MethodData *pData = NULL;
8680 if (pMTDecl == pMTImpl) {
8681 if (pMTDecl->IsInterface()) {
8682 pData = new MethodDataInterface(pMTDecl);
8685 UINT32 cb = MethodDataObject::GetObjectSize(pMTDecl);
8686 NewHolder<BYTE> pb(new BYTE[cb]);
8687 MethodDataHolder h(FindParentMethodDataHelper(pMTDecl));
8688 pData = new (pb.GetValue()) MethodDataObject(pMTDecl, h.GetValue());
8689 pb.SuppressRelease();
8693 pData = GetMethodDataHelper(
8700 // Insert in the cache if it is active.
8701 if (fCanCache && s_fUseMethodDataCache) {
8702 s_pMethodDataCache->Insert(pData);
8705 // Do not AddRef, already initialized to 1.
8709 //==========================================================================================
8710 // The fCanCache argument determines if the resulting MethodData object can
8711 // be added to the global MethodDataCache. This is used when requesting a
8712 // MethodData object for a type currently being built.
8713 MethodTable::MethodData *MethodTable::GetMethodData(MethodTable *pMTDecl,
8714 MethodTable *pMTImpl,
8719 WRAPPER(GC_TRIGGERS);
8722 MethodDataWrapper hData(GetMethodDataHelper(pMTDecl, pMTImpl, fCanCache));
8723 hData.SuppressRelease();
8727 //==========================================================================================
8728 // This method does not cache the resulting MethodData object in the global MethodDataCache.
8729 MethodTable::MethodData *
8730 MethodTable::GetMethodData(
8731 const DispatchMapTypeID * rgDeclTypeIDs,
8732 UINT32 cDeclTypeIDs,
8733 MethodTable * pMTDecl,
8734 MethodTable * pMTImpl)
8738 WRAPPER(GC_TRIGGERS);
8739 PRECONDITION(pMTDecl != pMTImpl);
8740 PRECONDITION(pMTDecl->IsInterface());
8741 PRECONDITION(!pMTImpl->IsInterface());
8744 MethodDataWrapper hData(GetMethodDataHelper(rgDeclTypeIDs, cDeclTypeIDs, pMTDecl, pMTImpl));
8745 hData.SuppressRelease();
8749 //==========================================================================================
8750 // The fCanCache argument determines if the resulting MethodData object can
8751 // be added to the global MethodDataCache. This is used when requesting a
8752 // MethodData object for a type currently being built.
8753 MethodTable::MethodData *MethodTable::GetMethodData(MethodTable *pMT,
8756 WRAPPER_NO_CONTRACT;
8757 return GetMethodData(pMT, pMT, fCanCache);
8760 //==========================================================================================
8761 MethodTable::MethodIterator::MethodIterator(MethodTable *pMTDecl, MethodTable *pMTImpl)
8763 WRAPPER_NO_CONTRACT;
8764 Init(pMTDecl, pMTImpl);
8767 //==========================================================================================
8768 MethodTable::MethodIterator::MethodIterator(MethodTable *pMT)
8770 WRAPPER_NO_CONTRACT;
8774 //==========================================================================================
8775 MethodTable::MethodIterator::MethodIterator(MethodData *pMethodData)
8780 PRECONDITION(CheckPointer(pMethodData));
8783 m_pMethodData = pMethodData;
8784 m_pMethodData->AddRef();
8786 m_iMethods = (INT32)m_pMethodData->GetNumMethods();
8789 //==========================================================================================
8790 MethodTable::MethodIterator::MethodIterator(const MethodIterator &it)
8792 WRAPPER_NO_CONTRACT;
8793 m_pMethodData = it.m_pMethodData;
8794 m_pMethodData->AddRef();
8796 m_iMethods = it.m_iMethods;
8799 //==========================================================================================
8800 void MethodTable::MethodIterator::Init(MethodTable *pMTDecl, MethodTable *pMTImpl)
8804 WRAPPER(GC_TRIGGERS);
8805 INJECT_FAULT(COMPlusThrowOM());
8806 PRECONDITION(CheckPointer(pMTDecl));
8807 PRECONDITION(CheckPointer(pMTImpl));
8810 LOG((LF_LOADER, LL_INFO10000, "SD: MT::MethodIterator created for %s.\n", pMTDecl->GetDebugClassName()));
8812 m_pMethodData = MethodTable::GetMethodData(pMTDecl, pMTImpl);
8813 CONSISTENCY_CHECK(CheckPointer(m_pMethodData));
8815 m_iMethods = (INT32)m_pMethodData->GetNumMethods();
8817 #endif // !DACCESS_COMPILE
8819 //==========================================================================================
8821 void MethodTable::IntroducedMethodIterator::SetChunk(MethodDescChunk * pChunk)
8823 LIMITED_METHOD_CONTRACT;
8827 m_pMethodDesc = pChunk->GetFirstMethodDesc();
8830 m_pChunkEnd = dac_cast<TADDR>(pChunk) + pChunk->SizeOf();
8834 m_pMethodDesc = NULL;
8838 //==========================================================================================
8840 MethodDesc * MethodTable::IntroducedMethodIterator::GetFirst(MethodTable *pMT)
8842 LIMITED_METHOD_CONTRACT;
8843 MethodDescChunk * pChunk = pMT->GetClass()->GetChunks();
8844 return (pChunk != NULL) ? pChunk->GetFirstMethodDesc() : NULL;
8847 //==========================================================================================
8848 MethodDesc * MethodTable::IntroducedMethodIterator::GetNext(MethodDesc * pMD)
8850 WRAPPER_NO_CONTRACT;
8852 MethodDescChunk * pChunk = pMD->GetMethodDescChunk();
8854 // Check whether the next MethodDesc is still within the bounds of the current chunk
8855 TADDR pNext = dac_cast<TADDR>(pMD) + pMD->SizeOf();
8856 TADDR pEnd = dac_cast<TADDR>(pChunk) + pChunk->SizeOf();
8860 // Just skip to the next method in the same chunk
8861 pMD = PTR_MethodDesc(pNext);
8865 _ASSERTE(pNext == pEnd);
8867 // We have walked all the methods in the current chunk. Move on
8868 // to the next chunk.
8869 pChunk = pChunk->GetNextChunk();
8871 pMD = (pChunk != NULL) ? pChunk->GetFirstMethodDesc() : NULL;
8877 //==========================================================================================
8878 PTR_GuidInfo MethodTable::GetGuidInfo()
8888 #ifdef FEATURE_COMINTEROP
8891 return *GetGuidInfoPtr();
8893 #endif // FEATURE_COMINTEROP
8894 _ASSERTE(GetClass());
8895 return GetClass()->GetGuidInfo();
8898 //==========================================================================================
8899 void MethodTable::SetGuidInfo(GuidInfo* pGuidInfo)
8909 #ifndef DACCESS_COMPILE
8911 #ifdef FEATURE_COMINTEROP
8914 *EnsureWritablePages(GetGuidInfoPtr()) = pGuidInfo;
8917 #endif // FEATURE_COMINTEROP
8918 _ASSERTE(GetClass());
8919 GetClass()->SetGuidInfo (pGuidInfo);
8921 #endif // DACCESS_COMPILE
8924 #if defined(FEATURE_COMINTEROP) && !defined(DACCESS_COMPILE)
8926 //==========================================================================================
8927 RCWPerTypeData *MethodTable::CreateRCWPerTypeData(bool bThrowOnOOM)
8931 if (bThrowOnOOM) THROWS; else NOTHROW;
8934 PRECONDITION(HasRCWPerTypeData());
8938 AllocMemTracker amTracker;
8940 RCWPerTypeData *pData;
8943 TaggedMemAllocPtr ptr = GetLoaderAllocator()->GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(RCWPerTypeData)));
8944 pData = (RCWPerTypeData *)amTracker.Track(ptr);
8948 TaggedMemAllocPtr ptr = GetLoaderAllocator()->GetLowFrequencyHeap()->AllocMem_NoThrow(S_SIZE_T(sizeof(RCWPerTypeData)));
8949 pData = (RCWPerTypeData *)amTracker.Track_NoThrow(ptr);
8956 // memory is zero-inited which means that nothing has been computed yet
8957 _ASSERTE(pData->m_dwFlags == 0);
8959 RCWPerTypeData **pDataPtr = GetRCWPerTypeDataPtr();
8963 EnsureWritablePages(pDataPtr);
8967 if (!EnsureWritablePagesNoThrow(pDataPtr, sizeof(*pDataPtr)))
8973 if (InterlockedCompareExchangeT(pDataPtr, pData, NULL) == NULL)
8975 amTracker.SuppressRelease();
8979 // another thread already published the pointer
8986 //==========================================================================================
8987 RCWPerTypeData *MethodTable::GetRCWPerTypeData(bool bThrowOnOOM /*= true*/)
8991 if (bThrowOnOOM) THROWS; else NOTHROW;
8997 if (!HasRCWPerTypeData())
9000 RCWPerTypeData *pData = *GetRCWPerTypeDataPtr();
9003 // creation is factored out into a separate routine to avoid paying the EH cost here
9004 pData = CreateRCWPerTypeData(bThrowOnOOM);
9010 #endif // FEATURE_COMINTEROP && !DACCESS_COMPILE
9012 //==========================================================================================
9013 CHECK MethodTable::CheckActivated()
9015 WRAPPER_NO_CONTRACT;
9019 CHECK(GetModule()->CheckActivated());
9022 // <TODO> Check all generic type parameters as well </TODO>
9028 // Optimization intended for EnsureInstanceActive, IsIntrospectionOnly, EnsureActive only
9029 #pragma optimize("t", on)
9031 //==========================================================================================
9033 #ifndef DACCESS_COMPILE
9034 VOID MethodTable::EnsureInstanceActive()
9044 Module * pModule = GetModule();
9045 pModule->EnsureActive();
9047 MethodTable * pMT = this;
9048 while (pMT->HasModuleDependencies())
9050 pMT = pMT->GetParentMethodTable();
9051 _ASSERTE(pMT != NULL);
9053 Module * pParentModule = pMT->GetModule();
9054 if (pParentModule != pModule)
9056 pModule = pParentModule;
9057 pModule->EnsureActive();
9061 if (HasInstantiation())
9063 // This is going to go recursive, so we need to use an interior stack probe
9065 INTERIOR_STACK_PROBE(GetThread());
9067 Instantiation inst = GetInstantiation();
9068 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
9070 TypeHandle thArg = inst[i];
9071 if (!thArg.IsTypeDesc())
9073 thArg.AsMethodTable()->EnsureInstanceActive();
9077 END_INTERIOR_STACK_PROBE;
9081 #endif //!DACCESS_COMPILE
9083 //==========================================================================================
9084 BOOL MethodTable::IsIntrospectionOnly()
9086 WRAPPER_NO_CONTRACT;
9087 return GetAssembly()->IsIntrospectionOnly();
9090 //==========================================================================================
9091 BOOL MethodTable::ContainsIntrospectionOnlyTypes()
9102 if (IsIntrospectionOnly())
9105 // check the instantiation
9106 Instantiation inst = GetInstantiation();
9107 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
9109 CONSISTENCY_CHECK(!inst[i].IsEncodedFixup());
9110 if (inst[i].ContainsIntrospectionOnlyTypes())
9117 //==========================================================================================
9118 #ifndef DACCESS_COMPILE
9119 VOID MethodTable::EnsureActive()
9121 WRAPPER_NO_CONTRACT;
9123 GetModule()->EnsureActive();
9128 #pragma optimize("", on)
9131 //==========================================================================================
9132 CHECK MethodTable::CheckInstanceActivated()
9134 WRAPPER_NO_CONTRACT;
9139 Module * pModule = GetModule();
9140 CHECK(pModule->CheckActivated());
9142 MethodTable * pMT = this;
9143 while (pMT->HasModuleDependencies())
9145 pMT = pMT->GetParentMethodTable();
9146 _ASSERTE(pMT != NULL);
9148 Module * pParentModule = pMT->GetModule();
9149 if (pParentModule != pModule)
9151 pModule = pParentModule;
9152 CHECK(pModule->CheckActivated());
9159 #ifdef DACCESS_COMPILE
9161 //==========================================================================================
9163 MethodTable::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
9165 WRAPPER_NO_CONTRACT;
9167 DAC_CHECK_ENUM_THIS();
9168 EMEM_OUT(("MEM: %p MethodTable\n", dac_cast<TADDR>(this)));
9170 DWORD size = GetEndOffsetOfOptionalMembers();
9171 DacEnumMemoryRegion(dac_cast<TADDR>(this), size);
9173 if (!IsCanonicalMethodTable())
9175 PTR_MethodTable pMTCanonical = GetCanonicalMethodTable();
9177 if (pMTCanonical.IsValid())
9179 pMTCanonical->EnumMemoryRegions(flags);
9184 PTR_EEClass pClass = GetClass();
9186 if (pClass.IsValid())
9190 // This is kind of a workaround, in that ArrayClass is derived from EEClass, but
9191 // it's not virtual, we only cast if the IsArray() predicate holds above.
9192 // For minidumps, DAC will choke if we don't have the full size given
9193 // by ArrayClass available. If ArrayClass becomes more complex, it
9194 // should get it's own EnumMemoryRegions().
9195 DacEnumMemoryRegion(dac_cast<TADDR>(pClass), sizeof(ArrayClass));
9197 pClass->EnumMemoryRegions(flags, this);
9201 PTR_MethodTable pMTParent = GetParentMethodTable();
9203 if (pMTParent.IsValid())
9205 pMTParent->EnumMemoryRegions(flags);
9208 if (HasNonVirtualSlotsArray())
9210 DacEnumMemoryRegion(dac_cast<TADDR>(GetNonVirtualSlotsArray()), GetNonVirtualSlotsArraySize());
9213 if (HasInterfaceMap())
9215 #ifdef FEATURE_COMINTEROP
9216 if (HasDynamicInterfaceMap())
9217 DacEnumMemoryRegion(dac_cast<TADDR>(GetInterfaceMap()) - sizeof(DWORD_PTR), GetInterfaceMapSize());
9219 #endif // FEATURE_COMINTEROP
9220 DacEnumMemoryRegion(dac_cast<TADDR>(GetInterfaceMap()), GetInterfaceMapSize());
9222 EnumMemoryRegionsForExtraInterfaceInfo();
9225 if (HasPerInstInfo() != NULL)
9227 DacEnumMemoryRegion(dac_cast<TADDR>(GetPerInstInfo()) - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo));
9230 if (GetDictionary() != NULL)
9232 DacEnumMemoryRegion(dac_cast<TADDR>(GetDictionary()), GetInstAndDictSize());
9235 VtableIndirectionSlotIterator it = IterateVtableIndirectionSlots();
9238 DacEnumMemoryRegion(dac_cast<TADDR>(it.GetIndirectionSlot()), it.GetSize());
9241 PTR_MethodTableWriteableData pWriteableData = ReadPointer(this, &MethodTable::m_pWriteableData);
9242 if (pWriteableData.IsValid())
9244 pWriteableData.EnumMem();
9247 if (flags != CLRDATA_ENUM_MEM_MINI && flags != CLRDATA_ENUM_MEM_TRIAGE)
9249 DispatchMap * pMap = GetDispatchMap();
9252 pMap->EnumMemoryRegions(flags);
9255 } // MethodTable::EnumMemoryRegions
9257 #endif // DACCESS_COMPILE
9259 //==========================================================================================
9260 BOOL MethodTable::ContainsGenericMethodVariables()
9271 Instantiation inst = GetInstantiation();
9272 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
9274 CONSISTENCY_CHECK(!inst[i].IsEncodedFixup());
9275 if (inst[i].ContainsGenericVariables(TRUE))
9282 //==========================================================================================
9283 Module *MethodTable::GetDefiningModuleForOpenType()
9290 POSTCONDITION((ContainsGenericVariables() != 0) == (RETVAL != NULL));
9295 if (ContainsGenericVariables())
9297 Instantiation inst = GetInstantiation();
9298 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
9300 // Encoded fixups are never open types
9301 if (!inst[i].IsEncodedFixup())
9303 Module *pModule = inst[i].GetDefiningModuleForOpenType();
9304 if (pModule != NULL)
9313 //==========================================================================================
9314 PCODE MethodTable::GetRestoredSlot(DWORD slotNumber)
9325 // Keep in sync with code:MethodTable::GetRestoredSlotMT
9328 MethodTable * pMT = this;
9331 g_IBCLogger.LogMethodTableAccess(pMT);
9333 pMT = pMT->GetCanonicalMethodTable();
9335 _ASSERTE(pMT != NULL);
9337 PCODE slot = pMT->GetSlot(slotNumber);
9340 #ifdef FEATURE_PREJIT
9341 && !pMT->GetLoaderModule()->IsVirtualImportThunk(slot)
9348 // This is inherited slot that has not been fixed up yet. Find
9349 // the value by walking up the inheritance chain
9350 pMT = pMT->GetParentMethodTable();
9354 //==========================================================================================
9355 MethodTable * MethodTable::GetRestoredSlotMT(DWORD slotNumber)
9366 // Keep in sync with code:MethodTable::GetRestoredSlot
9369 MethodTable * pMT = this;
9372 g_IBCLogger.LogMethodTableAccess(pMT);
9374 pMT = pMT->GetCanonicalMethodTable();
9376 _ASSERTE(pMT != NULL);
9378 PCODE slot = pMT->GetSlot(slotNumber);
9381 #ifdef FEATURE_PREJIT
9382 && !pMT->GetLoaderModule()->IsVirtualImportThunk(slot)
9389 // This is inherited slot that has not been fixed up yet. Find
9390 // the value by walking up the inheritance chain
9391 pMT = pMT->GetParentMethodTable();
9395 //==========================================================================================
9396 MethodDesc * MethodTable::GetParallelMethodDesc(MethodDesc * pDefMD)
9406 return GetMethodDescForSlot(pDefMD->GetSlot());
9409 #ifndef DACCESS_COMPILE
9411 //==========================================================================================
9412 void MethodTable::SetSlot(UINT32 slotNumber, PCODE slotCode)
9421 if (slotNumber < GetNumVirtuals())
9424 // Verify that slots in shared vtable chunks not owned by this methodtable are only ever patched to stable entrypoint.
9425 // This invariant is required to prevent races with code:MethodDesc::SetStableEntryPointInterlocked.
9427 BOOL fSharedVtableChunk = FALSE;
9428 DWORD indirectionIndex = MethodTable::GetIndexOfVtableIndirection(slotNumber);
9430 if (!IsCanonicalMethodTable())
9432 if (GetVtableIndirections()[indirectionIndex].GetValueMaybeNull() == GetCanonicalMethodTable()->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull())
9433 fSharedVtableChunk = TRUE;
9436 if (slotNumber < GetNumParentVirtuals())
9438 if (GetVtableIndirections()[indirectionIndex].GetValueMaybeNull() == GetParentMethodTable()->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull())
9439 fSharedVtableChunk = TRUE;
9442 if (fSharedVtableChunk)
9444 MethodDesc* pMD = GetMethodDescForSlotAddress(slotCode);
9445 #ifndef FEATURE_INTERPRETER
9446 // TBD: Make this take a "stable" debug arg, determining whether to make these assertions.
9447 _ASSERTE(pMD->HasStableEntryPoint());
9448 _ASSERTE(pMD->GetStableEntryPoint() == slotCode);
9449 #endif // FEATURE_INTERPRETER
9454 // IBC logging is not needed here - slots in ngen images are immutable.
9457 // Ensure on ARM that all target addresses are marked as thumb code.
9458 _ASSERTE(IsThumbCode(slotCode));
9461 *GetSlotPtrRaw(slotNumber) = slotCode;
9464 //==========================================================================================
9465 BOOL MethodTable::HasExplicitOrImplicitPublicDefaultConstructor()
9476 // valuetypes have public default ctors implicitly
9480 if (!HasDefaultConstructor())
9485 MethodDesc * pCanonMD = GetMethodDescForSlot(GetDefaultConstructorSlot());
9486 return pCanonMD != NULL && pCanonMD->IsPublic();
9489 //==========================================================================================
9490 MethodDesc *MethodTable::GetDefaultConstructor()
9492 WRAPPER_NO_CONTRACT;
9493 _ASSERTE(HasDefaultConstructor());
9494 MethodDesc *pCanonMD = GetMethodDescForSlot(GetDefaultConstructorSlot());
9495 // The default constructor for a value type is an instantiating stub.
9496 // The easiest way to find the right stub is to use the following function,
9497 // which in the simple case of the default constructor for a class simply
9498 // returns pCanonMD immediately.
9499 return MethodDesc::FindOrCreateAssociatedMethodDesc(pCanonMD,
9501 FALSE /* no BoxedEntryPointStub */,
9502 Instantiation(), /* no method instantiation */
9503 FALSE /* no allowInstParam */);
9506 //==========================================================================================
9507 // Finds the (non-unboxing) MethodDesc that implements the interface method pInterfaceMD.
9509 // Note our ability to resolve constraint methods is affected by the degree of code sharing we are
9510 // performing for generic code.
9513 // MethodDesc which can be used as unvirtualized call. Returns NULL if VSD has to be used.
9515 MethodTable::TryResolveConstraintMethodApprox(
9516 TypeHandle thInterfaceType,
9517 MethodDesc * pInterfaceMD,
9518 BOOL * pfForceUseRuntimeLookup) // = NULL
9525 // We can't resolve constraint calls effectively for reference types, and there's
9526 // not a lot of perf. benefit in doing it anyway.
9530 LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintmethodApprox: not a value type %s\n", GetDebugClassName()));
9534 // 1. Find the (possibly generic) method that would implement the
9535 // constraint if we were making a call on a boxed value type.
9537 MethodTable * pCanonMT = GetCanonicalMethodTable();
9539 MethodDesc * pGenInterfaceMD = pInterfaceMD->StripMethodInstantiation();
9540 MethodDesc * pMD = NULL;
9541 if (pGenInterfaceMD->IsInterface())
9543 // Sometimes (when compiling shared generic code)
9544 // we don't have enough exact type information at JIT time
9545 // even to decide whether we will be able to resolve to an unboxed entry point...
9546 // To cope with this case we always go via the helper function if there's any
9547 // chance of this happening by checking for all interfaces which might possibly
9548 // be compatible with the call (verification will have ensured that
9549 // at least one of them will be)
9551 // Enumerate all potential interface instantiations
9552 MethodTable::InterfaceMapIterator it = pCanonMT->IterateInterfaceMap();
9553 DWORD cPotentialMatchingInterfaces = 0;
9556 TypeHandle thPotentialInterfaceType(it.GetInterface());
9557 if (thPotentialInterfaceType.AsMethodTable()->GetCanonicalMethodTable() ==
9558 thInterfaceType.AsMethodTable()->GetCanonicalMethodTable())
9560 cPotentialMatchingInterfaces++;
9561 pMD = pCanonMT->GetMethodDescForInterfaceMethod(thPotentialInterfaceType, pGenInterfaceMD);
9563 // See code:#TryResolveConstraintMethodApprox_DoNotReturnParentMethod
9564 if ((pMD != NULL) && !pMD->GetMethodTable()->IsValueType())
9566 LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: %s::%s not a value type method\n",
9567 pMD->m_pszDebugClassName, pMD->m_pszDebugMethodName));
9573 _ASSERTE_MSG((cPotentialMatchingInterfaces != 0),
9574 "At least one interface has to implement the method, otherwise there's a bug in JIT/verification.");
9576 if (cPotentialMatchingInterfaces > 1)
9577 { // We have more potentially matching interfaces
9578 MethodTable * pInterfaceMT = thInterfaceType.GetMethodTable();
9579 _ASSERTE(pInterfaceMT->HasInstantiation());
9581 BOOL fIsExactMethodResolved = FALSE;
9583 if (!pInterfaceMT->IsSharedByGenericInstantiations() &&
9584 !pInterfaceMT->IsGenericTypeDefinition() &&
9585 !this->IsSharedByGenericInstantiations() &&
9586 !this->IsGenericTypeDefinition())
9587 { // We have exact interface and type instantiations (no generic variables and __Canon used
9589 if (this->CanCastToInterface(pInterfaceMT))
9591 // We can resolve to exact method
9592 pMD = this->GetMethodDescForInterfaceMethod(pInterfaceMT, pInterfaceMD);
9593 _ASSERTE(pMD != NULL);
9594 fIsExactMethodResolved = TRUE;
9598 if (!fIsExactMethodResolved)
9599 { // We couldn't resolve the interface statically
9600 _ASSERTE(pfForceUseRuntimeLookup != NULL);
9601 // Notify the caller that it should use runtime lookup
9602 // Note that we can leave pMD incorrect, because we will use runtime lookup
9603 *pfForceUseRuntimeLookup = TRUE;
9608 // If we can resolve the interface exactly then do so (e.g. when doing the exact
9609 // lookup at runtime, or when not sharing generic code).
9610 if (pCanonMT->CanCastToInterface(thInterfaceType.GetMethodTable()))
9612 pMD = pCanonMT->GetMethodDescForInterfaceMethod(thInterfaceType, pGenInterfaceMD);
9615 LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: failed to find method desc for interface method\n"));
9620 else if (pGenInterfaceMD->IsVirtual())
9622 if (pGenInterfaceMD->HasNonVtableSlot() && pGenInterfaceMD->GetMethodTable()->IsValueType())
9623 { // GetMethodDescForSlot would AV for this slot
9624 // We can get here for (invalid and unverifiable) IL:
9625 // constrained. int32
9626 // callvirt System.Int32::GetHashCode()
9627 pMD = pGenInterfaceMD;
9631 pMD = GetMethodDescForSlot(pGenInterfaceMD->GetSlot());
9636 // The pMD will be NULL if calling a non-virtual instance
9637 // methods on System.Object, i.e. when these are used as a constraint.
9642 { // Fall back to VSD
9646 //#TryResolveConstraintMethodApprox_DoNotReturnParentMethod
9647 // Only return a method if the value type itself declares the method,
9648 // otherwise we might get a method from Object or System.ValueType
9649 if (!pMD->GetMethodTable()->IsValueType())
9650 { // Fall back to VSD
9654 // We've resolved the method, ignoring its generic method arguments
9655 // If the method is a generic method then go and get the instantiated descriptor
9656 pMD = MethodDesc::FindOrCreateAssociatedMethodDesc(
9659 FALSE /* no BoxedEntryPointStub */ ,
9660 pInterfaceMD->GetMethodInstantiation(),
9661 FALSE /* no allowInstParam */ );
9663 // FindOrCreateAssociatedMethodDesc won't return an BoxedEntryPointStub.
9664 _ASSERTE(pMD != NULL);
9665 _ASSERTE(!pMD->IsUnboxingStub());
9668 } // MethodTable::TryResolveConstraintMethodApprox
9670 //==========================================================================================
9671 // Make best-case effort to obtain an image name for use in an error message.
9673 // This routine must expect to be called before the this object is fully loaded.
9674 // It can return an empty if the name isn't available or the object isn't initialized
9675 // enough to get a name, but it mustn't crash.
9676 //==========================================================================================
9677 LPCWSTR MethodTable::GetPathForErrorMessages()
9683 INJECT_FAULT(COMPlusThrowOM(););
9687 Module *pModule = GetModule();
9691 return pModule->GetPathForErrorMessages();
9700 bool MethodTable::ClassRequiresUnmanagedCodeCheck()
9702 LIMITED_METHOD_CONTRACT;
9709 BOOL MethodTable::Validate()
9711 LIMITED_METHOD_CONTRACT;
9713 ASSERT_AND_CHECK(SanityCheck());
9716 if (m_pWriteableData.IsNull())
9718 _ASSERTE(IsAsyncPinType());
9722 MethodTableWriteableData *pWriteableData = m_pWriteableData.GetValue();
9723 DWORD dwLastVerifiedGCCnt = pWriteableData->m_dwLastVerifedGCCnt;
9724 // Here we used to assert that (dwLastVerifiedGCCnt <= GCHeapUtilities::GetGCHeap()->GetGcCount()) but
9725 // this is no longer true because with background gc. Since the purpose of having
9726 // m_dwLastVerifedGCCnt is just to only verify the same method table once for each GC
9727 // I am getting rid of the assert.
9728 if (g_pConfig->FastGCStressLevel () > 1 && dwLastVerifiedGCCnt == GCHeapUtilities::GetGCHeap()->GetGcCount())
9734 if (!IsAsyncPinType())
9738 ASSERT_AND_CHECK(!"Detected use of a corrupted OBJECTREF. Possible GC hole.");
9742 else if (!IsCanonicalMethodTable())
9744 // Non-canonical method tables has to have non-empty instantiation
9745 if (GetInstantiation().IsEmpty())
9747 ASSERT_AND_CHECK(!"Detected use of a corrupted OBJECTREF. Possible GC hole.");
9752 // It is not a fatal error to fail the update the counter. We will run slower and retry next time,
9753 // but the system will function properly.
9754 if (EnsureWritablePagesNoThrow(pWriteableData, sizeof(MethodTableWriteableData)))
9755 pWriteableData->m_dwLastVerifedGCCnt = GCHeapUtilities::GetGCHeap()->GetGcCount();
9761 #endif // !DACCESS_COMPILE
9763 NOINLINE BYTE *MethodTable::GetLoaderAllocatorObjectForGC()
9765 WRAPPER_NO_CONTRACT;
9766 if (!Collectible() || ((PTR_AppDomain)GetLoaderModule()->GetDomain())->NoAccessToHandleTable())
9770 BYTE * retVal = *(BYTE**)GetLoaderAllocatorObjectHandle();
9774 #ifdef FEATURE_COMINTEROP
9775 //==========================================================================================
9776 BOOL MethodTable::IsWinRTRedirectedDelegate()
9778 LIMITED_METHOD_DAC_CONTRACT;
9785 return !!WinRTDelegateRedirector::ResolveRedirectedDelegate(this, nullptr);
9788 //==========================================================================================
9789 BOOL MethodTable::IsWinRTRedirectedInterface(TypeHandle::InteropKind interopKind)
9791 LIMITED_METHOD_CONTRACT;
9796 if (!HasRCWPerTypeData())
9798 // All redirected interfaces have per-type RCW data
9802 #ifdef DACCESS_COMPILE
9803 RCWPerTypeData *pData = NULL;
9804 #else // DACCESS_COMPILE
9805 // We want to keep this function LIMITED_METHOD_CONTRACT so we call GetRCWPerTypeData with
9806 // the non-throwing flag. pData can be NULL if it could not be allocated.
9807 RCWPerTypeData *pData = GetRCWPerTypeData(false);
9808 #endif // DACCESS_COMPILE
9810 DWORD dwFlags = (pData != NULL ? pData->m_dwFlags : 0);
9811 if ((dwFlags & RCWPerTypeData::InterfaceFlagsInited) == 0)
9813 dwFlags = RCWPerTypeData::InterfaceFlagsInited;
9815 if (WinRTInterfaceRedirector::ResolveRedirectedInterface(this, NULL))
9817 dwFlags |= RCWPerTypeData::IsRedirectedInterface;
9819 else if (HasSameTypeDefAs(MscorlibBinder::GetExistingClass(CLASS__ICOLLECTIONGENERIC)) ||
9820 HasSameTypeDefAs(MscorlibBinder::GetExistingClass(CLASS__IREADONLYCOLLECTIONGENERIC)) ||
9821 this == MscorlibBinder::GetExistingClass(CLASS__ICOLLECTION))
9823 dwFlags |= RCWPerTypeData::IsICollectionGeneric;
9828 FastInterlockOr(&pData->m_dwFlags, dwFlags);
9832 if ((dwFlags & RCWPerTypeData::IsRedirectedInterface) != 0)
9835 if (interopKind == TypeHandle::Interop_ManagedToNative)
9837 // ICollection<T> is redirected in the managed->WinRT direction (i.e. we have stubs
9838 // that implement ICollection<T> methods in terms of IVector/IMap), but it is not
9839 // treated specially in the WinRT->managed direction (we don't build a WinRT vtable
9840 // for a class that only implements ICollection<T>). IReadOnlyCollection<T> is
9841 // treated similarly.
9842 if ((dwFlags & RCWPerTypeData::IsICollectionGeneric) != 0)
9849 #endif // FEATURE_COMINTEROP
9851 #ifdef FEATURE_READYTORUN_COMPILER
9853 static BOOL ComputeIsLayoutFixedInCurrentVersionBubble(MethodTable * pMT)
9855 STANDARD_VM_CONTRACT;
9857 // Primitive types and enums have fixed layout
9858 if (pMT->IsTruePrimitive() || pMT->IsEnum())
9861 if (!pMT->GetModule()->IsInCurrentVersionBubble())
9863 if (!pMT->IsValueType())
9865 // Eventually, we may respect the non-versionable attribute for reference types too. For now, we are going
9866 // to play is safe and ignore it.
9870 // Valuetypes with non-versionable attribute are candidates for fixed layout. Reject the rest.
9871 if (pMT->GetModule()->GetMDImport()->GetCustomAttributeByName(pMT->GetCl(),
9872 NONVERSIONABLE_TYPE, NULL, NULL) != S_OK)
9878 // If the above condition passed, check that all instance fields have fixed layout as well. In particular,
9879 // it is important for generic types with non-versionable layout (e.g. Nullable<T>)
9880 ApproxFieldDescIterator fieldIterator(pMT, ApproxFieldDescIterator::INSTANCE_FIELDS);
9881 for (FieldDesc *pFD = fieldIterator.Next(); pFD != NULL; pFD = fieldIterator.Next())
9883 if (pFD->GetFieldType() != ELEMENT_TYPE_VALUETYPE)
9886 MethodTable * pFieldMT = pFD->GetApproxFieldTypeHandleThrowing().AsMethodTable();
9887 if (!pFieldMT->IsLayoutFixedInCurrentVersionBubble())
9895 // Is field layout in this type fixed within the current version bubble?
9896 // This check does not take the inheritance chain into account.
9898 BOOL MethodTable::IsLayoutFixedInCurrentVersionBubble()
9900 STANDARD_VM_CONTRACT;
9902 const MethodTableWriteableData * pWriteableData = GetWriteableData();
9903 if (!(pWriteableData->m_dwFlags & MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixedComputed))
9905 MethodTableWriteableData * pWriteableDataForWrite = GetWriteableDataForWrite();
9906 if (ComputeIsLayoutFixedInCurrentVersionBubble(this))
9907 *EnsureWritablePages(&pWriteableDataForWrite->m_dwFlags) |= MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixed;
9908 *EnsureWritablePages(&pWriteableDataForWrite->m_dwFlags) |= MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixedComputed;
9911 return (pWriteableData->m_dwFlags & MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixed) != 0;
9915 // Is field layout of the inheritance chain fixed within the current version bubble?
9917 BOOL MethodTable::IsInheritanceChainLayoutFixedInCurrentVersionBubble()
9919 STANDARD_VM_CONTRACT;
9921 // This method is not expected to be called for value types
9922 _ASSERTE(!IsValueType());
9924 MethodTable * pMT = this;
9926 while ((pMT != g_pObjectClass) && (pMT != NULL))
9928 if (!pMT->IsLayoutFixedInCurrentVersionBubble())
9931 pMT = pMT->GetParentMethodTable();
9936 #endif // FEATURE_READYTORUN_COMPILER