Description: This change replaces the hashtable of types that we dynamically build at module load time with a statically built hashtable at crossgen time, for R2R modules. Typedef tokens and exported type tokens are stored in that hashtable, hashed by their full type name.
Changes include:
- Adding support for NativeFormat hashtables (NativeFormat hashtable reader and writer + supporting functionalities)
- Changes to prevent the allocation and building of m_pAvailableClasses and m_pAvailableClassesCaseIns for a R2R module
- Logic in crossgen to build the hashtable of types, using the NativeFormat (table saved to a new section)
- Refactoring the type lookup functions to return results in a HashedTypeEntry data structure, which supports token based results, as well as EEClassHashEntry_t* based results
- Changes/Cleanup to the ClassLoader::GetClassValue to support the new lookup type
- Fixed small bugs in the std implementation
- Fallback to old hashtable lookup algorithm supported for:
1) Case insensitive type loading
2) R2R images built with an older version of crossgen (i.e. images that do not contain the static hashtable of types)
Tested following scenarios:
- Token based lookups
- Type name based lookups (Type.GetType API), including case insensitive scenarios
- Nested types
- Forwarded types (and forwarded nested types), up to 2 levels of forwarding
- Old R2R image created with previous version of crossgen
template <class iter, class comp>
iter qsort_partition( iter first, iter last, iter pivot, comp compare )
{
- swap(pivot, last - 1);
+ iter lastMinusOne = last - 1;
+ swap(pivot, lastMinusOne);
+
// Pivot is at end
pivot = last - 1;
namespace std
{
//-----------------------------------------------------------------------------------------
+ // TEMPLATE FUNCTION move
+ // Minimal std::move equivalent: casts its argument to an rvalue reference so the value
+ // can be moved from. Placed before swap so that swap's std::move calls resolve to it.
+ template<class T> inline
+ typename remove_reference<T>::type&&
+ move(T&& arg)
+ { // forward _Arg as movable
+ return ((typename remove_reference<T>::type&&)arg);
+ }
+
+ //-----------------------------------------------------------------------------------------
// TEMPLATE FUNCTION swap (from <algorithm>)
// Exchanges the values stored at left and right via a temporary, using move semantics
// to avoid deep copies.
template<class T> inline
void swap(T& left, T& right)
{ // exchange values stored at left and right
- T tmp = move(left);
- left = move(right);
- right = move(tmp);
+ // Explicitly qualify with std:: so this minimal implementation's move (defined above
+ // in this namespace) is selected, rather than an unintended overload found through
+ // argument-dependent lookup.
+ T tmp = std::move(left);
+ left = std::move(right);
+ right = std::move(tmp);
}
//-----------------------------------------------------------------------------------------
{ // forward _Arg, given explicitly specified type parameter
return ((T&&)_Arg);
}
-
- //-----------------------------------------------------------------------------------------
- // TEMPLATE FUNCTION move
- template<class T> inline
- typename remove_reference<T>::type&&
- move(T&& arg)
- { // forward _Arg as movable
- return ((typename remove_reference<T>::type&&)arg);
- }
}
namespace std
return myiter.m_ptr - right.m_ptr;
}
- _MyIter operator+ (int n)
+ _MyIter operator+ (ptrdiff_t n)
{
_MyIter myiter(m_ptr);
myiter.m_ptr += n;
READYTORUN_SECTION_EXCEPTION_INFO = 104,
READYTORUN_SECTION_DEBUG_INFO = 105,
READYTORUN_SECTION_DELAYLOAD_METHODCALL_THUNKS = 106,
+ READYTORUN_SECTION_AVAILABLE_TYPES = 107,
};
//
CacheManifestFiles();
- CacheManifestExportedTypes(pamTracker);
+ if (!m_pManifest->IsReadyToRun())
+ CacheManifestExportedTypes(pamTracker);
#if !defined(FEATURE_CORECLR) && !defined(CROSSGEN_COMPILE)
GenerateBreadcrumbForServicing();
}
CONTRACTL_END;
- if (!module->IsPersistedObject(module->m_pAvailableClasses)) {
- if (!(module->IsResource()))
- // ! We intentionally do not take the AvailableClass lock here. It creates problems at
- // startup and we haven't yet published the module yet so nobody should be searching it.
- m_pClassLoader->PopulateAvailableClassHashTable(module,
- pamTracker);
+ if (module->m_pAvailableClasses != NULL && !module->IsPersistedObject(module->m_pAvailableClasses))
+ {
+ // ! We intentionally do not take the AvailableClass lock here. It creates problems at
+ // startup and we haven't published the module yet, so nobody should be searching it.
+ m_pClassLoader->PopulateAvailableClassHashTable(module, pamTracker);
}
#define COR_VTABLE_NOT_PTRSIZED COR_VTABLE_64BIT
#endif // !_WIN64
-// Hash table parameter of available classes (name -> module/class) hash
-#define AVAILABLE_CLASSES_HASH_BUCKETS 1024
-#define AVAILABLE_CLASSES_HASH_BUCKETS_COLLECTIBLE 128
-#define PARAMTYPES_HASH_BUCKETS 23
-#define PARAMMETHODS_HASH_BUCKETS 11
-#define METHOD_STUBS_HASH_BUCKETS 11
-
-#define GUID_TO_TYPE_HASH_BUCKETS 16
-
#define CEE_FILE_GEN_GROWTH_COLLECTIBLE 2048
#define NGEN_STATICS_ALLCLASSES_WERE_LOADED -1
m_dwTransientFlags &= ~((DWORD)CLASSES_FREED); // Set flag indicating LookupMaps are now in a consistent and destructable state
+#ifdef FEATURE_READYTORUN
+ if (!HasNativeImage() && !IsResource())
+ m_pReadyToRunInfo = ReadyToRunInfo::Initialize(this, pamTracker);
+#endif
+
// Initialize the instance fields that we need for all non-Resource Modules
if (!IsResource())
{
- if (m_pAvailableClasses == NULL)
+ if (m_pAvailableClasses == NULL && !IsReadyToRun())
{
m_pAvailableClasses = EEClassHashTable::Create(this,
GetAssembly()->IsCollectible() ? AVAILABLE_CLASSES_HASH_BUCKETS_COLLECTIBLE : AVAILABLE_CLASSES_HASH_BUCKETS,
// Set up native image
if (HasNativeImage())
InitializeNativeImage(pamTracker);
-#ifdef FEATURE_READYTORUN
- else
- if (!IsResource())
- m_pReadyToRunInfo = ReadyToRunInfo::Initialize(this, pamTracker);
-#endif
#endif // FEATURE_PREJIT
class TrackingMap;
class PersistentInlineTrackingMap;
+// Hash table parameter of available classes (name -> module/class) hash
+#define AVAILABLE_CLASSES_HASH_BUCKETS 1024
+#define AVAILABLE_CLASSES_HASH_BUCKETS_COLLECTIBLE 128
+#define PARAMTYPES_HASH_BUCKETS 23
+#define PARAMMETHODS_HASH_BUCKETS 11
+#define METHOD_STUBS_HASH_BUCKETS 11
+#define GUID_TO_TYPE_HASH_BUCKETS 16
+
typedef DPTR(PersistentInlineTrackingMap) PTR_PersistentInlineTrackingMap;
&tkType,
&pTypeModule,
&tkExportedType,
- nullptr, // ppClassHashEntry
+ nullptr, // pFoundEntry
nullptr, // pLookInThisModuleOnly
Loader::DontLoad))
{
if (pName->GetTypeModule()) {
if (TypeFromToken(pName->GetTypeToken()) == mdtBaseType)
{
- if (pName->GetBucket())
+ if (!pName->GetBucket().IsNull())
return TRUE;
return FALSE;
}
return FALSE;
}
-EEClassHashEntry_t *ClassLoader::GetClassValue(NameHandleTable nhTable,
- NameHandle *pName,
- HashDatum *pData,
- EEClassHashTable **ppTable,
- Module* pLookInThisModuleOnly)
+void ClassLoader::GetClassValue(NameHandleTable nhTable,
+ NameHandle *pName,
+ HashDatum *pData,
+ EEClassHashTable **ppTable,
+ Module* pLookInThisModuleOnly,
+ HashedTypeEntry* pFoundEntry,
+ Loader::LoadFlag loadFlag,
+ BOOL& needsToBuildHashtable)
{
CONTRACTL
{
mdToken mdEncloser;
EEClassHashEntry_t *pBucket = NULL;
+ needsToBuildHashtable = FALSE;
+
#if _DEBUG
if (pName->GetName()) {
if (pName->GetNameSpace() == NULL)
}
#endif
- if (IsNested(pName, &mdEncloser))
+ BOOL isNested = IsNested(pName, &mdEncloser);
+
+ PTR_Assembly assembly = GetAssembly();
+ PREFIX_ASSUME(assembly != NULL);
+ ModuleIterator i = assembly->IterateModules();
+
+ while (i.Next())
{
- Module *pModule = pName->GetTypeModule();
- PREFIX_ASSUME(pModule != NULL);
- PTR_Assembly assembly=GetAssembly();
- PREFIX_ASSUME(assembly!=NULL);
- ModuleIterator i = assembly->IterateModules();
- Module *pClsModule = NULL;
+ Module * pCurrentClsModule = i.GetModule();
+ PREFIX_ASSUME(pCurrentClsModule != NULL);
- while (i.Next()) {
- pClsModule = i.GetModule();
- if (pClsModule->IsResource())
- continue;
- if (pLookInThisModuleOnly && (pClsModule != pLookInThisModuleOnly))
- continue;
+ if (pCurrentClsModule->IsResource())
+ continue;
+ if (pLookInThisModuleOnly && (pCurrentClsModule != pLookInThisModuleOnly))
+ continue;
+
+ if (nhTable == nhCaseSensitive && pCurrentClsModule->IsReadyToRun() && pCurrentClsModule->GetReadyToRunInfo()->HasHashtableOfTypes())
+ {
+ // For R2R modules, we only search the hashtable of token types stored in the module's image, and don't fallback
+ // to searching m_pAvailableClasses or m_pAvailableClassesCaseIns (in fact, we don't even allocate them for R2R modules).
+ // Also note that type lookups in R2R modules only support case sensitive lookups.
+
+#ifdef FEATURE_READYTORUN
+ mdToken mdFoundTypeToken;
+ if (pCurrentClsModule->GetReadyToRunInfo()->TryLookupTypeTokenFromName(pName, &mdFoundTypeToken))
+ {
+ if (TypeFromToken(mdFoundTypeToken) == mdtExportedType)
+ {
+ mdToken mdUnused;
+ Module * pTargetModule = GetAssembly()->FindModuleByExportedType(mdFoundTypeToken, loadFlag, mdTypeDefNil, &mdUnused);
+
+ pFoundEntry->SetTokenBasedEntryValue(mdFoundTypeToken, pTargetModule);
+ }
+ else
+ {
+ pFoundEntry->SetTokenBasedEntryValue(mdFoundTypeToken, pCurrentClsModule);
+ }
+ return; // Return on the first success
+ }
+#endif
+ }
+ else
+ {
EEClassHashTable* pTable = NULL;
if (nhTable == nhCaseSensitive)
{
- *ppTable = pTable = pClsModule->GetAvailableClassHash();
-
+ *ppTable = pTable = pCurrentClsModule->GetAvailableClassHash();
+
+ if (pTable == NULL && pCurrentClsModule->IsReadyToRun() && !pCurrentClsModule->GetReadyToRunInfo()->HasHashtableOfTypes())
+ {
+ // Old R2R image generated without the hashtable of types.
+ // We fall back to the slow path of creating the hashtable dynamically
+ // at execution time in that scenario. The caller will handle the lazy
+ // construction of the hashtable.
+ pFoundEntry->SetClassHashBasedEntryValue(NULL);
+ needsToBuildHashtable = TRUE;
+ return;
+ }
}
- else {
+ else
+ {
// currently we expect only these two kinds--for DAC builds, nhTable will be nhCaseSensitive
_ASSERTE(nhTable == nhCaseInsensitive);
- *ppTable = pTable = pClsModule->GetAvailableClassCaseInsHash();
+ *ppTable = pTable = pCurrentClsModule->GetAvailableClassCaseInsHash();
- if (pTable == NULL) {
+ if (pTable == NULL)
+ {
// We have not built the table yet - the caller will handle
- return NULL;
+ pFoundEntry->SetClassHashBasedEntryValue(NULL);
+ needsToBuildHashtable = TRUE;
+ return;
}
}
-
_ASSERTE(pTable);
- EEClassHashTable::LookupContext sContext;
- if ((pBucket = pTable->GetValue(pName, pData, TRUE, &sContext)) != NULL) {
- switch (TypeFromToken(pName->GetTypeToken())) {
- case mdtTypeDef:
- while ((!CompareNestedEntryWithTypeDef(pModule->GetMDImport(),
- mdEncloser,
- pClsModule->GetAvailableClassHash(),
- pBucket->GetEncloser())) &&
- (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
- break;
- case mdtTypeRef:
- while ((!CompareNestedEntryWithTypeRef(pModule->GetMDImport(),
- mdEncloser,
- pClsModule->GetAvailableClassHash(),
- pBucket->GetEncloser())) &&
- (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
- break;
- case mdtExportedType:
- while ((!CompareNestedEntryWithExportedType(pModule->GetAssembly()->GetManifestImport(),
- mdEncloser,
- pClsModule->GetAvailableClassHash(),
- pBucket->GetEncloser())) &&
- (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
- break;
- default:
- while ((pBucket->GetEncloser() != pName->GetBucket()) &&
- (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
+ if (isNested)
+ {
+ Module *pNameModule = pName->GetTypeModule();
+ PREFIX_ASSUME(pNameModule != NULL);
+
+ EEClassHashTable::LookupContext sContext;
+ if ((pBucket = pTable->GetValue(pName, pData, TRUE, &sContext)) != NULL)
+ {
+ switch (TypeFromToken(pName->GetTypeToken()))
+ {
+ case mdtTypeDef:
+ while ((!CompareNestedEntryWithTypeDef(pNameModule->GetMDImport(),
+ mdEncloser,
+ pCurrentClsModule->GetAvailableClassHash(),
+ pBucket->GetEncloser())) &&
+ (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
+ break;
+ case mdtTypeRef:
+ while ((!CompareNestedEntryWithTypeRef(pNameModule->GetMDImport(),
+ mdEncloser,
+ pCurrentClsModule->GetAvailableClassHash(),
+ pBucket->GetEncloser())) &&
+ (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
+ break;
+ case mdtExportedType:
+ while ((!CompareNestedEntryWithExportedType(pNameModule->GetAssembly()->GetManifestImport(),
+ mdEncloser,
+ pCurrentClsModule->GetAvailableClassHash(),
+ pBucket->GetEncloser())) &&
+ (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
+ break;
+ default:
+ while ((pBucket->GetEncloser() != pName->GetBucket().GetClassHashBasedEntryValue()) &&
+ (pBucket = pTable->FindNextNestedClass(pName, pData, &sContext)) != NULL);
+ }
}
}
- if (pBucket) // break on the first success
- break;
- }
- }
- else {
- // Check if this non-nested class is in the table of available classes.
- ModuleIterator i = GetAssembly()->IterateModules();
- Module *pModule = NULL;
-
- while (i.Next()) {
- pModule = i.GetModule();
- // i.Next will not return TRUE unless i.GetModule will return non-NULL.
- PREFIX_ASSUME(pModule != NULL);
- if (pModule->IsResource())
- continue;
- if (pLookInThisModuleOnly && (pModule != pLookInThisModuleOnly))
- continue;
-
- PREFIX_ASSUME(pModule!=NULL);
- EEClassHashTable* pTable = NULL;
- if (nhTable == nhCaseSensitive)
- *ppTable = pTable = pModule->GetAvailableClassHash();
- else {
- // currently we support only these two types
- _ASSERTE(nhTable == nhCaseInsensitive);
- *ppTable = pTable = pModule->GetAvailableClassCaseInsHash();
-
- // We have not built the table yet - the caller will handle
- if (pTable == NULL)
- return NULL;
+ else
+ {
+ pBucket = pTable->GetValue(pName, pData, FALSE, NULL);
}
- _ASSERTE(pTable);
- pBucket = pTable->GetValue(pName, pData, FALSE, NULL);
- if (pBucket) // break on the first success
- break;
+ if (pBucket) // Return on the first success
+ {
+ pFoundEntry->SetClassHashBasedEntryValue(pBucket);
+ return;
+ }
}
}
- return pBucket;
+ // No results found: default to a NULL EEClassHashEntry_t result
+ pFoundEntry->SetClassHashBasedEntryValue(NULL);
}
#ifndef DACCESS_COMPILE
}
+// Builds, on demand, the case-sensitive hashtable of available classes
+// (m_pAvailableClasses) for every non-resource module of this assembly, then hashes the
+// manifest's exported types into the manifest module's table. Only needed for ReadyToRun
+// images: either images produced by an older crossgen without the static hashtable of
+// types, or as a prerequisite for a case-insensitive lookup (which piggy-backs on the
+// case-sensitive table).
+void ClassLoader::LazyPopulateCaseSensitiveHashTables()
+{
+ CONTRACTL
+ {
+ INSTANCE_CHECK;
+ THROWS;
+ GC_TRIGGERS;
+ MODE_ANY;
+ INJECT_FAULT(COMPlusThrowOM());
+ }
+ CONTRACTL_END;
+
+ AllocMemTracker amTracker;
+ ModuleIterator i = GetAssembly()->IterateModules();
+
+ // Create a case-sensitive hashtable for each module, and fill it with the module's typedef entries
+ while (i.Next())
+ {
+ Module *pModule = i.GetModule();
+ PREFIX_ASSUME(pModule != NULL);
+ if (pModule->IsResource())
+ continue;
+
+ // Lazy construction of the case-sensitive hashtable of types is *only* a scenario for ReadyToRun images
+ // (either images compiled with an old version of crossgen, or for case-insensitive type lookups in R2R modules)
+ _ASSERT(pModule->IsReadyToRun());
+
+ EEClassHashTable * pNewClassHash = EEClassHashTable::Create(pModule, AVAILABLE_CLASSES_HASH_BUCKETS, FALSE /* bCaseInsensitive */, &amTracker);
+ pModule->SetAvailableClassHash(pNewClassHash);
+
+ PopulateAvailableClassHashTable(pModule, &amTracker);
+ }
+
+ // Add exported types of the manifest module to the hashtable
+ if (!GetAssembly()->GetManifestModule()->IsResource())
+ {
+ IMDInternalImport * pManifestImport = GetAssembly()->GetManifestImport();
+ HENUMInternalHolder phEnum(pManifestImport);
+ phEnum.EnumInit(mdtExportedType, mdTokenNil);
+
+ mdToken mdExportedType;
+ while (pManifestImport->EnumNext(&phEnum, &mdExportedType))
+ AddExportedTypeHaveLock(GetAssembly()->GetManifestModule(), mdExportedType, &amTracker);
+ }
+
+ // The tables are published now; keep the memory allocated for them alive.
+ amTracker.SuppressRelease();
+}
+
void ClassLoader::LazyPopulateCaseInsensitiveHashTables()
{
CONTRACTL
}
CONTRACTL_END;
+ if (!GetAssembly()->GetManifestModule()->IsResource() && GetAssembly()->GetManifestModule()->GetAvailableClassHash() == NULL)
+ {
+ // This is a R2R assembly, and a case insensitive type lookup was triggered.
+ // Construct the case-sensitive table first, since the case-insensitive table
+ // create piggy-backs on the first.
+ LazyPopulateCaseSensitiveHashTables();
+ }
// Add any unhashed modules into our hash tables, and try again.
-
+
+ AllocMemTracker amTracker;
ModuleIterator i = GetAssembly()->IterateModules();
- while (i.Next()) {
+ while (i.Next())
+ {
Module *pModule = i.GetModule();
- PREFIX_ASSUME(pModule!=NULL);
if (pModule->IsResource())
continue;
- if (pModule->GetAvailableClassCaseInsHash() == NULL) {
- AllocMemTracker amTracker;
+ if (pModule->GetAvailableClassCaseInsHash() == NULL)
+ {
EEClassHashTable *pNewClassCaseInsHash = pModule->GetAvailableClassHash()->MakeCaseInsensitiveTable(pModule, &amTracker);
LOG((LF_CLASSLOADER, LL_INFO10, "%s's classes being added to case insensitive hash table\n",
// Module/typedef stuff and give you the actual TypeHandle.
//
//
-BOOL
-ClassLoader::FindClassModuleThrowing(
+BOOL ClassLoader::FindClassModuleThrowing(
const NameHandle * pOriginalName,
TypeHandle * pType,
mdToken * pmdClassToken,
Module ** ppModule,
mdToken * pmdFoundExportedType,
- EEClassHashEntry_t ** ppEntry,
+ HashedTypeEntry * pFoundEntry,
Module * pLookInThisModuleOnly,
Loader::LoadFlag loadFlag)
{
HashDatum Data;
EEClassHashTable * pTable = NULL;
- EEClassHashEntry_t * pBucket = GetClassValue(
- nhTable,
- pName,
- &Data,
- &pTable,
- pLookInThisModuleOnly);
+ HashedTypeEntry foundEntry;
+ BOOL needsToBuildHashtable;
+ GetClassValue(nhTable, pName, &Data, &pTable, pLookInThisModuleOnly, &foundEntry, loadFlag, needsToBuildHashtable);
+
+ // In the case of R2R modules, the search is only performed in the hashtable saved in the
+ // R2R image, and this is why we return (whether we found a valid typedef token or not).
+ // Note: case insensitive searches are not used/supported in R2R images.
+ if (foundEntry.GetEntryType() == HashedTypeEntry::EntryType::IsHashedTokenEntry)
+ {
+ *pType = TypeHandle();
+ HashedTypeEntry::TokenTypeEntry tokenAndModulePair = foundEntry.GetTokenBasedEntryValue();
+ switch (TypeFromToken(tokenAndModulePair.m_TypeToken))
+ {
+ case mdtTypeDef:
+ *pmdClassToken = tokenAndModulePair.m_TypeToken;
+ *pmdFoundExportedType = mdTokenNil;
+ break;
+ case mdtExportedType:
+ *pmdClassToken = mdTokenNil;
+ *pmdFoundExportedType = tokenAndModulePair.m_TypeToken;
+ break;
+ default:
+ _ASSERT(false);
+ return FALSE;
+ }
+ *ppModule = tokenAndModulePair.m_pModule;
+ if (pFoundEntry != NULL)
+ *pFoundEntry = foundEntry;
- if (pBucket == NULL)
+ return TRUE;
+ }
+
+ EEClassHashEntry_t * pBucket = foundEntry.GetClassHashBasedEntryValue();
+
+ if (pBucket == NULL && needsToBuildHashtable)
{
- if (nhTable == nhCaseInsensitive)
- {
- AvailableClasses_LockHolder lh(this);
+ AvailableClasses_LockHolder lh(this);
- // Try again with the lock. This will protect against another thread reallocating
- // the hash table underneath us
- pBucket = GetClassValue(
- nhTable,
- pName,
- &Data,
- &pTable,
- pLookInThisModuleOnly);
+ // Try again with the lock. This will protect against another thread reallocating
+ // the hash table underneath us
+ GetClassValue(nhTable, pName, &Data, &pTable, pLookInThisModuleOnly, &foundEntry, loadFlag, needsToBuildHashtable);
+ pBucket = foundEntry.GetClassHashBasedEntryValue();
#ifndef DACCESS_COMPILE
- if ((pBucket == NULL) && (m_cUnhashedModules > 0))
+ if ((pBucket == NULL) && (m_cUnhashedModules > 0))
+ {
+ _ASSERT(needsToBuildHashtable);
+
+ if (nhTable == nhCaseInsensitive)
{
LazyPopulateCaseInsensitiveHashTables();
-
- // Try yet again with the new classes added
- pBucket = GetClassValue(
- nhTable,
- pName,
- &Data,
- &pTable,
- pLookInThisModuleOnly);
}
-#endif
+ else
+ {
+ // Note: This codepath is only valid for R2R scenarios
+ LazyPopulateCaseSensitiveHashTables();
+ }
+
+ // Try yet again with the new classes added
+ GetClassValue(nhTable, pName, &Data, &pTable, pLookInThisModuleOnly, &foundEntry, loadFlag, needsToBuildHashtable);
+ pBucket = foundEntry.GetClassHashBasedEntryValue();
+ _ASSERT(!needsToBuildHashtable);
}
+#endif
}
if (pBucket == NULL)
_ASSERTE(!t.IsNull());
*pType = t;
- if (ppEntry != NULL)
+ if (pFoundEntry != NULL)
{
- *ppEntry = pBucket;
+ pFoundEntry->SetClassHashBasedEntryValue(pBucket);
}
return TRUE;
}
}
*pType = TypeHandle();
- if (ppEntry != NULL)
+ if (pFoundEntry != NULL)
{
- *ppEntry = pBucket;
+ pFoundEntry->SetClassHashBasedEntryValue(pBucket);
}
return TRUE;
} // ClassLoader::FindClassModuleThrowing
Module * pFoundModule = NULL;
mdToken FoundCl;
- EEClassHashEntry_t * pEntry = NULL;
+ HashedTypeEntry foundEntry;
mdExportedType FoundExportedType = mdTokenNil;
UINT32 cLoopIterations = 0;
&FoundCl,
&pFoundModule,
&FoundExportedType,
- &pEntry,
+ &foundEntry,
pLookInThisModuleOnly,
pName->OKToLoad() ? Loader::Load
: Loader::DontLoad))
{ // Didn't find anything, no point looping indefinitely
break;
}
- _ASSERTE(pEntry != NULL);
+ _ASSERTE(!foundEntry.IsNull());
if (pName->GetTypeToken() == mdtBaseType)
{ // We should return the found bucket in the pName
- pName->SetBucket(pEntry);
+ pName->SetBucket(foundEntry);
}
if (!typeHnd.IsNull())
else
{ //#LoadTypeHandle_TypeForwarded
// pName is a host instance so it's okay to set fields in it in a DAC build
- EEClassHashEntry_t * pBucket = pName->GetBucket();
+ HashedTypeEntry& bucket = pName->GetBucket();
- if (pBucket != NULL)
- { // Reset pName's bucket entry
-
+ // Reset pName's bucket entry
+ if (bucket.GetEntryType() == HashedTypeEntry::IsHashedClassEntry && bucket.GetClassHashBasedEntryValue()->GetEncloser())
+ {
// We will be searching for the type name again, so set the nesting/context type to the
// encloser of just found type
- pName->SetBucket(pBucket->GetEncloser());
+ pName->SetBucket(HashedTypeEntry().SetClassHashBasedEntryValue(bucket.GetClassHashBasedEntryValue()->GetEncloser()));
+ }
+ else
+ {
+ pName->SetBucket(HashedTypeEntry());
}
// Update the class loader for the new module/token pair.
// Replace AvailableClasses Module entry with found TypeHandle
if (!typeHnd.IsNull() &&
typeHnd.IsRestored() &&
- (pEntry != NULL) &&
- (pEntry->GetData() != typeHnd.AsPtr()))
+ foundEntry.GetEntryType() == HashedTypeEntry::EntryType::IsHashedClassEntry &&
+ (foundEntry.GetClassHashBasedEntryValue() != NULL) &&
+ (foundEntry.GetClassHashBasedEntryValue()->GetData() != typeHnd.AsPtr()))
{
- pEntry->SetData(typeHnd.AsPtr());
+ foundEntry.GetClassHashBasedEntryValue()->SetData(typeHnd.AsPtr());
}
#endif // !DACCESS_COMPILE
}
nhCaseInsensitive = 1
} NameHandleTable;
+// Result of a type-name lookup. A lookup can resolve either to a raw metadata token found
+// in a ReadyToRun image's static hashtable of types, or to an EEClassHashEntry_t from the
+// available-classes hashtable built at module load time; this small discriminated union
+// carries whichever form was produced (or a null sentinel when nothing was found).
+class HashedTypeEntry
+{
+public:
+ typedef enum
+ {
+ IsNullEntry, // Uninitialized HashedTypeEntry
+ IsHashedTokenEntry, // Entry is a token value found in a R2R module's static hashtable of types
+ IsHashedClassEntry // Entry is a EEClassHashEntry_t from the hashtable constructed at
+ // module load time (or from the hashtable loaded from the native image)
+ } EntryType;
+
+ // Token + owning module pair used for the IsHashedTokenEntry case.
+ typedef struct
+ {
+ mdToken m_TypeToken;
+ Module * m_pModule;
+ } TokenTypeEntry;
+
+private:
+ EntryType m_EntryType;
+ union
+ {
+ PTR_EEClassHashEntry m_pClassHashEntry;
+ TokenTypeEntry m_TokenAndModulePair;
+ };
+
+public:
+ HashedTypeEntry()
+ {
+ m_EntryType = EntryType::IsNullEntry;
+ m_pClassHashEntry = PTR_NULL;
+ }
+
+ EntryType GetEntryType() { return m_EntryType; }
+ bool IsNull() { return m_EntryType == EntryType::IsNullEntry; }
+
+ // The setters return *this so a freshly constructed entry can be initialized and
+ // passed along in a single expression.
+ const HashedTypeEntry& SetClassHashBasedEntryValue(EEClassHashEntry_t * pClassHashEntry)
+ {
+ m_EntryType = EntryType::IsHashedClassEntry;
+ m_pClassHashEntry = dac_cast<PTR_EEClassHashEntry>(pClassHashEntry);
+ return *this;
+ }
+ EEClassHashEntry_t * GetClassHashBasedEntryValue()
+ {
+ _ASSERT(m_EntryType == EntryType::IsHashedClassEntry);
+ return m_pClassHashEntry;
+ }
+
+ // Note: callers store both mdtTypeDef and mdtExportedType tokens here.
+ const HashedTypeEntry& SetTokenBasedEntryValue(mdTypeDef typeToken, Module * pModule)
+ {
+ m_EntryType = EntryType::IsHashedTokenEntry;
+ m_TokenAndModulePair.m_TypeToken = typeToken;
+ m_TokenAndModulePair.m_pModule = pModule;
+ return *this;
+ }
+ const TokenTypeEntry& GetTokenBasedEntryValue()
+ {
+ _ASSERT(m_EntryType == EntryType::IsHashedTokenEntry);
+ return m_TokenAndModulePair;
+ }
+};
class NameHandle
{
mdToken m_mdType;
mdToken m_mdTokenNotToLoad;
NameHandleTable m_WhichTable;
- PTR_EEClassHashEntry m_pBucket;
+ HashedTypeEntry m_Bucket;
public:
m_mdType(mdTokenNil),
m_mdTokenNotToLoad(tdNoTypes),
m_WhichTable(nhCaseSensitive),
- m_pBucket(PTR_NULL)
+ m_Bucket()
{
LIMITED_METHOD_CONTRACT;
}
m_mdType(mdTokenNil),
m_mdTokenNotToLoad(tdNoTypes),
m_WhichTable(nhCaseSensitive),
- m_pBucket(PTR_NULL)
+ m_Bucket()
{
LIMITED_METHOD_CONTRACT;
SUPPORTS_DAC;
m_mdType(token),
m_mdTokenNotToLoad(tdNoTypes),
m_WhichTable(nhCaseSensitive),
- m_pBucket(PTR_NULL)
+ m_Bucket()
{
LIMITED_METHOD_CONTRACT;
SUPPORTS_DAC;
m_mdType = p.m_mdType;
m_mdTokenNotToLoad = p.m_mdTokenNotToLoad;
m_WhichTable = p.m_WhichTable;
- m_pBucket = p.m_pBucket;
+ m_Bucket = p.m_Bucket;
}
void SetName(LPCUTF8 pName)
return m_WhichTable;
}
+ // Records the hashtable entry (token-based or class-entry-based) that this name
+ // resolved to, so a subsequent lookup can reuse/refine it.
- void SetBucket(EEClassHashEntry_t * pBucket)
+ void SetBucket(const HashedTypeEntry& bucket)
{
LIMITED_METHOD_CONTRACT;
SUPPORTS_DAC; // "this" must be a host address
- m_pBucket = dac_cast<PTR_EEClassHashEntry>(pBucket);
+ m_Bucket = bucket;
}
+ // Returns (by reference) the entry recorded via SetBucket; the entry reports
+ // IsNull() when no bucket has been recorded yet.
- EEClassHashEntry_t * GetBucket()
+ HashedTypeEntry& GetBucket()
{
LIMITED_METHOD_CONTRACT;
SUPPORTS_DAC;
- return m_pBucket;
+ return m_Bucket;
}
static BOOL OKToLoad(mdToken token, mdToken tokenNotToLoad)
VOID PopulateAvailableClassHashTable(Module *pModule,
AllocMemTracker *pamTracker);
+ void LazyPopulateCaseSensitiveHashTables();
void LazyPopulateCaseInsensitiveHashTables();
// Lookup the hash table entry from the hash table
- EEClassHashEntry_t *GetClassValue(NameHandleTable nhTable,
+ void GetClassValue(NameHandleTable nhTable,
NameHandle *pName,
HashDatum *pData,
EEClassHashTable **ppTable,
- Module* pLookInThisModuleOnly);
+ Module* pLookInThisModuleOnly,
+ HashedTypeEntry* pFoundEntry,
+ Loader::LoadFlag loadFlag,
+ BOOL& needsToBuildHashtable);
public:
Instantiation classInst, // the type arguments to the type (if any)
Instantiation methodInst); // the type arguments to the method (if any)
- BOOL
- FindClassModuleThrowing(
+ BOOL FindClassModuleThrowing(
const NameHandle * pName,
TypeHandle * pType,
mdToken * pmdClassToken,
Module ** ppModule,
mdToken * pmdFoundExportedType,
- EEClassHashEntry_t ** ppEntry,
+ HashedTypeEntry * pEntry,
Module * pLookInThisModuleOnly,
Loader::LoadFlag loadFlag);
namespace NativeFormat
{
+ class NativeReader;
+ typedef DPTR(NativeReader) PTR_NativeReader;
+
class NativeReader
{
PTR_BYTE _base;
ThrowHR(COR_E_BADIMAGEFORMAT);
}
+ // Throws BadImageFormat unless offset + lookAhead < _size; the (int)offset < 0 test
+ // also rejects offsets with the high bit set. Returns offset unchanged on success so
+ // the call can be chained inline.
+ uint EnsureOffsetInRange(uint offset, uint lookAhead)
+ {
+ if ((int)offset < 0 || offset + lookAhead >= _size)
+ ThrowBadImageFormatException();
+ return offset;
+ }
+
byte ReadUInt8(uint offset)
{
if (offset >= _size)
return offset;
}
+
+ // Decodes a variable-length signed integer at 'offset' into *pValue and returns the
+ // offset of the first byte after it. The run of low set bits in the first byte selects
+ // the encoded width (1-5 bytes); the remaining bits of the first byte hold the low
+ // bits of the value. Throws BadImageFormat on truncated data or an invalid prefix.
+ // NOTE(review): the return type is int while the other offset-returning readers
+ // (e.g. SkipInteger) use uint - looks like it should be uint; confirm.
+ int DecodeSigned(uint offset, int * pValue)
+ {
+ if (offset >= _size)
+ ThrowBadImageFormatException();
+
+ int val = *(_base + offset);
+ if ((val & 1) == 0)
+ {
+ *pValue = val >> 1;
+ offset += 1;
+ }
+ else if ((val & 2) == 0)
+ {
+ if (offset + 1 >= _size)
+ ThrowBadImageFormatException();
+ *pValue = (val >> 2) |
+ (((int)*(_base + offset + 1)) << 6);
+ offset += 2;
+ }
+ else if ((val & 4) == 0)
+ {
+ if (offset + 2 >= _size)
+ ThrowBadImageFormatException();
+ *pValue = (val >> 3) |
+ (((int)*(_base + offset + 1)) << 5) |
+ (((int)*(_base + offset + 2)) << 13);
+ offset += 3;
+ }
+ else if ((val & 8) == 0)
+ {
+ if (offset + 3 >= _size)
+ ThrowBadImageFormatException();
+ *pValue = (val >> 4) |
+ (((int)*(_base + offset + 1)) << 4) |
+ (((int)*(_base + offset + 2)) << 12) |
+ (((int)*(_base + offset + 3)) << 20);
+ offset += 4;
+ }
+ else if ((val & 16) == 0)
+ {
+ // 5-byte form: the full 32-bit value follows the prefix byte.
+ // ReadUInt32 performs its own bounds check.
+ *pValue = (int)ReadUInt32(offset + 1);
+ offset += 5;
+ }
+ else
+ {
+ ThrowBadImageFormatException();
+ }
+
+ return offset;
+ }
+
+ // Advances past a variable-length-encoded integer without decoding it. The run of low
+ // set bits in the first byte determines the encoded length (1, 2, 3, 4, 5 or 9 bytes);
+ // returns the offset of the first byte after the integer.
+ uint SkipInteger(uint offset)
+ {
+ EnsureOffsetInRange(offset, 0);
+
+ PTR_BYTE data = (_base + offset);
+ if ((*data & 1) == 0)
+ {
+ return offset + 1;
+ }
+ else if ((*data & 2) == 0)
+ {
+ return offset + 2;
+ }
+ else if ((*data & 4) == 0)
+ {
+ return offset + 3;
+ }
+ else if ((*data & 8) == 0)
+ {
+ return offset + 4;
+ }
+ else if ((*data & 16) == 0)
+ {
+ return offset + 5;
+ }
+ else if ((*data & 32) == 0)
+ {
+ return offset + 9;
+ }
+ else
+ {
+ ThrowBadImageFormatException();
+ return offset; // unreachable; satisfies the all-paths-return check
+ }
+ }
};
+ // Sequential cursor over a NativeReader: pairs a reader with a current offset and
+ // advances the offset as values are consumed. Bounds checking and the BadImageFormat
+ // throwing are delegated to the underlying NativeReader.
+ class NativeParser
+ {
+ PTR_NativeReader _pReader;
+ uint _offset;
+
+ public:
+ NativeParser()
+ : _pReader(PTR_NULL), _offset(0)
+ {
+ }
+
+ NativeParser(NativeReader * pReader, uint offset)
+ {
+ _pReader = dac_cast<PTR_NativeReader>(pReader);
+ _offset = offset;
+ }
+
+ NativeReader * GetNativeReader() { return _pReader; }
+
+ uint GetOffset() { return _offset; }
+ void SetOffset(uint value) { _offset = value; }
+
+ void ThrowBadImageFormatException()
+ {
+ _pReader->ThrowBadImageFormatException();
+ }
+
+ // Reads one byte and advances the cursor past it.
+ byte GetUInt8()
+ {
+ byte val = _pReader->ReadUInt8(_offset);
+ _offset += 1;
+ return val;
+ }
+
+ // Decodes a variable-length unsigned integer and advances past it.
+ uint GetUnsigned()
+ {
+ uint value;
+ _offset = _pReader->DecodeUnsigned(_offset, &value);
+ return value;
+ }
+
+ // Decodes a variable-length signed integer and advances past it.
+ int GetSigned()
+ {
+ int value;
+ _offset = _pReader->DecodeSigned(_offset, &value);
+ return value;
+ }
+
+ // Reads a signed delta and returns it applied to the delta's own start position,
+ // yielding an absolute offset; advances past the encoded delta.
+ uint GetRelativeOffset()
+ {
+ uint pos = _offset;
+
+ int delta;
+ _offset = _pReader->DecodeSigned(_offset, &delta);
+
+ return pos + (uint)delta;
+ }
+
+ void SkipInteger()
+ {
+ _offset = _pReader->SkipInteger(_offset);
+ }
+
+ // Returns a new parser positioned at the target of a relative offset read from the
+ // current position (this parser advances past the encoded offset).
+ NativeParser GetParserFromRelativeOffset()
+ {
+ return NativeParser(_pReader, GetRelativeOffset());
+ }
+ };
+
class NativeArray
{
- NativeReader * _pReader;
+ PTR_NativeReader _pReader;
uint _baseOffset;
uint _nElements;
byte _entryIndexSize;
public:
NativeArray()
- : _pReader(NULL)
+ : _pReader(PTR_NULL), _nElements(0)
{
}
return true;
}
};
+
+ //
+ // Reader for the static hashtable format emitted by VertexHashtable at
+ // crossgen time. Layout (see the constructor): one header byte whose low
+ // 2 bits give the bucket-index entry size (0/1/2 -> uint8/uint16/uint32)
+ // and whose remaining bits give log2 of the number of buckets, followed by
+ // the bucket offset table, followed by the entries.
+ //
+ class NativeHashtable
+ {
+ PTR_NativeReader _pReader;
+ uint _baseOffset;
+ uint _bucketMask;
+ byte _entryIndexSize;
+
+ // Returns a parser positioned at the first entry of 'bucket' and sets
+ // *pEndOffset to just past the bucket's last entry. Bucket boundaries are
+ // two consecutive slots of the offset table (each offset is relative to
+ // _baseOffset and sized according to _entryIndexSize).
+ NativeParser GetParserForBucket(uint bucket, uint * pEndOffset)
+ {
+ uint start, end;
+
+ if (_entryIndexSize == 0)
+ {
+ uint bucketOffset = _baseOffset + bucket;
+ start = _pReader->ReadUInt8(bucketOffset);
+ end = _pReader->ReadUInt8(bucketOffset + 1);
+ }
+ else if (_entryIndexSize == 1)
+ {
+ uint bucketOffset = _baseOffset + 2 * bucket;
+ start = _pReader->ReadUInt16(bucketOffset);
+ end = _pReader->ReadUInt16(bucketOffset + 2);
+ }
+ else
+ {
+ uint bucketOffset = _baseOffset + 4 * bucket;
+ start = _pReader->ReadUInt32(bucketOffset);
+ end = _pReader->ReadUInt32(bucketOffset + 4);
+ }
+
+ *pEndOffset = end + _baseOffset;
+ return NativeParser(_pReader, _baseOffset + start);
+ }
+
+ public:
+ NativeHashtable() : _pReader(PTR_NULL), _baseOffset(0), _bucketMask(0), _entryIndexSize(0)
+ {
+ }
+
+ // Parses the hashtable header at the parser's current position; throws
+ // BadImageFormatException on an out-of-range bucket shift or index size.
+ NativeHashtable(NativeParser& parser)
+ {
+ uint header = parser.GetUInt8();
+
+ _pReader = dac_cast<PTR_NativeReader>(parser.GetNativeReader());
+ _baseOffset = parser.GetOffset();
+
+ int numberOfBucketsShift = (int)(header >> 2);
+ if (numberOfBucketsShift > 31)
+ _pReader->ThrowBadImageFormatException();
+ _bucketMask = (uint)((1 << numberOfBucketsShift) - 1);
+
+ byte entryIndexSize = (byte)(header & 3);
+ if (entryIndexSize > 2)
+ _pReader->ThrowBadImageFormatException();
+ _entryIndexSize = entryIndexSize;
+ }
+
+ bool IsNull() { return _pReader == NULL; }
+
+ //
+ // The enumerator does not conform to the regular C# enumerator pattern to avoid paying
+ // its performance penalty (allocation, multiple calls per iteration)
+ //
+ class Enumerator
+ {
+ NativeParser _parser;
+ uint _endOffset;
+ byte _lowHashcode;
+
+ public:
+ Enumerator(NativeParser parser, uint endOffset, byte lowHashcode)
+ {
+ _parser = parser;
+ _endOffset = endOffset;
+ _lowHashcode = lowHashcode;
+ }
+
+ // Advances to the next entry in the bucket whose stored low hash byte
+ // matches; on success positions entryParser at the entry payload and
+ // returns true. Returns false when the bucket is exhausted.
+ bool GetNext(NativeParser& entryParser)
+ {
+ while (_parser.GetOffset() < _endOffset)
+ {
+ byte lowHashcode = _parser.GetUInt8();
+
+ if (lowHashcode == _lowHashcode)
+ {
+ entryParser = _parser.GetParserFromRelativeOffset();
+ return true;
+ }
+
+ // The entries are sorted by hashcode within the bucket. It allows us to terminate the lookup prematurely.
+ if (lowHashcode > _lowHashcode)
+ {
+ _endOffset = _parser.GetOffset(); // Ensure that extra call to GetNext returns null parser again
+ break;
+ }
+
+ _parser.SkipInteger();
+ }
+
+ return false;
+ }
+ };
+
+ // The recommended code pattern to perform lookup is:
+ //
+ // NativeHashtable::Enumerator lookup = hashtable.Lookup(dwHashCode);
+ // NativeParser entryParser;
+ // while (lookup.GetNext(entryParser))
+ // {
+ // ... read entry using entryParser ...
+ // }
+ //
+ // Bucket selection uses bits 8+ of the hashcode; the low byte is stored
+ // per entry and compared during enumeration.
+ Enumerator Lookup(int hashcode)
+ {
+ uint endOffset;
+ uint bucket = ((uint)hashcode >> 8) & _bucketMask;
+ NativeParser parser = GetParserForBucket(bucket, &endOffset);
+
+ return Enumerator(parser, endOffset, (byte)hashcode);
+ }
+ };
}
return dac_cast<PTR_MethodDesc>(val);
}
+// Returns TRUE when this R2R image carries the statically-built hashtable of
+// available types (READYTORUN_SECTION_AVAILABLE_TYPES). Images produced by an
+// older crossgen lack the section; callers then use the legacy lookup path.
+BOOL ReadyToRunInfo::HasHashtableOfTypes()
+{
+ CONTRACTL
+ {
+ GC_NOTRIGGER;
+ NOTHROW;
+ SO_TOLERANT;
+ SUPPORTS_DAC;
+ }
+ CONTRACTL_END;
+
+ return !m_availableTypesHashtable.IsNull();
+}
+
+// Tries to resolve the type described by pName -- either by name (ex: Type.GetType())
+// or by a token from another module -- against the static hashtable of available
+// types stored in the R2R image. On success returns TRUE and sets *pFoundTypeToken
+// to the matching typedef or exported type token of this module. Returns FALSE when
+// no match is found or on any metadata failure; callers fall back to the regular
+// lookup path in that case.
+BOOL ReadyToRunInfo::TryLookupTypeTokenFromName(NameHandle *pName, mdToken * pFoundTypeToken)
+{
+ CONTRACTL
+ {
+ GC_NOTRIGGER;
+ NOTHROW;
+ SO_TOLERANT;
+ SUPPORTS_DAC;
+ PRECONDITION(!m_availableTypesHashtable.IsNull());
+ }
+ CONTRACTL_END;
+
+ if (m_availableTypesHashtable.IsNull())
+ return FALSE;
+
+ LPCUTF8 pszName;
+ LPCUTF8 pszNameSpace;
+
+ // BUGFIX: this buffer must live for the whole function. When a name-based
+ // lookup splits "Namespace.Name", pszNameSpace points into this buffer and is
+ // dereferenced again below (hashing, and the strcmp against candidate
+ // entries). Previously the buffer was scoped to the inner 'else' block,
+ // leaving pszNameSpace dangling once that scope exited.
+ CQuickBytes szNamespace;
+
+ //
+ // Compute the hashcode of the type (hashcode based on type name and namespace name)
+ //
+ DWORD dwHashCode = 0;
+ {
+ if (pName->GetTypeToken() == mdtBaseType)
+ {
+ // Name-based lookups (ex: Type.GetType()).
+
+ pszName = pName->GetName();
+ pszNameSpace = "";
+
+ if (pName->GetNameSpace() != NULL)
+ {
+ pszNameSpace = pName->GetNameSpace();
+ }
+ else
+ {
+ LPCUTF8 p;
+
+ // Split a namespace-qualified name at its last separator.
+ if ((p = ns::FindSep(pszName)) != NULL)
+ {
+ SIZE_T d = p - pszName;
+
+ FAULT_NOT_FATAL();
+ pszNameSpace = szNamespace.SetStringNoThrow(pszName, d);
+
+ if (pszNameSpace == NULL)
+ return FALSE;
+
+ pszName = (p + 1);
+ }
+ }
+
+ _ASSERT(pszNameSpace != NULL);
+ dwHashCode = ((dwHashCode << 5) + dwHashCode) ^ HashStringA(pszName);
+ dwHashCode = ((dwHashCode << 5) + dwHashCode) ^ HashStringA(pszNameSpace);
+
+ // Bucket is not 'null' for a nested type, and it will have information about the nested type's encloser
+ if (!pName->GetBucket().IsNull())
+ {
+ // Must be a token based bucket that we found earlier in the R2R types hashtable
+ _ASSERT(pName->GetBucket().GetEntryType() == HashedTypeEntry::IsHashedTokenEntry);
+
+ const HashedTypeEntry::TokenTypeEntry& tokenBasedEncloser = pName->GetBucket().GetTokenBasedEntryValue();
+
+ // Token must be a typedef token that we previously resolved (we shouldn't get here with an exported type token)
+ _ASSERT(TypeFromToken(tokenBasedEncloser.m_TypeToken) == mdtTypeDef);
+
+ // Fold the names of the encloser chain into the hash, immediate encloser
+ // first, matching the hashing performed at crossgen time.
+ mdToken mdCurrentTypeToken = tokenBasedEncloser.m_TypeToken;
+ do
+ {
+ LPCUTF8 pszNameTemp;
+ LPCUTF8 pszNameSpaceTemp;
+ if (!GetTypeNameFromToken(tokenBasedEncloser.m_pModule->GetMDImport(), mdCurrentTypeToken, &pszNameTemp, &pszNameSpaceTemp))
+ return FALSE;
+
+ dwHashCode = ((dwHashCode << 5) + dwHashCode) ^ HashStringA(pszNameTemp);
+ dwHashCode = ((dwHashCode << 5) + dwHashCode) ^ HashStringA(pszNameSpaceTemp == NULL ? "" : pszNameSpaceTemp);
+
+ } while (GetEnclosingToken(tokenBasedEncloser.m_pModule->GetMDImport(), mdCurrentTypeToken, &mdCurrentTypeToken));
+
+ }
+ }
+ else
+ {
+ // Token based lookups (ex: tokens from IL code)
+
+ mdToken mdCurrentTypeToken = pName->GetTypeToken();
+ do
+ {
+ if (!GetTypeNameFromToken(pName->GetTypeModule()->GetMDImport(), mdCurrentTypeToken, &pszName, &pszNameSpace))
+ return FALSE;
+
+ dwHashCode = ((dwHashCode << 5) + dwHashCode) ^ HashStringA(pszName);
+ dwHashCode = ((dwHashCode << 5) + dwHashCode) ^ HashStringA(pszNameSpace == NULL ? "" : pszNameSpace);
+
+ } while (GetEnclosingToken(pName->GetTypeModule()->GetMDImport(), mdCurrentTypeToken, &mdCurrentTypeToken));
+ }
+ }
+
+
+ //
+ // Lookup the type in the native hashtable using the computed token
+ //
+ {
+ NativeHashtable::Enumerator lookup = m_availableTypesHashtable.Lookup((int)dwHashCode);
+ NativeParser entryParser;
+ while (lookup.GetNext(entryParser))
+ {
+ // Low bit of the stored value selects the token type; remaining bits are the RID.
+ DWORD ridAndFlag = entryParser.GetUnsigned();
+ mdToken cl = ((ridAndFlag & 1) ? ((ridAndFlag >> 1) | mdtExportedType) : ((ridAndFlag >> 1) | mdtTypeDef));
+ _ASSERT(RidFromToken(cl) != 0);
+
+ if (pName->GetTypeToken() == mdtBaseType)
+ {
+ // Compare type name and namespace name
+ LPCUTF8 pszFoundName;
+ LPCUTF8 pszFoundNameSpace;
+ if (!GetTypeNameFromToken(m_pModule->GetMDImport(), cl, &pszFoundName, &pszFoundNameSpace))
+ continue;
+ if (strcmp(pszName, pszFoundName) != 0 || strcmp(pszNameSpace, pszFoundNameSpace) != 0)
+ continue;
+
+ mdToken mdFoundTypeEncloser;
+ BOOL inputTypeHasEncloser = !pName->GetBucket().IsNull();
+ BOOL foundTypeHasEncloser = GetEnclosingToken(m_pModule->GetMDImport(), cl, &mdFoundTypeEncloser);
+ if (inputTypeHasEncloser != foundTypeHasEncloser)
+ continue;
+
+ // Compare the enclosing types chain for a match
+ if (inputTypeHasEncloser)
+ {
+ const HashedTypeEntry::TokenTypeEntry& tokenBasedEncloser = pName->GetBucket().GetTokenBasedEntryValue();
+
+ if (!CompareTypeNameOfTokens(tokenBasedEncloser.m_TypeToken, tokenBasedEncloser.m_pModule->GetMDImport(), mdFoundTypeEncloser, m_pModule->GetMDImport()))
+ continue;
+ }
+ }
+ else
+ {
+ // Compare type name, namespace name, and enclosing types chain for a match
+ if (!CompareTypeNameOfTokens(pName->GetTypeToken(), pName->GetTypeModule()->GetMDImport(), cl, m_pModule->GetMDImport()))
+ continue;
+ }
+
+ // Found a match!
+ *pFoundTypeToken = cl;
+ return TRUE;
+ }
+ }
+
+ return FALSE; // No matching type found
+}
+
+// Retrieves the simple name and namespace of a typedef, typeref, or exported
+// type token. Returns FALSE if the token kind is unsupported or the metadata
+// call fails. Note: the metadata APIs take (name, namespace) for typedefs but
+// (namespace, name) for typerefs and exported types -- the argument order
+// below is intentional.
+BOOL ReadyToRunInfo::GetTypeNameFromToken(IMDInternalImport * pImport, mdToken mdType, LPCUTF8 * ppszName, LPCUTF8 * ppszNameSpace)
+{
+ CONTRACTL
+ {
+ GC_NOTRIGGER;
+ NOTHROW;
+ SO_TOLERANT;
+ SUPPORTS_DAC;
+ PRECONDITION(TypeFromToken(mdType) == mdtTypeDef || TypeFromToken(mdType) == mdtTypeRef || TypeFromToken(mdType) == mdtExportedType);
+ }
+ CONTRACTL_END;
+
+ switch (TypeFromToken(mdType))
+ {
+ case mdtTypeDef:
+ return SUCCEEDED(pImport->GetNameOfTypeDef(mdType, ppszName, ppszNameSpace));
+ case mdtTypeRef:
+ return SUCCEEDED(pImport->GetNameOfTypeRef(mdType, ppszNameSpace, ppszName));
+ case mdtExportedType:
+ return SUCCEEDED(pImport->GetExportedTypeProps(mdType, ppszNameSpace, ppszName, NULL, NULL, NULL));
+ }
+
+ return FALSE;
+}
+
+// Retrieves the enclosing type of a nested type. For a typedef this is the
+// declaring class; for a typeref it is the resolution scope when that scope is
+// itself a typeref; for an exported type it is the implementation token when
+// that token is itself an exported type. Returns TRUE and sets *pEnclosingToken
+// only when such an encloser exists; FALSE otherwise.
+BOOL ReadyToRunInfo::GetEnclosingToken(IMDInternalImport * pImport, mdToken mdType, mdToken * pEnclosingToken)
+{
+ CONTRACTL
+ {
+ GC_NOTRIGGER;
+ NOTHROW;
+ SO_TOLERANT;
+ SUPPORTS_DAC;
+ PRECONDITION(TypeFromToken(mdType) == mdtTypeDef || TypeFromToken(mdType) == mdtTypeRef || TypeFromToken(mdType) == mdtExportedType);
+ }
+ CONTRACTL_END;
+
+ switch (TypeFromToken(mdType))
+ {
+ case mdtTypeDef:
+ return SUCCEEDED(pImport->GetNestedClassProps(mdType, pEnclosingToken));
+
+ case mdtTypeRef:
+ if (SUCCEEDED(pImport->GetResolutionScopeOfTypeRef(mdType, pEnclosingToken)))
+ return ((TypeFromToken(*pEnclosingToken) == mdtTypeRef) && (*pEnclosingToken != mdTypeRefNil));
+ break; // BUGFIX: previously fell through and queried exported-type props with a typeref token
+
+ case mdtExportedType:
+ if (SUCCEEDED(pImport->GetExportedTypeProps(mdType, NULL, NULL, pEnclosingToken, NULL, NULL)))
+ return ((TypeFromToken(*pEnclosingToken) == mdtExportedType) && (*pEnclosingToken != mdExportedTypeNil));
+ break;
+ }
+
+ return FALSE;
+}
+
+// Compares two types for name equality by walking both encloser chains in
+// lockstep: at every level the simple names and namespaces must match, and
+// both sides must either have or not have a further encloser. Returns TRUE
+// only on a full-chain match; FALSE on any mismatch or metadata failure.
+BOOL ReadyToRunInfo::CompareTypeNameOfTokens(mdToken mdToken1, IMDInternalImport * pImport1, mdToken mdToken2, IMDInternalImport * pImport2)
+{
+ CONTRACTL
+ {
+ GC_NOTRIGGER;
+ NOTHROW;
+ SO_TOLERANT;
+ SUPPORTS_DAC;
+ PRECONDITION(TypeFromToken(mdToken1) == mdtTypeDef || TypeFromToken(mdToken1) == mdtTypeRef || TypeFromToken(mdToken1) == mdtExportedType);
+ PRECONDITION(TypeFromToken(mdToken2) == mdtTypeDef || TypeFromToken(mdToken2) == mdtExportedType);
+ }
+ CONTRACTL_END;
+
+ BOOL hasEncloser;
+ do
+ {
+ LPCUTF8 pszName1;
+ LPCUTF8 pszNameSpace1;
+ if (!GetTypeNameFromToken(pImport1, mdToken1, &pszName1, &pszNameSpace1))
+ return FALSE;
+
+ LPCUTF8 pszName2;
+ LPCUTF8 pszNameSpace2;
+ if (!GetTypeNameFromToken(pImport2, mdToken2, &pszName2, &pszNameSpace2))
+ return FALSE;
+
+ if (strcmp(pszName1, pszName2) != 0 || strcmp(pszNameSpace1, pszNameSpace2) != 0)
+ return FALSE;
+
+ // Advance both chains; a mismatch in "has an encloser" is a mismatch of the types.
+ if ((hasEncloser = GetEnclosingToken(pImport1, mdToken1, &mdToken1)) != GetEnclosingToken(pImport2, mdToken2, &mdToken2))
+ return FALSE;
+
+ } while (hasEncloser);
+
+ return TRUE;
+}
+
PTR_BYTE ReadyToRunInfo::GetDebugInfo(PTR_RUNTIME_FUNCTION pRuntimeFunction)
{
CONTRACTL
m_methodDefEntryPoints = NativeArray(&m_nativeReader, pEntryPointsDir->VirtualAddress);
}
+ IMAGE_DATA_DIRECTORY * pAvailableTypesDir = FindSection(READYTORUN_SECTION_AVAILABLE_TYPES);
+ if (pAvailableTypesDir != NULL)
+ {
+ NativeParser parser = NativeParser(&m_nativeReader, pAvailableTypesDir->VirtualAddress);
+ m_availableTypesHashtable = NativeHashtable(parser);
+ }
+
{
LockOwner lock = {&m_Crst, IsOwnerOfCrst};
m_entryPointToMethodDescMap.Init(TRUE, &lock);
{
friend class ReadyToRunJitManager;
- PTR_Module m_pModule;
+ PTR_Module m_pModule;
- PTR_PEImageLayout m_pLayout;
- PTR_READYTORUN_HEADER m_pHeader;
+ PTR_PEImageLayout m_pLayout;
+ PTR_READYTORUN_HEADER m_pHeader;
- PTR_RUNTIME_FUNCTION m_pRuntimeFunctions;
- DWORD m_nRuntimeFunctions;
+ PTR_RUNTIME_FUNCTION m_pRuntimeFunctions;
+ DWORD m_nRuntimeFunctions;
- PTR_CORCOMPILE_IMPORT_SECTION m_pImportSections;
- DWORD m_nImportSections;
+ PTR_CORCOMPILE_IMPORT_SECTION m_pImportSections;
+ DWORD m_nImportSections;
- NativeFormat::NativeReader m_nativeReader;
- NativeFormat::NativeArray m_methodDefEntryPoints;
+ NativeFormat::NativeReader m_nativeReader;
+ NativeFormat::NativeArray m_methodDefEntryPoints;
+ NativeFormat::NativeHashtable m_availableTypesHashtable;
- Crst m_Crst;
- PtrHashMap m_entryPointToMethodDescMap;
+ Crst m_Crst;
+ PtrHashMap m_entryPointToMethodDescMap;
ReadyToRunInfo(Module * pModule, PEImageLayout * pLayout, READYTORUN_HEADER * pHeader);
MethodDesc * GetMethodDescForEntryPoint(PCODE entryPoint);
+ BOOL HasHashtableOfTypes();
+ BOOL TryLookupTypeTokenFromName(NameHandle *pName, mdToken * pFoundTypeToken);
+
BOOL SkipTypeValidation()
{
LIMITED_METHOD_CONTRACT;
};
static DWORD GetFieldBaseOffset(MethodTable * pMT);
+
+private:
+ BOOL GetTypeNameFromToken(IMDInternalImport * pImport, mdToken mdType, LPCUTF8 * ppszName, LPCUTF8 * ppszNameSpace);
+ BOOL GetEnclosingToken(IMDInternalImport * pImport, mdToken mdType, mdToken * pEnclosingToken);
+ BOOL CompareTypeNameOfTokens(mdToken mdToken1, IMDInternalImport * pImport1, mdToken mdToken2, IMDInternalImport * pImport2);
};
class DynamicHelpers
// defines the type. This should cause typeName.m_pBucket to be set to the bucket
// which corresponds to the type in the defining module, instead of potentially in the manifest module.
i = -1;
- typeName.SetBucket(NULL);
+ typeName.SetBucket(HashedTypeEntry());
}
}
#include "nativeformatwriter.h"
+#include <clr_std/algorithm>
+
namespace NativeFormat
{
//
}
}
}
+
+ //
+ // VertexHashtable
+ //
+
+ // Returns 1 + log2(x) rounded up, 0 iff x == 0
+ static unsigned HighestBit(unsigned x)
+ {
+ unsigned ret = 0;
+ while (x != 0)
+ {
+ x >>= 1;
+ ret++;
+ }
+ return ret;
+ }
+
+ // Helper method to back patch entry index in the bucket table
+ static void PatchEntryIndex(NativeWriter * pWriter, int patchOffset, int entryIndexSize, int entryIndex)
+ {
+ if (entryIndexSize == 0)
+ {
+ pWriter->PatchByteAt(patchOffset, (byte)entryIndex);
+ }
+ else
+ if (entryIndexSize == 1)
+ {
+ pWriter->PatchByteAt(patchOffset, (byte)entryIndex);
+ pWriter->PatchByteAt(patchOffset + 1, (byte)(entryIndex >> 8));
+ }
+ else
+ {
+ pWriter->PatchByteAt(patchOffset, (byte)entryIndex);
+ pWriter->PatchByteAt(patchOffset + 1, (byte)(entryIndex >> 8));
+ pWriter->PatchByteAt(patchOffset + 2, (byte)(entryIndex >> 16));
+ pWriter->PatchByteAt(patchOffset + 3, (byte)(entryIndex >> 24));
+ }
+ }
+
+ // Serializes the hashtable: one header byte (low 2 bits = entry index size,
+ // remaining bits = log2 number of buckets), then the bucket offset table,
+ // then the entries (low hash byte + relative offset to the vertex). The
+ // writer saves iteratively until offsets converge, so this method may run
+ // multiple times; UpdateOffsetAdjustment is used to force another pass when
+ // entry positions or the chosen index size change.
+ void VertexHashtable::Save(NativeWriter * pWriter)
+ {
+ // Compute the layout of the table if we have not done it yet
+ if (m_nBuckets == 0)
+ ComputeLayout();
+
+ int nEntries = (int)m_Entries.size();
+ int startOffset = pWriter->GetCurrentOffset();
+ int bucketMask = (m_nBuckets - 1);
+
+ // Lowest two bits are entry index size, the rest is log2 number of buckets
+ int numberOfBucketsShift = HighestBit(m_nBuckets) - 1;
+ pWriter->WriteByte(static_cast<uint8_t>((numberOfBucketsShift << 2) | m_entryIndexSize));
+
+ int bucketsOffset = pWriter->GetCurrentOffset();
+
+ // Reserve space for m_nBuckets + 1 bucket-boundary slots; they are
+ // back-patched below once the entry offsets are known.
+ pWriter->WritePad((m_nBuckets + 1) << m_entryIndexSize);
+
+ // For faster lookup at runtime, we store the first entry index even though it is redundant (the value can be
+ // inferred from number of buckets)
+ PatchEntryIndex(pWriter, bucketsOffset, m_entryIndexSize, pWriter->GetCurrentOffset() - bucketsOffset);
+
+ int iEntry = 0;
+
+ for (int iBucket = 0; iBucket < m_nBuckets; iBucket++)
+ {
+ // Entries are pre-sorted by bucket (see ComputeLayout), so each bucket's
+ // entries form a contiguous run.
+ while (iEntry < nEntries)
+ {
+ Entry &e = m_Entries[iEntry];
+
+ if (((e.hashcode >> 8) & bucketMask) != (unsigned)iBucket)
+ break;
+
+ // Track how much this entry moved since the previous pass so the
+ // writer knows whether another pass is needed.
+ int currentOffset = pWriter->GetCurrentOffset();
+ pWriter->UpdateOffsetAdjustment(currentOffset - e.offset);
+ e.offset = currentOffset;
+
+ pWriter->WriteByte((byte)e.hashcode);
+ pWriter->WriteRelativeOffset(e.pVertex);
+
+ iEntry++;
+ }
+
+ // Back patch the end boundary of this bucket.
+ int patchOffset = bucketsOffset + ((iBucket + 1) << m_entryIndexSize);
+
+ PatchEntryIndex(pWriter, patchOffset, m_entryIndexSize, pWriter->GetCurrentOffset() - bucketsOffset);
+ }
+ assert(iEntry == nEntries);
+
+ // Pick the smallest index size that can address the table; changing it
+ // invalidates the current pass.
+ int maxIndexEntry = (pWriter->GetCurrentOffset() - bucketsOffset);
+ int newEntryIndexSize = 0;
+ if (maxIndexEntry > 0xFF)
+ {
+ newEntryIndexSize++;
+ if (maxIndexEntry > 0xFFFF)
+ newEntryIndexSize++;
+ }
+
+ if (pWriter->IsGrowing())
+ {
+ if (newEntryIndexSize > m_entryIndexSize)
+ {
+ // Ensure that the table will be redone with new entry index size
+ pWriter->UpdateOffsetAdjustment(1);
+
+ m_entryIndexSize = newEntryIndexSize;
+ }
+ }
+ else
+ {
+ if (newEntryIndexSize < m_entryIndexSize)
+ {
+ // Ensure that the table will be redone with new entry index size
+ pWriter->UpdateOffsetAdjustment(-1);
+
+ m_entryIndexSize = newEntryIndexSize;
+ }
+ }
+ }
+
+ // Freezes the table layout: chooses a power-of-two bucket count from the
+ // fill factor and sorts entries by (bucket, low hash byte) so Save can emit
+ // each bucket as a contiguous, hash-ordered run. After this runs, m_nBuckets
+ // is non-zero and the table is closed for mutation (see Append).
+ void VertexHashtable::ComputeLayout()
+ {
+ unsigned bucketsEstimate = (unsigned)(m_Entries.size() / m_nFillFactor);
+
+ // Round number of buckets up to the power of two
+ m_nBuckets = 1 << HighestBit(bucketsEstimate);
+
+ // Lowest byte of the hashcode is used for lookup within the bucket. Keep it sorted too so that
+ // we can use the ordering to terminate the lookup prematurely.
+ unsigned mask = ((m_nBuckets - 1) << 8) | 0xFF;
+
+ // sort it by hashcode
+ std::sort(m_Entries.begin(), m_Entries.end(),
+ [=](Entry const& a, Entry const& b)
+ {
+ return (a.hashcode & mask) < (b.hashcode & mask);
+ }
+ );
+
+ // Start with maximum size entries
+ m_entryIndexSize = 2;
+ }
}
RollbackTo(offset);
}
+ // Back-patches a single byte at a previously written offset in the output
+ // buffer (used to fill in values, e.g. bucket boundaries, that are only
+ // known after later content has been emitted).
+ void PatchByteAt(int offset, byte value)
+ {
+ m_Buffer[offset] = value;
+ }
+
//
// Same encoding as what's used by CTL
//
//
+ // Data structure building blocks
+ //
+
+ // Vertex that serializes a single unsigned constant using the
+ // variable-size unsigned encoding.
+ class UnsignedConstant : public Vertex
+ {
+ unsigned m_value;
+
+ public:
+ UnsignedConstant(unsigned value)
+ : m_value(value)
+ {
+ }
+
+ virtual void Save(NativeWriter * pWriter)
+ {
+ pWriter->WriteUnsigned(m_value);
+ }
+ };
+
+ //
// Sparse array. Good for random access based on index
//
class VertexArray : public Vertex
virtual void Save(NativeWriter * pWriter);
};
+
+ //
+ // Hashtable. Good for random access based on hashcode + key
+ //
+ class VertexHashtable : public Vertex
+ {
+ struct Entry
+ {
+ Entry()
+ : offset(-1), hashcode(0), pVertex(NULL)
+ {
+ }
+
+ Entry(unsigned hashcode, Vertex * pVertex)
+ : offset(0), hashcode(hashcode), pVertex(pVertex)
+ {
+ }
+
+ int offset;
+
+ unsigned hashcode;
+ Vertex * pVertex;
+ };
+
+ vector<Entry> m_Entries;
+
+ // How many entries to target per bucket. Higher fill factor means smaller size, but worse runtime perf.
+ int m_nFillFactor;
+
+ // Number of buckets choosen for the table. Must be power of two. 0 means that the table is still open for mutation.
+ int m_nBuckets;
+
+ // Current size of index entry
+ int m_entryIndexSize; // 0 - uint8, 1 - uint16, 2 - uint32
+
+ void ComputeLayout();
+
+ public:
+ static const int DefaultFillFactor = 13;
+
+ VertexHashtable(int fillFactor = DefaultFillFactor)
+ {
+ m_nBuckets = 0;
+
+ m_nFillFactor = fillFactor;
+ }
+
+ void Append(unsigned hashcode, Vertex * pElement)
+ {
+ // The table needs to be open for mutation
+ assert(m_nBuckets == 0);
+
+ m_Entries.push_back(Entry(hashcode, pElement));
+ }
+
+ virtual void Save(NativeWriter * pWriter);
+ };
};
#endif
m_pCodeMethodDescsSection = NewVirtualSection(pTextSection, IBCProfiledSection | WarmRange | ColdRange | CodeHeaderSection,sizeof(DWORD));
+#ifdef FEATURE_READYTORUN_COMPILER
+ if (IsReadyToRunCompilation())
+ {
+ m_pAvailableTypesSection = NewVirtualSection(pTextSection, IBCUnProfiledSection | WarmRange | ReadonlySection);
+ }
+#endif
+
#if defined(WIN64EXCEPTIONS)
m_pUnwindDataSection = NewVirtualSection(pTextSection, IBCProfiledSection | WarmRange | ColdRange | UnwindDataSection, sizeof(DWORD));
#endif // defined(WIN64EXCEPTIONS)
{
OutputEntrypointsTableForReadyToRun();
OutputDebugInfoForReadyToRun();
+ OutputTypesTableForReadyToRun(m_pMDImport);
}
else
#endif
ZapVirtualSection * m_pColdUnwindDataSection;
#endif // defined(WIN64EXCEPTIONS)
+#ifdef FEATURE_READYTORUN_COMPILER
+ ZapVirtualSection * m_pAvailableTypesSection;
+#endif
+
// Preloader sections
ZapVirtualSection * m_pPreloadSections[CORCOMPILE_SECTION_COUNT];
void OutputEntrypointsTableForReadyToRun();
void OutputDebugInfoForReadyToRun();
+ void OutputTypesTableForReadyToRun(IMDInternalImport * pMDImport);
void CopyDebugDirEntry();
void CopyWin32VersionResource();
GetReadyToRunHeader()->RegisterSection(READYTORUN_SECTION_DEBUG_INFO, pBlob);
}
+// Builds the static hashtable of available types (typedefs and exported types)
+// for a ReadyToRun image and registers it as READYTORUN_SECTION_AVAILABLE_TYPES.
+// Each entry stores (RID << 1) with the low bit set for exported types; the hash
+// folds in the type's name and namespace plus its full encloser chain, and must
+// stay in sync with the hash computed by ReadyToRunInfo::TryLookupTypeTokenFromName.
+void ZapImage::OutputTypesTableForReadyToRun(IMDInternalImport * pMDImport)
+{
+ NativeWriter writer;
+ VertexHashtable typesHashtable;
+
+ NativeSection * pSection = writer.NewSection();
+ pSection->Place(&typesHashtable);
+
+ // Note on duplicate types with same name: there is no need to perform that check when building
+ // the hashtable. If such types were encountered, the R2R compilation would fail before reaching here.
+
+ LPCUTF8 pszName;
+ LPCUTF8 pszNameSpace;
+
+ // Save the TypeDefs to the hashtable
+ {
+ HENUMInternalHolder hEnum(pMDImport);
+ hEnum.EnumAllInit(mdtTypeDef);
+
+ mdToken mdTypeToken;
+ while (pMDImport->EnumNext(&hEnum, &mdTypeToken))
+ {
+ DWORD dwHash = 0;
+ mdTypeDef mdCurrentToken = mdTypeToken;
+
+ // Hash the type name/namespace, then walk outward through the
+ // nesting chain hashing each encloser's name/namespace.
+ do
+ {
+ if (FAILED(pMDImport->GetNameOfTypeDef(mdCurrentToken, &pszName, &pszNameSpace)))
+ ThrowHR(COR_E_BADIMAGEFORMAT);
+
+ dwHash = ((dwHash << 5) + dwHash) ^ HashStringA(pszName);
+ dwHash = ((dwHash << 5) + dwHash) ^ HashStringA(pszNameSpace == NULL ? "" : pszNameSpace);
+
+ } while (SUCCEEDED(pMDImport->GetNestedClassProps(mdCurrentToken, &mdCurrentToken)));
+
+ // Low bit 0 == typedef entry.
+ typesHashtable.Append(dwHash, pSection->Place(new UnsignedConstant(RidFromToken(mdTypeToken) << 1)));
+ }
+ }
+
+ // Save the ExportedTypes to the hashtable
+ {
+ HENUMInternalHolder hEnum(pMDImport);
+ hEnum.EnumInit(mdtExportedType, mdTokenNil);
+
+ mdToken mdTypeToken;
+ while (pMDImport->EnumNext(&hEnum, &mdTypeToken))
+ {
+ DWORD dwHash = 0;
+ mdTypeDef mdCurrentToken = mdTypeToken;
+
+ // For forwarded nested types the implementation token is the enclosing
+ // exported type; keep folding names until the chain leaves the
+ // ExportedType table.
+ do
+ {
+ if (FAILED(pMDImport->GetExportedTypeProps(mdCurrentToken, &pszNameSpace, &pszName, &mdCurrentToken, NULL, NULL)))
+ ThrowHR(COR_E_BADIMAGEFORMAT);
+
+ dwHash = ((dwHash << 5) + dwHash) ^ HashStringA(pszName);
+ dwHash = ((dwHash << 5) + dwHash) ^ HashStringA(pszNameSpace == NULL ? "" : pszNameSpace);
+
+ } while (TypeFromToken(mdCurrentToken) == mdtExportedType);
+
+ // Low bit 1 == exported type entry.
+ typesHashtable.Append(dwHash, pSection->Place(new UnsignedConstant((RidFromToken(mdTypeToken) << 1) | 1)));
+ }
+ }
+
+ vector<byte>& blob = writer.Save();
+
+ ZapNode * pBlob = ZapBlob::NewBlob(this, &blob[0], blob.size());
+ _ASSERTE(m_pAvailableTypesSection);
+ m_pAvailableTypesSection->Place(pBlob);
+
+ GetReadyToRunHeader()->RegisterSection(READYTORUN_SECTION_AVAILABLE_TYPES, pBlob);
+}
+
//
// Verify that data structures and flags shared between NGen and ReadyToRun are in sync