From 6e095aaf6ee37c8e6c94ac2e6ea2ae6b3b876d5f Mon Sep 17 00:00:00 2001
From: Egor Chesakov
Date: Fri, 9 Feb 2018 10:18:50 -0800
Subject: [PATCH] Replace sizeof(void*)-like expressions with TARGET_POINTER_SIZE macro

---
 src/inc/corinfo.h             |  2 +-
 src/vm/array.cpp              |  4 ++--
 src/vm/callingconvention.h    | 14 +++++++-------
 src/vm/compile.cpp            | 10 +++++-----
 src/vm/fieldmarshaler.cpp     |  4 ++--
 src/vm/jitinterface.cpp       | 30 +++++++++++++++---------------
 src/vm/methodtablebuilder.cpp | 32 ++++++++++++++++----------------
 src/vm/siginfo.cpp            |  2 +-
 8 files changed, 49 insertions(+), 49 deletions(-)

diff --git a/src/inc/corinfo.h b/src/inc/corinfo.h
index f3b509c..d430412 100644
--- a/src/inc/corinfo.h
+++ b/src/inc/corinfo.h
@@ -2375,7 +2375,7 @@ public:
     // This is only called for Value classes. It returns a boolean array
     // in representing of 'cls' from a GC perspective. The class is
     // assumed to be an array of machine words
-    // (of length // getClassSize(cls) / sizeof(void*)),
+    // (of length // getClassSize(cls) / TARGET_POINTER_SIZE),
     // 'gcPtrs' is a pointer to an array of BYTEs of this length.
     // getClassGClayout fills in this array so that gcPtrs[i] is set
     // to one of the CorInfoGCType values which is the GC type of
diff --git a/src/vm/array.cpp b/src/vm/array.cpp
index 08a86a3..61b81a0 100644
--- a/src/vm/array.cpp
+++ b/src/vm/array.cpp
@@ -704,9 +704,9 @@ MethodTable* Module::CreateArrayMethodTable(TypeHandle elemTypeHnd, CorElementTy
                               - ObjSizeOf(Object)
                               - currentOffset;
         }
-        _ASSERTE(!"Module::CreateArrayMethodTable() - unaligned GC info" || IS_ALIGNED(skip, sizeof(size_t)));
+        _ASSERTE(!"Module::CreateArrayMethodTable() - unaligned GC info" || IS_ALIGNED(skip, TARGET_POINTER_SIZE));
-        unsigned short NumPtrs = (unsigned short) (numPtrsInBytes / sizeof(void*));
+        unsigned short NumPtrs = (unsigned short) (numPtrsInBytes / TARGET_POINTER_SIZE);
         if(skip > MAX_SIZE_FOR_VALUECLASS_IN_ARRAY || numPtrsInBytes > MAX_PTRS_FOR_VALUECLASSS_IN_ARRAY)
         {
             StackSString ssElemName;
             elemTypeHnd.GetName(ssElemName);
diff --git a/src/vm/callingconvention.h b/src/vm/callingconvention.h
index b707b4b..9a0abd3 100644
--- a/src/vm/callingconvention.h
+++ b/src/vm/callingconvention.h
@@ -183,7 +183,7 @@ struct TransitionBlock
 #if defined(UNIX_AMD64_ABI) && defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
         _ASSERTE(offset != TransitionBlock::StructInRegsOffset);
 #endif
-        return (offset - GetOffsetOfArgumentRegisters()) / sizeof(TADDR);
+        return (offset - GetOffsetOfArgumentRegisters()) / TARGET_POINTER_SIZE;
     }
 
     static UINT GetStackArgumentIndexFromOffset(int offset)
@@ -242,7 +242,7 @@ struct TransitionBlock
         negSpaceSize += sizeof(FloatArgumentRegisters);
 #endif
 #ifdef _TARGET_ARM_
-        negSpaceSize += sizeof(TADDR); // padding to make FloatArgumentRegisters address 8-byte aligned
+        negSpaceSize += TARGET_POINTER_SIZE; // padding to make FloatArgumentRegisters address 8-byte aligned
 #endif
         return negSpaceSize;
     }
@@ -752,7 +752,7 @@ int ArgIteratorTemplate::GetRetBuffArgOffset()
         ret += (int) offsetof(ArgumentRegisters, x[8]);
 #else
     if (this->HasThis())
-        ret += sizeof(void *);
+        ret += TARGET_POINTER_SIZE;
 #endif
 
     return ret;
@@ -774,12 +774,12 @@ int ArgIteratorTemplate::GetVASigCookieOffset()
 
     if (this->HasThis())
     {
-        ret += sizeof(void*);
+        ret += TARGET_POINTER_SIZE;
     }
 
     if (this->HasRetBuffArg() && IsRetBuffPassedAsFirstArg())
     {
-        ret += sizeof(void*);
+        ret += TARGET_POINTER_SIZE;
     }
 
     return ret;
@@ -827,12 +827,12 @@ int ArgIteratorTemplate::GetParamTypeArgOffset()
 
     if (this->HasThis())
     {
-        ret += sizeof(void*);
+        ret += TARGET_POINTER_SIZE;
     }
 
     if (this->HasRetBuffArg() && IsRetBuffPassedAsFirstArg())
     {
-        ret += sizeof(void*);
+        ret += TARGET_POINTER_SIZE;
     }
 
     return ret;
diff --git a/src/vm/compile.cpp b/src/vm/compile.cpp
index 7040ffc..9d8274d 100644
--- a/src/vm/compile.cpp
+++ b/src/vm/compile.cpp
@@ -1028,7 +1028,7 @@ void CEECompileInfo::GetCallRefMap(CORINFO_METHOD_HANDLE hMethod, GCRefMapBuilde
 
     nStackSlots = nStackBytes / sizeof(TADDR) + NUM_ARGUMENT_REGISTERS;
 #else
-    nStackSlots = (sizeof(TransitionBlock) + nStackBytes - TransitionBlock::GetOffsetOfArgumentRegisters()) / sizeof(TADDR);
+    nStackSlots = (sizeof(TransitionBlock) + nStackBytes - TransitionBlock::GetOffsetOfArgumentRegisters()) / TARGET_POINTER_SIZE;
 #endif
 
     for (UINT pos = 0; pos < nStackSlots; pos++)
@@ -1040,7 +1040,7 @@ void CEECompileInfo::GetCallRefMap(CORINFO_METHOD_HANDLE hMethod, GCRefMapBuilde
             (TransitionBlock::GetOffsetOfArgumentRegisters() + ARGUMENTREGISTERS_SIZE - (pos + 1) * sizeof(TADDR)) :
             (TransitionBlock::GetOffsetOfArgs() + (pos - NUM_ARGUMENT_REGISTERS) * sizeof(TADDR));
 #else
-        ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * sizeof(TADDR);
+        ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * TARGET_POINTER_SIZE;
 #endif
 
         CORCOMPILE_GCREFMAP_TOKENS token = *(CORCOMPILE_GCREFMAP_TOKENS *)(pFrame + ofs);
@@ -1083,7 +1083,7 @@ void CEECompileInfo::GetCallRefMap(CORINFO_METHOD_HANDLE hMethod, GCRefMapBuilde
             (TransitionBlock::GetOffsetOfArgumentRegisters() + ARGUMENTREGISTERS_SIZE - (pos + 1) * sizeof(TADDR)) :
             (TransitionBlock::GetOffsetOfArgs() + (pos - NUM_ARGUMENT_REGISTERS) * sizeof(TADDR));
 #else
-        ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * sizeof(TADDR);
+        ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * TARGET_POINTER_SIZE;
 #endif
 
         if (token != 0)
@@ -2113,7 +2113,7 @@ void CEECompileInfo::EncodeTypeLayout(CORINFO_CLASS_HANDLE classHandle, SigBuild
 
     // Check everything
     dwFlags |= READYTORUN_LAYOUT_Alignment;
-    if (dwAlignment == sizeof(void *))
+    if (dwAlignment == TARGET_POINTER_SIZE)
         dwFlags |= READYTORUN_LAYOUT_Alignment_Native;
 
     dwFlags |= READYTORUN_LAYOUT_GCLayout;
@@ -2139,7 +2139,7 @@ void CEECompileInfo::EncodeTypeLayout(CORINFO_CLASS_HANDLE classHandle, SigBuild
 
     if ((dwFlags & READYTORUN_LAYOUT_GCLayout) && !(dwFlags & READYTORUN_LAYOUT_GCLayout_Empty))
     {
-        size_t cbGCRefMap = (dwSize / sizeof(TADDR) + 7) / 8;
+        size_t cbGCRefMap = (dwSize / TARGET_POINTER_SIZE + 7) / 8;
         _ASSERTE(cbGCRefMap > 0);
 
         BYTE * pGCRefMap = (BYTE *)_alloca(cbGCRefMap);
diff --git a/src/vm/fieldmarshaler.cpp b/src/vm/fieldmarshaler.cpp
index b1a4a8b..750fc43 100644
--- a/src/vm/fieldmarshaler.cpp
+++ b/src/vm/fieldmarshaler.cpp
@@ -289,8 +289,8 @@ do \
             }
             else if (corElemType == ELEMENT_TYPE_PTR)
             {
-                pfwalk->m_managedSize = sizeof(LPVOID);
-                pfwalk->m_managedAlignmentReq = sizeof(LPVOID);
+                pfwalk->m_managedSize = TARGET_POINTER_SIZE;
+                pfwalk->m_managedAlignmentReq = TARGET_POINTER_SIZE;
             }
             else if (corElemType == ELEMENT_TYPE_VALUETYPE)
             {
diff --git a/src/vm/jitinterface.cpp b/src/vm/jitinterface.cpp
index 168927e..db402ed 100644
--- a/src/vm/jitinterface.cpp
+++ b/src/vm/jitinterface.cpp
@@ -1942,7 +1942,7 @@ unsigned CEEInfo::getClassAlignmentRequirement(CORINFO_CLASS_HANDLE type, BOOL f
     } CONTRACTL_END;
 
     // Default alignment is sizeof(void*)
-    unsigned result = sizeof(void*);
+    unsigned result = TARGET_POINTER_SIZE;
 
     JIT_TO_EE_TRANSITION_LEAF();
 
@@ -1976,7 +1976,7 @@ unsigned CEEInfo::getClassAlignmentRequirementStatic(TypeHandle clsHnd)
     LIMITED_METHOD_CONTRACT;
 
     // Default alignment is sizeof(void*)
-    unsigned result = sizeof(void*);
+    unsigned result = TARGET_POINTER_SIZE;
 
     MethodTable * pMT = clsHnd.GetMethodTable();
     if (pMT == NULL)
@@ -2137,7 +2137,7 @@ static unsigned ComputeGCLayout(MethodTable * pMT, BYTE* gcPtrs)
     ApproxFieldDescIterator fieldIterator(pMT, ApproxFieldDescIterator::INSTANCE_FIELDS);
     for (FieldDesc *pFD = fieldIterator.Next(); pFD != NULL; pFD = fieldIterator.Next())
     {
-        int fieldStartIndex = pFD->GetOffset() / sizeof(void*);
+        int fieldStartIndex = pFD->GetOffset() / TARGET_POINTER_SIZE;
 
         if (pFD->GetFieldType() != ELEMENT_TYPE_VALUETYPE)
         {
@@ -2185,7 +2185,7 @@ unsigned CEEInfo::getClassGClayout (CORINFO_CLASS_HANDLE clsHnd, BYTE* gcPtrs)
         // native value types have no GC pointers
         result = 0;
         memset(gcPtrs, TYPE_GC_NONE,
-            (VMClsHnd.GetSize() + sizeof(void*) -1)/ sizeof(void*));
+            (VMClsHnd.GetSize() + TARGET_POINTER_SIZE - 1) / TARGET_POINTER_SIZE);
     }
     else if (pMT->IsByRefLike())
     {
@@ -2200,7 +2200,7 @@ unsigned CEEInfo::getClassGClayout (CORINFO_CLASS_HANDLE clsHnd, BYTE* gcPtrs)
     else
     {
         memset(gcPtrs, TYPE_GC_NONE,
-            (VMClsHnd.GetSize() + sizeof(void*) - 1) / sizeof(void*));
+            (VMClsHnd.GetSize() + TARGET_POINTER_SIZE - 1) / TARGET_POINTER_SIZE);
         // Note: This case is more complicated than the TypedReference case
         // due to ByRefLike structs being included as fields in other value
         // types (TypedReference can not be.)
@@ -2215,7 +2215,7 @@ unsigned CEEInfo::getClassGClayout (CORINFO_CLASS_HANDLE clsHnd, BYTE* gcPtrs)
         // assume no GC pointers at first
        result = 0;
         memset(gcPtrs, TYPE_GC_NONE,
-            (VMClsHnd.GetSize() + sizeof(void*) -1)/ sizeof(void*));
+            (VMClsHnd.GetSize() + TARGET_POINTER_SIZE - 1) / TARGET_POINTER_SIZE);
 
         // walk the GC descriptors, turning on the correct bits
         if (pMT->ContainsPointers())
@@ -2229,11 +2229,11 @@ unsigned CEEInfo::getClassGClayout (CORINFO_CLASS_HANDLE clsHnd, BYTE* gcPtrs)
             size_t cbSeriesSize = pByValueSeries->GetSeriesSize() + pMT->GetBaseSize();
             size_t cbOffset = pByValueSeries->GetSeriesOffset() - sizeof(Object);
 
-            _ASSERTE (cbOffset % sizeof(void*) == 0);
-            _ASSERTE (cbSeriesSize % sizeof(void*) == 0);
+            _ASSERTE (cbOffset % TARGET_POINTER_SIZE == 0);
+            _ASSERTE (cbSeriesSize % TARGET_POINTER_SIZE == 0);
 
-            result += (unsigned) (cbSeriesSize / sizeof(void*));
-            memset(&gcPtrs[cbOffset/sizeof(void*)], TYPE_GC_REF, cbSeriesSize / sizeof(void*));
+            result += (unsigned) (cbSeriesSize / TARGET_POINTER_SIZE);
+            memset(&gcPtrs[cbOffset / TARGET_POINTER_SIZE], TYPE_GC_REF, cbSeriesSize / TARGET_POINTER_SIZE);
 
             pByValueSeries++;
         }
@@ -13079,17 +13079,17 @@ void ComputeGCRefMap(MethodTable * pMT, BYTE * pGCRefMap, size_t cbGCRefMap)
         {
             // offset to embedded references in this series must be
            // adjusted by the VTable pointer, when in the unboxed state.
-            size_t offset = cur->GetSeriesOffset() - sizeof(void*);
+            size_t offset = cur->GetSeriesOffset() - TARGET_POINTER_SIZE;
             size_t offsetStop = offset + cur->GetSeriesSize() + size;
             while (offset < offsetStop)
             {
-                size_t bit = offset / sizeof(void *);
+                size_t bit = offset / TARGET_POINTER_SIZE;
 
                 size_t index = bit / 8;
                 _ASSERTE(index < cbGCRefMap);
                 pGCRefMap[index] |= (1 << (bit & 7));
 
-                offset += sizeof(void *);
+                offset += TARGET_POINTER_SIZE;
             }
             cur--;
         } while (cur >= last);
@@ -13145,7 +13145,7 @@ BOOL TypeLayoutCheck(MethodTable * pMT, PCCOR_SIGNATURE pBlob)
 
     if (dwFlags & READYTORUN_LAYOUT_Alignment)
     {
-        DWORD dwExpectedAlignment = sizeof(void *);
+        DWORD dwExpectedAlignment = TARGET_POINTER_SIZE;
         if (!(dwFlags & READYTORUN_LAYOUT_Alignment_Native))
         {
             IfFailThrow(p.GetData(&dwExpectedAlignment));
@@ -13166,7 +13166,7 @@ BOOL TypeLayoutCheck(MethodTable * pMT, PCCOR_SIGNATURE pBlob)
         }
         else
         {
-            size_t cbGCRefMap = (dwActualSize / sizeof(TADDR) + 7) / 8;
+            size_t cbGCRefMap = (dwActualSize / TARGET_POINTER_SIZE + 7) / 8;
             _ASSERTE(cbGCRefMap > 0);
 
             BYTE * pGCRefMap = (BYTE *)_alloca(cbGCRefMap);
diff --git a/src/vm/methodtablebuilder.cpp b/src/vm/methodtablebuilder.cpp
index 0ddbbbc..179a42d 100644
--- a/src/vm/methodtablebuilder.cpp
+++ b/src/vm/methodtablebuilder.cpp
@@ -1959,7 +1959,7 @@ MethodTableBuilder::BuildMethodTableThrowing(
     // Check for the RemotingProxy Attribute
 
     // structs with GC pointers MUST be pointer sized aligned because the GC assumes it
-    if (IsValueClass() && pMT->ContainsPointers() && (bmtFP->NumInstanceFieldBytes % sizeof(void*) != 0))
+    if (IsValueClass() && pMT->ContainsPointers() && (bmtFP->NumInstanceFieldBytes % TARGET_POINTER_SIZE != 0))
     {
         BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
     }
@@ -7908,7 +7908,7 @@ VOID MethodTableBuilder::PlaceInstanceFields(MethodTable ** pByValueClassCach
     // value types can never get large enough to allocate on the LOH).
     if (!IsValueClass())
     {
-        dwOffsetBias = sizeof(MethodTable*);
+        dwOffsetBias = TARGET_POINTER_SIZE;
         dwCumulativeInstanceFieldPos += dwOffsetBias;
     }
 #endif // FEATURE_64BIT_ALIGNMENT
@@ -8089,14 +8089,14 @@ VOID MethodTableBuilder::PlaceInstanceFields(MethodTable ** pByValueClassCach
 
 #if !defined(_WIN64) && (DATA_ALIGNMENT > 4)
                     dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos,
-                        (pByValueMT->GetNumInstanceFieldBytes() >= DATA_ALIGNMENT) ? DATA_ALIGNMENT : sizeof(void*));
+                        (pByValueMT->GetNumInstanceFieldBytes() >= DATA_ALIGNMENT) ? DATA_ALIGNMENT : TARGET_POINTER_SIZE);
 #else // !(!defined(_WIN64) && (DATA_ALIGNMENT > 4))
 #ifdef FEATURE_64BIT_ALIGNMENT
                     if (pByValueMT->RequiresAlign8())
                         dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, 8);
                     else
 #endif // FEATURE_64BIT_ALIGNMENT
-                        dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, sizeof(void*));
+                        dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, TARGET_POINTER_SIZE);
 #endif // !(!defined(_WIN64) && (DATA_ALIGNMENT > 4))
 
                     pFieldDescList[i].SetOffset(dwCumulativeInstanceFieldPos - dwOffsetBias);
@@ -8129,8 +8129,8 @@ VOID MethodTableBuilder::PlaceInstanceFields(MethodTable ** pByValueClassCach
         }
         else
 #endif // FEATURE_64BIT_ALIGNMENT
-        if (dwNumInstanceFieldBytes > sizeof(void*)) {
-            minAlign = sizeof(void*);
+        if (dwNumInstanceFieldBytes > TARGET_POINTER_SIZE) {
+            minAlign = TARGET_POINTER_SIZE;
         }
         else {
             minAlign = 1;
         }
@@ -8319,9 +8319,9 @@ MethodTableBuilder::HandleExplicitLayout(
     // 3. If an OREF does overlap with another OREF, the class is marked unverifiable.
     // 4. If an overlap of any kind occurs, the class will be marked NotTightlyPacked (affects ValueType.Equals()).
     //
-    char emptyObject[sizeof(void*)];
-    char isObject[sizeof(void*)];
-    for (i = 0; i < sizeof(void*); i++)
+    char emptyObject[TARGET_POINTER_SIZE];
+    char isObject[TARGET_POINTER_SIZE];
+    for (i = 0; i < TARGET_POINTER_SIZE; i++)
     {
         emptyObject[i] = empty;
         isObject[i] = oref;
     }
@@ -8409,7 +8409,7 @@ MethodTableBuilder::HandleExplicitLayout(
             MethodTable *pByValueMT = pByValueClassCache[valueClassCacheIndex];
             if (pByValueMT->ContainsPointers())
             {
-                if ((pFD->GetOffset_NoLogging() & ((ULONG)sizeof(void*) - 1)) == 0)
+                if ((pFD->GetOffset_NoLogging() & ((ULONG)TARGET_POINTER_SIZE - 1)) == 0)
                 {
                     ExplicitFieldTrust::TrustLevel trust;
                     DWORD firstObjectOverlapOffsetInsideValueClass = ((DWORD)(-1));
@@ -8518,7 +8518,7 @@ MethodTableBuilder::HandleExplicitLayout(
     S_UINT32 dwInstanceSliceOffset = S_UINT32(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0);
     if (bmtGCSeries->numSeries != 0)
     {
-        dwInstanceSliceOffset.AlignUp(sizeof(void*));
+        dwInstanceSliceOffset.AlignUp(TARGET_POINTER_SIZE);
     }
     if (dwInstanceSliceOffset.IsOverflow())
     {
@@ -8546,10 +8546,10 @@ MethodTableBuilder::HandleExplicitLayout(
         }
     }
 
-    // The GC requires that all valuetypes containing orefs be sized to a multiple of sizeof(void*).
+    // The GC requires that all valuetypes containing orefs be sized to a multiple of TARGET_POINTER_SIZE.
     if (bmtGCSeries->numSeries != 0)
     {
-        numInstanceFieldBytes.AlignUp(sizeof(void*));
+        numInstanceFieldBytes.AlignUp(TARGET_POINTER_SIZE);
     }
     if (numInstanceFieldBytes.IsOverflow())
     {
@@ -8711,7 +8711,7 @@ void MethodTableBuilder::FindPointerSeriesExplicit(UINT instanceSliceSize,
 
         bmtGCSeries->pSeries[bmtGCSeries->numSeries].offset = (DWORD)(loc - pFieldLayout);
         bmtGCSeries->pSeries[bmtGCSeries->numSeries].len = (DWORD)(cur - loc);
-        CONSISTENCY_CHECK(IS_ALIGNED(cur - loc, sizeof(size_t)));
+        CONSISTENCY_CHECK(IS_ALIGNED(cur - loc, TARGET_POINTER_SIZE));
         bmtGCSeries->numSeries++;
 
         loc = cur;
@@ -8757,7 +8757,7 @@ MethodTableBuilder::HandleGCForExplicitLayout()
 
     }
 
-    UINT32 dwInstanceSliceOffset = AlignUp(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0, sizeof(void*));
+    UINT32 dwInstanceSliceOffset = AlignUp(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0, TARGET_POINTER_SIZE);
 
     // Build the pointer series map for this pointers in this instance
     CGCDescSeries *pSeries = ((CGCDesc*)pMT)->GetLowestSeries();
@@ -9929,7 +9929,7 @@ MethodTable * MethodTableBuilder::AllocateNewMT(Module *pLoaderModule,
 
     BYTE *pData = (BYTE *)pamTracker->Track(pAllocator->GetHighFrequencyHeap()->AllocMem(cbTotalSize));
 
-    _ASSERTE(IS_ALIGNED(pData, sizeof(size_t)));
+    _ASSERTE(IS_ALIGNED(pData, TARGET_POINTER_SIZE));
 
     // There should be no overflows if we have allocated the memory succesfully
     _ASSERTE(!cbTotalSize.IsOverflow());
diff --git a/src/vm/siginfo.cpp b/src/vm/siginfo.cpp
index bc16bdf..fa26002 100644
--- a/src/vm/siginfo.cpp
+++ b/src/vm/siginfo.cpp
@@ -4975,7 +4975,7 @@ void ReportPointersFromValueType(promote_func *fn, ScanContext *sc, PTR_MethodTa
     {
         // offset to embedded references in this series must be
         // adjusted by the VTable pointer, when in the unboxed state.
-        size_t offset = cur->GetSeriesOffset() - sizeof(void*);
+        size_t offset = cur->GetSeriesOffset() - TARGET_POINTER_SIZE;
         PTR_OBJECTREF srcPtr = dac_cast(PTR_BYTE(pSrc) + offset);
         PTR_OBJECTREF srcPtrStop = dac_cast(PTR_BYTE(srcPtr) + cur->GetSeriesSize() + size);
         while (srcPtr < srcPtrStop)
-- 
2.7.4
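
For context on the change above: when the VM is built as a cross-targeting compiler (for example, an x64-hosted crossgen producing 32-bit ARM images), sizeof(void*) evaluates to the host's pointer width at compile time, while GC ref maps, field offsets, and alignment must be computed with the target's pointer width. The sketch below illustrates that difference; it is not part of the patch, and the TARGET_IS_32BIT switch and the 4/8 values are illustrative assumptions rather than CoreCLR's actual build configuration.

    // Illustrative only; not part of the patch. A hypothetical target-description
    // switch standing in for the real CoreCLR build headers.
    #include <cstdio>

    #define TARGET_IS_32BIT 1          // hypothetical: building a 32-bit-targeting compiler

    #if TARGET_IS_32BIT
    #define TARGET_POINTER_SIZE 4      // pointer width of the *target* being compiled for
    #else
    #define TARGET_POINTER_SIZE 8
    #endif

    int main()
    {
        // On a 64-bit host this prints 8 regardless of which target is selected above.
        std::printf("host pointer size:    %zu\n", sizeof(void*));

        // Layout math must use the target width: a 12-byte struct occupies three
        // 4-byte slots on a 32-bit target, but dividing by the host's 8-byte
        // sizeof(void*) would under-count the slots.
        unsigned structSize = 12;
        std::printf("target pointer slots: %u\n", structSize / TARGET_POINTER_SIZE);
        return 0;
    }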