// This is only called for Value classes. It returns a boolean array
// representing 'cls' from a GC perspective. The class is
// assumed to be an array of machine words
- // (of length // getClassSize(cls) / sizeof(void*)),
+ // (of length getClassSize(cls) / TARGET_POINTER_SIZE),
// 'gcPtrs' is a pointer to an array of BYTEs of this length.
// getClassGClayout fills in this array so that gcPtrs[i] is set
// to one of the CorInfoGCType values which is the GC type of
- ObjSizeOf(Object) - currentOffset;
}
- _ASSERTE(!"Module::CreateArrayMethodTable() - unaligned GC info" || IS_ALIGNED(skip, sizeof(size_t)));
+ _ASSERTE(!"Module::CreateArrayMethodTable() - unaligned GC info" || IS_ALIGNED(skip, TARGET_POINTER_SIZE));
- unsigned short NumPtrs = (unsigned short) (numPtrsInBytes / sizeof(void*));
+ unsigned short NumPtrs = (unsigned short) (numPtrsInBytes / TARGET_POINTER_SIZE);
if(skip > MAX_SIZE_FOR_VALUECLASS_IN_ARRAY || numPtrsInBytes > MAX_PTRS_FOR_VALUECLASSS_IN_ARRAY) {
StackSString ssElemName;
elemTypeHnd.GetName(ssElemName);
#if defined(UNIX_AMD64_ABI) && defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
_ASSERTE(offset != TransitionBlock::StructInRegsOffset);
#endif
- return (offset - GetOffsetOfArgumentRegisters()) / sizeof(TADDR);
+ return (offset - GetOffsetOfArgumentRegisters()) / TARGET_POINTER_SIZE;
}
static UINT GetStackArgumentIndexFromOffset(int offset)
negSpaceSize += sizeof(FloatArgumentRegisters);
#endif
#ifdef _TARGET_ARM_
- negSpaceSize += sizeof(TADDR); // padding to make FloatArgumentRegisters address 8-byte aligned
+ negSpaceSize += TARGET_POINTER_SIZE; // padding to make FloatArgumentRegisters address 8-byte aligned
#endif
return negSpaceSize;
}
ret += (int) offsetof(ArgumentRegisters, x[8]);
#else
if (this->HasThis())
- ret += sizeof(void *);
+ ret += TARGET_POINTER_SIZE;
#endif
return ret;
if (this->HasThis())
{
- ret += sizeof(void*);
+ ret += TARGET_POINTER_SIZE;
}
if (this->HasRetBuffArg() && IsRetBuffPassedAsFirstArg())
{
- ret += sizeof(void*);
+ ret += TARGET_POINTER_SIZE;
}
return ret;
if (this->HasThis())
{
- ret += sizeof(void*);
+ ret += TARGET_POINTER_SIZE;
}
if (this->HasRetBuffArg() && IsRetBuffPassedAsFirstArg())
{
- ret += sizeof(void*);
+ ret += TARGET_POINTER_SIZE;
}
return ret;
nStackSlots = nStackBytes / sizeof(TADDR) + NUM_ARGUMENT_REGISTERS;
#else
- nStackSlots = (sizeof(TransitionBlock) + nStackBytes - TransitionBlock::GetOffsetOfArgumentRegisters()) / sizeof(TADDR);
+ nStackSlots = (sizeof(TransitionBlock) + nStackBytes - TransitionBlock::GetOffsetOfArgumentRegisters()) / TARGET_POINTER_SIZE;
#endif
for (UINT pos = 0; pos < nStackSlots; pos++)
(TransitionBlock::GetOffsetOfArgumentRegisters() + ARGUMENTREGISTERS_SIZE - (pos + 1) * sizeof(TADDR)) :
(TransitionBlock::GetOffsetOfArgs() + (pos - NUM_ARGUMENT_REGISTERS) * sizeof(TADDR));
#else
- ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * sizeof(TADDR);
+ ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * TARGET_POINTER_SIZE;
#endif
CORCOMPILE_GCREFMAP_TOKENS token = *(CORCOMPILE_GCREFMAP_TOKENS *)(pFrame + ofs);
(TransitionBlock::GetOffsetOfArgumentRegisters() + ARGUMENTREGISTERS_SIZE - (pos + 1) * sizeof(TADDR)) :
(TransitionBlock::GetOffsetOfArgs() + (pos - NUM_ARGUMENT_REGISTERS) * sizeof(TADDR));
#else
- ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * sizeof(TADDR);
+ ofs = TransitionBlock::GetOffsetOfArgumentRegisters() + pos * TARGET_POINTER_SIZE;
#endif
if (token != 0)
// Check everything
dwFlags |= READYTORUN_LAYOUT_Alignment;
- if (dwAlignment == sizeof(void *))
+ if (dwAlignment == TARGET_POINTER_SIZE)
dwFlags |= READYTORUN_LAYOUT_Alignment_Native;
dwFlags |= READYTORUN_LAYOUT_GCLayout;
if ((dwFlags & READYTORUN_LAYOUT_GCLayout) && !(dwFlags & READYTORUN_LAYOUT_GCLayout_Empty))
{
- size_t cbGCRefMap = (dwSize / sizeof(TADDR) + 7) / 8;
+ size_t cbGCRefMap = (dwSize / TARGET_POINTER_SIZE + 7) / 8;
_ASSERTE(cbGCRefMap > 0);
BYTE * pGCRefMap = (BYTE *)_alloca(cbGCRefMap);
}
else if (corElemType == ELEMENT_TYPE_PTR)
{
- pfwalk->m_managedSize = sizeof(LPVOID);
- pfwalk->m_managedAlignmentReq = sizeof(LPVOID);
+ pfwalk->m_managedSize = TARGET_POINTER_SIZE;
+ pfwalk->m_managedAlignmentReq = TARGET_POINTER_SIZE;
}
else if (corElemType == ELEMENT_TYPE_VALUETYPE)
{
} CONTRACTL_END;
- // Default alignment is sizeof(void*)
+ // Default alignment is TARGET_POINTER_SIZE
- unsigned result = sizeof(void*);
+ unsigned result = TARGET_POINTER_SIZE;
JIT_TO_EE_TRANSITION_LEAF();
LIMITED_METHOD_CONTRACT;
- // Default alignment is sizeof(void*)
+ // Default alignment is TARGET_POINTER_SIZE
- unsigned result = sizeof(void*);
+ unsigned result = TARGET_POINTER_SIZE;
MethodTable * pMT = clsHnd.GetMethodTable();
if (pMT == NULL)
ApproxFieldDescIterator fieldIterator(pMT, ApproxFieldDescIterator::INSTANCE_FIELDS);
for (FieldDesc *pFD = fieldIterator.Next(); pFD != NULL; pFD = fieldIterator.Next())
{
- int fieldStartIndex = pFD->GetOffset() / sizeof(void*);
+ int fieldStartIndex = pFD->GetOffset() / TARGET_POINTER_SIZE;
if (pFD->GetFieldType() != ELEMENT_TYPE_VALUETYPE)
{
// native value types have no GC pointers
result = 0;
memset(gcPtrs, TYPE_GC_NONE,
- (VMClsHnd.GetSize() + sizeof(void*) -1)/ sizeof(void*));
+ (VMClsHnd.GetSize() + TARGET_POINTER_SIZE - 1) / TARGET_POINTER_SIZE);
}
else if (pMT->IsByRefLike())
{
else
{
memset(gcPtrs, TYPE_GC_NONE,
- (VMClsHnd.GetSize() + sizeof(void*) - 1) / sizeof(void*));
+ (VMClsHnd.GetSize() + TARGET_POINTER_SIZE - 1) / TARGET_POINTER_SIZE);
// Note: This case is more complicated than the TypedReference case
// due to ByRefLike structs being included as fields in other value
// types (TypedReference can not be.)
// assume no GC pointers at first
result = 0;
memset(gcPtrs, TYPE_GC_NONE,
- (VMClsHnd.GetSize() + sizeof(void*) -1)/ sizeof(void*));
+ (VMClsHnd.GetSize() + TARGET_POINTER_SIZE - 1) / TARGET_POINTER_SIZE);
// walk the GC descriptors, turning on the correct bits
if (pMT->ContainsPointers())
size_t cbSeriesSize = pByValueSeries->GetSeriesSize() + pMT->GetBaseSize();
size_t cbOffset = pByValueSeries->GetSeriesOffset() - sizeof(Object);
- _ASSERTE (cbOffset % sizeof(void*) == 0);
- _ASSERTE (cbSeriesSize % sizeof(void*) == 0);
+ _ASSERTE (cbOffset % TARGET_POINTER_SIZE == 0);
+ _ASSERTE (cbSeriesSize % TARGET_POINTER_SIZE == 0);
- result += (unsigned) (cbSeriesSize / sizeof(void*));
- memset(&gcPtrs[cbOffset/sizeof(void*)], TYPE_GC_REF, cbSeriesSize / sizeof(void*));
+ result += (unsigned) (cbSeriesSize / TARGET_POINTER_SIZE);
+ memset(&gcPtrs[cbOffset / TARGET_POINTER_SIZE], TYPE_GC_REF, cbSeriesSize / TARGET_POINTER_SIZE);
pByValueSeries++;
}
{
// offset to embedded references in this series must be
// adjusted by the VTable pointer, when in the unboxed state.
- size_t offset = cur->GetSeriesOffset() - sizeof(void*);
+ size_t offset = cur->GetSeriesOffset() - TARGET_POINTER_SIZE;
size_t offsetStop = offset + cur->GetSeriesSize() + size;
while (offset < offsetStop)
{
- size_t bit = offset / sizeof(void *);
+ size_t bit = offset / TARGET_POINTER_SIZE;
size_t index = bit / 8;
_ASSERTE(index < cbGCRefMap);
pGCRefMap[index] |= (1 << (bit & 7));
- offset += sizeof(void *);
+ offset += TARGET_POINTER_SIZE;
}
cur--;
} while (cur >= last);
if (dwFlags & READYTORUN_LAYOUT_Alignment)
{
- DWORD dwExpectedAlignment = sizeof(void *);
+ DWORD dwExpectedAlignment = TARGET_POINTER_SIZE;
if (!(dwFlags & READYTORUN_LAYOUT_Alignment_Native))
{
IfFailThrow(p.GetData(&dwExpectedAlignment));
}
else
{
- size_t cbGCRefMap = (dwActualSize / sizeof(TADDR) + 7) / 8;
+ size_t cbGCRefMap = (dwActualSize / TARGET_POINTER_SIZE + 7) / 8;
_ASSERTE(cbGCRefMap > 0);
BYTE * pGCRefMap = (BYTE *)_alloca(cbGCRefMap);
// Check for the RemotingProxy Attribute
// structs with GC pointers MUST be pointer sized aligned because the GC assumes it
- if (IsValueClass() && pMT->ContainsPointers() && (bmtFP->NumInstanceFieldBytes % sizeof(void*) != 0))
+ if (IsValueClass() && pMT->ContainsPointers() && (bmtFP->NumInstanceFieldBytes % TARGET_POINTER_SIZE != 0))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
// value types can never get large enough to allocate on the LOH).
if (!IsValueClass())
{
- dwOffsetBias = sizeof(MethodTable*);
+ dwOffsetBias = TARGET_POINTER_SIZE;
dwCumulativeInstanceFieldPos += dwOffsetBias;
}
#endif // FEATURE_64BIT_ALIGNMENT
#if !defined(_WIN64) && (DATA_ALIGNMENT > 4)
dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos,
- (pByValueMT->GetNumInstanceFieldBytes() >= DATA_ALIGNMENT) ? DATA_ALIGNMENT : sizeof(void*));
+ (pByValueMT->GetNumInstanceFieldBytes() >= DATA_ALIGNMENT) ? DATA_ALIGNMENT : TARGET_POINTER_SIZE);
#else // !(!defined(_WIN64) && (DATA_ALIGNMENT > 4))
#ifdef FEATURE_64BIT_ALIGNMENT
if (pByValueMT->RequiresAlign8())
dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, 8);
else
#endif // FEATURE_64BIT_ALIGNMENT
- dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, sizeof(void*));
+ dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, TARGET_POINTER_SIZE);
#endif // !(!defined(_WIN64) && (DATA_ALIGNMENT > 4))
pFieldDescList[i].SetOffset(dwCumulativeInstanceFieldPos - dwOffsetBias);
}
else
#endif // FEATURE_64BIT_ALIGNMENT
- if (dwNumInstanceFieldBytes > sizeof(void*)) {
- minAlign = sizeof(void*);
+ if (dwNumInstanceFieldBytes > TARGET_POINTER_SIZE) {
+ minAlign = TARGET_POINTER_SIZE;
}
else {
minAlign = 1;
// 3. If an OREF does overlap with another OREF, the class is marked unverifiable.
// 4. If an overlap of any kind occurs, the class will be marked NotTightlyPacked (affects ValueType.Equals()).
//
- char emptyObject[sizeof(void*)];
- char isObject[sizeof(void*)];
- for (i = 0; i < sizeof(void*); i++)
+ char emptyObject[TARGET_POINTER_SIZE];
+ char isObject[TARGET_POINTER_SIZE];
+ for (i = 0; i < TARGET_POINTER_SIZE; i++)
{
emptyObject[i] = empty;
isObject[i] = oref;
MethodTable *pByValueMT = pByValueClassCache[valueClassCacheIndex];
if (pByValueMT->ContainsPointers())
{
- if ((pFD->GetOffset_NoLogging() & ((ULONG)sizeof(void*) - 1)) == 0)
+ if ((pFD->GetOffset_NoLogging() & ((ULONG)TARGET_POINTER_SIZE - 1)) == 0)
{
ExplicitFieldTrust::TrustLevel trust;
DWORD firstObjectOverlapOffsetInsideValueClass = ((DWORD)(-1));
S_UINT32 dwInstanceSliceOffset = S_UINT32(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0);
if (bmtGCSeries->numSeries != 0)
{
- dwInstanceSliceOffset.AlignUp(sizeof(void*));
+ dwInstanceSliceOffset.AlignUp(TARGET_POINTER_SIZE);
}
if (dwInstanceSliceOffset.IsOverflow())
{
}
}
- // The GC requires that all valuetypes containing orefs be sized to a multiple of sizeof(void*).
+ // The GC requires that all valuetypes containing orefs be sized to a multiple of TARGET_POINTER_SIZE.
if (bmtGCSeries->numSeries != 0)
{
- numInstanceFieldBytes.AlignUp(sizeof(void*));
+ numInstanceFieldBytes.AlignUp(TARGET_POINTER_SIZE);
}
if (numInstanceFieldBytes.IsOverflow())
{
bmtGCSeries->pSeries[bmtGCSeries->numSeries].offset = (DWORD)(loc - pFieldLayout);
bmtGCSeries->pSeries[bmtGCSeries->numSeries].len = (DWORD)(cur - loc);
- CONSISTENCY_CHECK(IS_ALIGNED(cur - loc, sizeof(size_t)));
+ CONSISTENCY_CHECK(IS_ALIGNED(cur - loc, TARGET_POINTER_SIZE));
bmtGCSeries->numSeries++;
loc = cur;
}
- UINT32 dwInstanceSliceOffset = AlignUp(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0, sizeof(void*));
+ UINT32 dwInstanceSliceOffset = AlignUp(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0, TARGET_POINTER_SIZE);
// Build the pointer series map for this pointers in this instance
CGCDescSeries *pSeries = ((CGCDesc*)pMT)->GetLowestSeries();
BYTE *pData = (BYTE *)pamTracker->Track(pAllocator->GetHighFrequencyHeap()->AllocMem(cbTotalSize));
- _ASSERTE(IS_ALIGNED(pData, sizeof(size_t)));
+ _ASSERTE(IS_ALIGNED(pData, TARGET_POINTER_SIZE));
// There should be no overflows if we have allocated the memory successfully
_ASSERTE(!cbTotalSize.IsOverflow());
{
// offset to embedded references in this series must be
// adjusted by the VTable pointer, when in the unboxed state.
- size_t offset = cur->GetSeriesOffset() - sizeof(void*);
+ size_t offset = cur->GetSeriesOffset() - TARGET_POINTER_SIZE;
PTR_OBJECTREF srcPtr = dac_cast<PTR_OBJECTREF>(PTR_BYTE(pSrc) + offset);
PTR_OBJECTREF srcPtrStop = dac_cast<PTR_OBJECTREF>(PTR_BYTE(srcPtr) + cur->GetSeriesSize() + size);
while (srcPtr < srcPtrStop)