// NOTE(review): this region is a diff view ('-' = removed, '+' = added lines), with some
// context lines elided between hunks — it is not directly compilable as shown.
//
// ParseArrayAddress: decompose an array-element address tree into its components:
//   *pArr    - the array object reference
//   *pInxVN  - value number of the (scaled-down) element index
//   *pFldSeq - field sequence for any struct-field suffix applied to the element
// The change being reviewed replaces host-sized ssize_t with target_ssize_t so that
// offsets/indices are computed in the TARGET's pointer width (cross-bitness: e.g. a
// 32-bit target compiled by a 64-bit host JIT).
void GenTree::ParseArrayAddress(
Compiler* comp, ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
{
- *pArr = nullptr;
- ValueNum inxVN = ValueNumStore::NoVN;
- ssize_t offset = 0;
- FieldSeqNode* fldSeq = nullptr;
+ *pArr = nullptr;
+ ValueNum inxVN = ValueNumStore::NoVN;
+ target_ssize_t offset = 0; // offset accumulated in target pointer width, not host's
+ FieldSeqNode* fldSeq = nullptr;
// Recursively walk the address tree, accumulating index VN, constant offset, and field seq.
ParseArrayAddressWork(comp, 1, pArr, &inxVN, &offset, &fldSeq);
}
// NOTE(review): context lines elided here — the code below is still inside
// ParseArrayAddress in the real file, after fieldOffsets has been computed.
// Is there some portion of the "offset" beyond the first-elem offset and the struct field suffix we just computed?
- if (!FitsIn<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset) || !FitsIn<ssize_t>(arrayInfo->m_elemSize))
+ if (!FitsIn<target_ssize_t>(fieldOffsets + arrayInfo->m_elemOffset) ||
+ !FitsIn<target_ssize_t>(arrayInfo->m_elemSize))
{
// This seems unlikely, but no harm in being safe...
// Overflow of the target-sized range: give up and use an opaque value number.
*pInxVN = comp->GetValueNumStore()->VNForExpr(nullptr, TYP_INT);
return;
}
// Otherwise...
- ssize_t offsetAccountedFor = static_cast<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset);
- ssize_t elemSize = static_cast<ssize_t>(arrayInfo->m_elemSize);
+ target_ssize_t offsetAccountedFor = static_cast<target_ssize_t>(fieldOffsets + arrayInfo->m_elemOffset);
+ target_ssize_t elemSize = static_cast<target_ssize_t>(arrayInfo->m_elemSize);
// Remaining constant offset attributable to the index expression itself.
- ssize_t constIndOffset = offset - offsetAccountedFor;
+ target_ssize_t constIndOffset = offset - offsetAccountedFor;
// This should be divisible by the element size...
assert((constIndOffset % elemSize) == 0);
- ssize_t constInd = constIndOffset / elemSize;
+ target_ssize_t constInd = constIndOffset / elemSize;
ValueNumStore* vnStore = comp->GetValueNumStore();
// which has been scaled by element size. We need to recover the array index from that offset
if (vnStore->IsVNConstant(inxVN))
{
// The coerced constant must also be read at target width so division by
// elemSize recovers the same index the target would compute.
- ssize_t index = vnStore->CoercedConstantValue<ssize_t>(inxVN);
+ target_ssize_t index = vnStore->CoercedConstantValue<target_ssize_t>(inxVN);
noway_assert(elemSize > 0 && ((index % elemSize) == 0));
*pInxVN = vnStore->VNForPtrSizeIntCon((index / elemSize) + constInd);
}
}
}
// NOTE(review): diff view continues; the enclosing switch header and the tail of this
// function are elided, so this definition is incomplete as shown.
//
// ParseArrayAddressWork: recursive worker for ParseArrayAddress. "this" is a subtree of
// the address expression; inputMul is the multiplier (scale) applied to this subtree by
// the tree above it. Contributions are accumulated into *pArr / *pInxVN / *pOffset /
// *pFldSeq. The signature change widens inputMul and *pOffset to target_ssize_t.
-void GenTree::ParseArrayAddressWork(
- Compiler* comp, ssize_t inputMul, GenTree** pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq)
+void GenTree::ParseArrayAddressWork(Compiler* comp,
+ target_ssize_t inputMul,
+ GenTree** pArr,
+ ValueNum* pInxVN,
+ target_ssize_t* pOffset,
+ FieldSeqNode** pFldSeq)
{
// A TYP_REF subtree is the array object itself (presumably recorded into *pArr in
// elided code below — TODO confirm against the full file).
if (TypeGet() == TYP_REF)
{
{
case GT_CNS_INT:
*pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, gtIntCon.gtFieldSeq);
// A relocatable immediate has no meaningful numeric value to fold into the offset.
- *pOffset += (inputMul * gtIntCon.gtIconVal);
+ assert(!gtIntCon.ImmedValNeedsReloc(comp));
+ // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntCon::gtIconVal had target_ssize_t
+ // type.
+ *pOffset += (inputMul * (target_ssize_t)(gtIntCon.gtIconVal));
return;
case GT_ADD:
case GT_MUL:
{
// If one op is a constant, continue parsing down.
- ssize_t subMul = 0;
- GenTree* nonConst = nullptr;
+ target_ssize_t subMul = 0;
+ GenTree* nonConst = nullptr;
if (gtOp.gtOp1->IsCnsIntOrI())
{
// If the other arg is an int constant, and is a "not-a-field", choose
// it as the multiplier so op1's field sequence is not lost.
if (gtOp.gtOp2->OperGet() == GT_CNS_INT &&
gtOp.gtOp2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField())
{
- subMul = gtOp.gtOp2->gtIntConCommon.IconValue();
+ assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
+ // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
+ // target_ssize_t type.
+ subMul = (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
nonConst = gtOp.gtOp1;
}
else
{
- subMul = gtOp.gtOp1->gtIntConCommon.IconValue();
+ assert(!gtOp.gtOp1->gtIntCon.ImmedValNeedsReloc(comp));
+ // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
+ // target_ssize_t type.
+ subMul = (target_ssize_t)gtOp.gtOp1->gtIntConCommon.IconValue();
nonConst = gtOp.gtOp2;
}
}
else if (gtOp.gtOp2->IsCnsIntOrI())
{
- subMul = gtOp.gtOp2->gtIntConCommon.IconValue();
+ assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
+ // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
+ // target_ssize_t type.
+ subMul = (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
nonConst = gtOp.gtOp1;
}
if (nonConst != nullptr)
// NOTE(review): context elided — the lines below belong to the GT_LSH case
// (shift by constant == multiply by a power of two).
// If one op is a constant, continue parsing down.
if (gtOp.gtOp2->IsCnsIntOrI())
{
- ssize_t subMul = ssize_t{1} << gtOp.gtOp2->gtIntConCommon.IconValue();
+ assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
+ // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntCon::gtIconVal had target_ssize_t
+ // type.
+ target_ssize_t subMul = target_ssize_t{1} << (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
return;
}