}
// If the op1 is already in the dstReg - nothing to do.
- // Otherwise load the op1 (GT_ADDR) into the dstReg to copy the struct on the stack by value.
+ // Otherwise load the op1 (the address) into the dstReg to copy the struct on the stack by value.
CLANG_FORMAT_COMMENT_ANCHOR;
#ifdef TARGET_X86
(structHandle != m_simdHandleCache->SIMDVector4Handle));
}
- // Returns true if the tree corresponds to a TYP_SIMD lcl var.
- // Note that both SIMD vector args and locals are mared as lvSIMDType = true, but
- // type of an arg node is TYP_BYREF and a local node is TYP_SIMD or TYP_STRUCT.
- bool isSIMDTypeLocal(GenTree* tree)
- {
- return tree->OperIsLocal() && lvaGetDesc(tree->AsLclVarCommon())->lvSIMDType;
- }
-
// Returns true if the lclVar is an opaque SIMD type.
bool isOpaqueSIMDLclVar(const LclVarDsc* varDsc) const
{
case GT_BITCAST:
case GT_CKFINITE:
case GT_LCLHEAP:
- case GT_ADDR:
case GT_IND:
case GT_OBJ:
case GT_BLK:
if (oper == GT_ADDR)
{
- if (op1->OperIsIndir())
+ switch (op1->OperGet())
{
- assert(op1->IsValue());
- return op1->AsIndir()->Addr();
- }
+ case GT_LCL_VAR:
+ return gtNewLclVarAddrNode(op1->AsLclVar()->GetLclNum(), type);
+
+ case GT_LCL_FLD:
+ return gtNewLclFldAddrNode(op1->AsLclFld()->GetLclNum(), op1->AsLclFld()->GetLclOffs(), type);
+
+ case GT_BLK:
+ case GT_OBJ:
+ case GT_IND:
+ return op1->AsIndir()->Addr();
+
+ case GT_FIELD:
+ {
+ GenTreeField* fieldAddr =
+ new (this, GT_FIELD_ADDR) GenTreeField(GT_FIELD_ADDR, type, op1->AsField()->GetFldObj(),
+ op1->AsField()->gtFldHnd, op1->AsField()->gtFldOffset);
+ fieldAddr->gtFldMayOverlap = op1->AsField()->gtFldMayOverlap;
+#ifdef FEATURE_READYTORUN
+ fieldAddr->gtFieldLookup = op1->AsField()->gtFieldLookup;
+#endif
+ return fieldAddr;
+ }
- assert(op1->OperIsLocalRead() || op1->OperIs(GT_FIELD));
- op1->SetDoNotCSE();
+ default:
+ unreached();
+ }
}
GenTree* node = new (this, oper) GenTreeOp(oper, type, op1, nullptr);
case GT_BITCAST:
case GT_CKFINITE:
case GT_LCLHEAP:
- case GT_ADDR:
case GT_IND:
case GT_OBJ:
case GT_BLK:
break;
case GT_ASG:
- case GT_ADDR:
// Note that this is a weak check - the "op1" location node can be a COMMA.
assert(!op1->CanCSE());
break;
return GenTree::VisitResult::Continue;
});
- // Addresses of locals never need GTF_GLOB_REF
- if (tree->OperIs(GT_ADDR) && tree->IsLocalAddrExpr())
- {
- expectedFlags &= ~GTF_GLOB_REF;
- }
-
fgDebugCheckFlagsHelper(tree, actualFlags, expectedFlags);
}
//
// Return Value:
// If "tree" is a indirection (GT_IND, GT_BLK, or GT_OBJ) whose arg is:
-// - an ADDR, whose arg in turn is a LCL_VAR, return that LCL_VAR node;
// - a LCL_VAR_ADDR, return that LCL_VAR_ADDR;
// - else nullptr.
//
GenTreeLclVar* Compiler::fgIsIndirOfAddrOfLocal(GenTree* tree)
{
GenTreeLclVar* res = nullptr;
- if (tree->OperIsIndir())
+ if (tree->OperIsIndir() && tree->AsIndir()->Addr()->OperIs(GT_LCL_VAR_ADDR))
{
- GenTree* addr = tree->AsIndir()->Addr();
-
- if (addr->OperGet() == GT_ADDR)
- {
- GenTree* lclvar = addr->AsOp()->gtOp1;
- if (lclvar->OperGet() == GT_LCL_VAR)
- {
- res = lclvar->AsLclVar();
- }
- }
- else if (addr->OperGet() == GT_LCL_VAR_ADDR)
- {
- res = addr->AsLclVar();
- }
+ res = tree->AsIndir()->Addr()->AsLclVar();
}
+
return res;
}
return !addr->IsIconHandle();
case GT_CNS_STR:
- case GT_ADDR:
case GT_FIELD_ADDR:
case GT_LCL_VAR_ADDR:
case GT_LCL_FLD_ADDR:
case GT_CLS_VAR_ADDR:
- // A GT_ADDR node, by itself, never requires null checking. The expression whose address is being
- // taken is either a local or static variable, whose address is necessarily non-null, or else it is
- // a field dereference, which will do its own bounds checking if necessary.
return false;
case GT_IND:
{
// Insert the expression "enter/exitCrit(this, &acquired)" or "enter/exitCrit(handle, &acquired)"
- GenTree* varNode = gtNewLclvNode(lvaMonAcquired, lvaGetDesc(lvaMonAcquired)->TypeGet());
- GenTree* varAddrNode = gtNewOperNode(GT_ADDR, TYP_BYREF, varNode);
+ GenTree* varAddrNode = gtNewLclVarAddrNode(lvaMonAcquired);
GenTree* tree;
if (info.compIsStatic)
// Add enter pinvoke exit callout at the start of prolog
- GenTree* pInvokeFrameVar = gtNewOperNode(GT_ADDR, TYP_I_IMPL, gtNewLclvNode(lvaReversePInvokeFrameVar, TYP_BLK));
+ GenTree* pInvokeFrameVar = gtNewLclVarAddrNode(lvaReversePInvokeFrameVar);
GenTree* tree;
// Add reverse pinvoke exit callout at the end of epilog
- tree = gtNewOperNode(GT_ADDR, TYP_I_IMPL, gtNewLclvNode(lvaReversePInvokeFrameVar, TYP_BLK));
+ tree = gtNewLclVarAddrNode(lvaReversePInvokeFrameVar);
CorInfoHelpFunc reversePInvokeExitHelper = opts.jitFlags->IsSet(JitFlags::JIT_FLAG_TRACK_TRANSITIONS)
? CORINFO_HELP_JIT_REVERSE_PINVOKE_EXIT_TRACK_TRANSITIONS
// Screen out contextual "uses"
//
GenTree* const parent = user;
- bool const isAddr = parent->OperIs(GT_ADDR);
-
- bool isCallTarget = false;
// Quirk:
//
// fgGetStubAddrArg cannot handle complex trees (it calls gtClone)
//
+ bool isCallTarget = false;
if (parent->IsCall())
{
GenTreeCall* const parentCall = parent->AsCall();
isCallTarget = (parentCall->gtCallType == CT_INDIRECT) && (parentCall->gtCallAddr == node);
}
- if (!isDef && !isAddr && !isCallTarget)
+ if (!isDef && !isCallTarget)
{
m_node = node;
m_use = use;
// gtHasRef: Find out whether the given tree contains a local.
//
// Arguments:
-// tree - tree to find the local in
-// lclNum - the local's number
+// tree - tree to find the local in
+// lclNum - the local's number
//
// Return Value:
-// Whether "tree" has any LCL_VAR/LCL_FLD nodes that refer to the local.
-//
-// Notes:
-// Does not pay attention to local address nodes.
+// Whether "tree" has any local nodes that refer to the local.
//
/* static */ bool Compiler::gtHasRef(GenTree* tree, unsigned lclNum)
{
if (tree->OperIsLeaf())
{
- if (tree->OperIs(GT_LCL_VAR, GT_LCL_FLD) && (tree->AsLclVarCommon()->GetLclNum() == lclNum))
+ if ((tree->OperIsLocal() || tree->OperIsLocalAddr()) && (tree->AsLclVarCommon()->GetLclNum() == lclNum))
{
return true;
}
level++;
break;
- case GT_ADDR:
- if (op1->OperIsLocalRead())
- {
- costEx = 3;
- costSz = 3;
- goto DONE;
- }
-
- costEx = 0;
- costSz = 1;
- break;
-
case GT_ARR_LENGTH:
case GT_MDARR_LENGTH:
case GT_MDARR_LOWER_BOUND:
bool bReverseInAssignment = false;
if (oper == GT_ASG && (!optValnumCSE_phase || optCSE_canSwap(op1, op2)))
{
- GenTree* op1Val = op1;
-
- // Skip over the GT_IND/GT_ADDR tree (if one exists)
- //
- if ((op1->gtOper == GT_IND) && (op1->AsOp()->gtOp1->gtOper == GT_ADDR))
- {
- op1Val = op1->AsOp()->gtOp1->AsOp()->gtOp1;
- }
-
- switch (op1Val->gtOper)
+ switch (op1->OperGet())
{
case GT_IND:
case GT_BLK:
// itself. As such, we can discard any side effects "induced" by it in
// this logic.
//
- // Note that for local "addr"s, liveness depends on seeing the defs and
- // uses in correct order, and so we MUST reverse the ASG in that case.
- //
GenTree* op1Addr = op1->AsIndir()->Addr();
- if (op1Addr->IsLocalAddrExpr() || op1Addr->IsInvariant())
+ if (op1Addr->IsInvariant())
{
bReverseInAssignment = true;
tree->gtFlags |= GTF_REVERSE_OPS;
break;
}
- // In case op2 assigns to a local var that is used in op1Val, we have to evaluate op1Val first.
+ // In case op2 assigns to a local var that is used in op1, we have to evaluate op1 first.
if (op2->gtFlags & GTF_ASG)
{
break;
}
// If op2 is simple then evaluate op1 first
-
if (op2->OperKind() & GTK_LEAF)
{
break;
case GT_LCL_VAR:
case GT_LCL_FLD:
-
- // We evaluate op2 before op1
+ // Note that for local stores, liveness depends on seeing the defs and
+ // uses in correct order, and so we MUST reverse the ASG in that case.
bReverseInAssignment = true;
tree->gtFlags |= GTF_REVERSE_OPS;
break;
case GT_BITCAST:
case GT_CKFINITE:
case GT_LCLHEAP:
- case GT_ADDR:
case GT_IND:
case GT_OBJ:
case GT_BLK:
GenTreeField* fieldNode = new (this, GT_FIELD) GenTreeField(GT_FIELD, type, obj, fldHnd, offset);
// If "obj" is the address of a local, note that a field of that struct local has been accessed.
- if ((obj != nullptr) && obj->OperIs(GT_ADDR) && varTypeIsStruct(obj->AsUnOp()->gtOp1) &&
- obj->AsUnOp()->gtOp1->OperIs(GT_LCL_VAR))
+ if ((obj != nullptr) && obj->OperIs(GT_LCL_VAR_ADDR))
{
- LclVarDsc* varDsc = lvaGetDesc(obj->AsUnOp()->gtOp1->AsLclVarCommon());
+ LclVarDsc* varDsc = lvaGetDesc(obj->AsLclVarCommon());
varDsc->lvFieldAccessed = 1;
GenTreeField* fieldNode = new (this, GT_FIELD_ADDR) GenTreeField(GT_FIELD_ADDR, type, obj, fldHnd, offset);
// If "obj" is the address of a local, note that a field of that struct local has been accessed.
- if ((obj != nullptr) && obj->OperIs(GT_ADDR) && varTypeIsStruct(obj->AsUnOp()->gtOp1) &&
- obj->AsUnOp()->gtOp1->OperIs(GT_LCL_VAR))
+ if ((obj != nullptr) && obj->OperIs(GT_LCL_VAR_ADDR))
{
- LclVarDsc* varDsc = lvaGetDesc(obj->AsUnOp()->gtOp1->AsLclVarCommon());
+ LclVarDsc* varDsc = lvaGetDesc(obj->AsLclVarCommon());
varDsc->lvFieldAccessed = 1;
//
GenTree* Compiler::gtNewStructVal(ClassLayout* layout, GenTree* addr)
{
- if (addr->OperIs(GT_ADDR))
+ if (addr->OperIs(GT_LCL_VAR_ADDR))
{
- GenTree* location = addr->gtGetOp1();
- if (location->OperIs(GT_LCL_VAR))
+ unsigned lclNum = addr->AsLclVar()->GetLclNum();
+ LclVarDsc* varDsc = lvaGetDesc(lclNum);
+ if (!lvaIsImplicitByRefLocal(lclNum) && varTypeIsStruct(varDsc) &&
+ ClassLayout::AreCompatible(layout, varDsc->GetLayout()))
{
- unsigned lclNum = location->AsLclVar()->GetLclNum();
- LclVarDsc* varDsc = lvaGetDesc(lclNum);
- if (!lvaIsImplicitByRefLocal(lclNum) && varTypeIsStruct(varDsc) &&
- ClassLayout::AreCompatible(layout, varDsc->GetLayout()))
- {
- return location;
- }
+ return gtNewLclvNode(lclNum, varDsc->TypeGet());
}
}
GenTree* currSrc = srcOrFillVal;
GenTree* currDst = dst;
- if (currSrc->OperIsBlk() && (currSrc->AsBlk()->Addr()->OperGet() == GT_ADDR))
- {
- currSrc = currSrc->AsBlk()->Addr()->gtGetOp1();
- }
- if (currDst->OperIsBlk() && (currDst->AsBlk()->Addr()->OperGet() == GT_ADDR))
- {
- currDst = currDst->AsBlk()->Addr()->gtGetOp1();
- }
-
- if (currSrc->OperGet() == GT_LCL_VAR && currDst->OperGet() == GT_LCL_VAR &&
+ if (currSrc->OperIs(GT_LCL_VAR) && currDst->OperIs(GT_LCL_VAR) &&
currSrc->AsLclVarCommon()->GetLclNum() == currDst->AsLclVarCommon()->GetLclNum())
{
result->gtBashToNOP(); // Make this a NOP.
{
// TODO-Cleanup: similar logic already exists in "gtNewAssignNode",
// however, it is not enabled for x86. Fix that and delete this code.
- if (dst->OperIsBlk() && (dst->AsIndir()->Addr()->OperGet() == GT_ADDR))
+ GenTreeLclVar* dstLclNode = nullptr;
+ if (dst->OperIs(GT_LCL_VAR))
{
- dst = dst->AsIndir()->Addr()->gtGetOp1();
+ dstLclNode = dst->AsLclVar();
+ }
+ else if (dst->OperIsBlk() && dst->AsIndir()->Addr()->OperIs(GT_LCL_VAR_ADDR))
+ {
+ dstLclNode = dst->AsIndir()->Addr()->AsLclVar();
}
- if (dst->OperIsLocal() && varTypeIsStruct(dst))
+ if ((dstLclNode != nullptr) && varTypeIsStruct(lvaGetDesc(dstLclNode)))
{
- setLclRelatedToSIMDIntrinsic(dst);
+ setLclRelatedToSIMDIntrinsic(dstLclNode);
}
}
#endif // FEATURE_SIMD
break;
}
+ case GT_LCL_VAR_ADDR:
+ if (!complexOK)
+ {
+ return nullptr;
+ }
+ FALLTHROUGH;
+
case GT_LCL_VAR:
- copy = gtNewLclvNode(tree->AsLclVarCommon()->GetLclNum(),
- tree->TypeGet() DEBUGARG(tree->AsLclVar()->gtLclILoffs));
+ copy = new (this, tree->OperGet())
+ GenTreeLclVar(tree->OperGet(), tree->TypeGet(), tree->AsLclVar()->GetLclNum());
goto FINISH_CLONING_LCL_NODE;
case GT_LCL_FLD:
return nullptr;
}
}
- else if (tree->gtOper == GT_ADDR)
- {
- GenTree* op1 = gtClone(tree->AsOp()->gtOp1);
- if (op1 == nullptr)
- {
- return nullptr;
- }
- copy = gtNewOperNode(GT_ADDR, tree->TypeGet(), op1);
- }
else
{
return nullptr;
case GT_BITCAST:
case GT_CKFINITE:
case GT_LCLHEAP:
- case GT_ADDR:
case GT_IND:
case GT_OBJ:
case GT_BLK:
lvaTable[boxTempLcl].lvType = TYP_UNDEF;
const bool isUnsafeValueClass = false;
lvaSetStruct(boxTempLcl, boxClass, isUnsafeValueClass);
- var_types boxTempType = lvaTable[boxTempLcl].lvType;
// Remove the newobj and assignment to box temp
JITDUMP("Bashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
asg->gtBashToNOP();
// Update the copy from the value to be boxed to the box temp
- GenTree* newDst = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, boxTempType));
- copyDst->AsOp()->gtOp1 = newDst;
+ copyDst->AsOp()->gtOp1 = gtNewLclVarAddrNode(boxTempLcl, TYP_BYREF);
// Return the address of the now-struct typed box temp
- GenTree* retValue = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, boxTempType));
+ GenTree* retValue = gtNewLclVarAddrNode(boxTempLcl, TYP_BYREF);
return retValue;
}
return Compiler::WALK_SKIP_SUBTREES;
}
- if ((m_flags & GTF_EXCEPT) != 0)
- {
- // Special case - GT_ADDR of GT_IND nodes of TYP_STRUCT have to be kept together.
- if (node->OperIs(GT_ADDR) && node->gtGetOp1()->OperIsIndir() &&
- (node->gtGetOp1()->TypeGet() == TYP_STRUCT))
- {
- JITDUMP("Keep the GT_ADDR and GT_IND together:\n");
- PushSideEffects(node);
- return Compiler::WALK_SKIP_SUBTREES;
- }
- }
-
// Generally all GT_CALL nodes are considered to have side-effects.
// So if we get here it must be a helper call that we decided it does
// not have side effects that we needed to keep.
//
bool GenTree::DefinesLocalAddr(GenTreeLclVarCommon** pLclVarTree, ssize_t* pOffset)
{
- if (OperIs(GT_ADDR) || OperIsLocalAddr())
+ if (OperIsLocalAddr())
{
- GenTree* lclNode = this;
- if (OperGet() == GT_ADDR)
- {
- lclNode = AsOp()->gtOp1;
- }
+ *pLclVarTree = AsLclVarCommon();
- if (lclNode->IsLocal() || lclNode->OperIsLocalAddr())
+ if (pOffset != nullptr)
{
- *pLclVarTree = lclNode->AsLclVarCommon();
-
- if (pOffset != nullptr)
- {
- *pOffset += lclNode->AsLclVarCommon()->GetLclOffs();
- }
-
- return true;
+ *pOffset += AsLclVarCommon()->GetLclOffs();
}
+
+ return true;
}
- // Otherwise...
return false;
}
const GenTreeLclVarCommon* GenTree::IsLocalAddrExpr() const
{
- if (OperGet() == GT_ADDR)
- {
- return AsOp()->gtOp1->IsLocal() ? AsOp()->gtOp1->AsLclVarCommon() : nullptr;
- }
- else if (OperIsLocalAddr())
- {
- return this->AsLclVarCommon();
- }
-
- // Otherwise...
- return nullptr;
+ return OperIsLocalAddr() ? AsLclVarCommon() : nullptr;
}
//------------------------------------------------------------------------
{
GenTree* addr = AsIndir()->Addr();
- if (addr->OperIs(GT_LCL_VAR))
+ if (addr->OperIs(GT_LCL_VAR, GT_LCL_VAR_ADDR))
{
lcl = addr->AsLclVar();
}
- else if (addr->OperIs(GT_ADDR))
- {
- GenTree* base = addr->AsOp()->gtOp1;
-
- if (base->OperIs(GT_LCL_VAR))
- {
- lcl = base->AsLclVar();
- }
- }
}
if ((lcl != nullptr) && compiler->lvaIsImplicitByRefLocal(lcl->GetLclNum()))
return;
}
- if (op->OperIsLocal())
+ if (op->OperIs(GT_LCL_VAR))
{
setLclRelatedToSIMDIntrinsic(op);
}
- else if (op->OperIs(GT_OBJ))
+ else if (op->OperIs(GT_OBJ) && op->AsIndir()->Addr()->OperIs(GT_LCL_VAR_ADDR))
{
- GenTree* addr = op->AsIndir()->Addr();
-
- if (addr->OperIs(GT_ADDR))
- {
- GenTree* addrOp1 = addr->AsOp()->gtGetOp1();
-
- if (addrOp1->OperIsLocal())
- {
- setLclRelatedToSIMDIntrinsic(addrOp1);
- }
- }
+ setLclRelatedToSIMDIntrinsic(op->AsIndir()->Addr());
}
}
// OperIsIndir() returns true also for indirection nodes such as GT_BLK, etc. as well as GT_NULLCHECK.
static bool OperIsIndir(genTreeOps gtOper)
{
- return gtOper == GT_IND || gtOper == GT_STOREIND || gtOper == GT_NULLCHECK || OperIsBlk(gtOper);
+ static_assert_no_msg(AreContiguous(GT_IND, GT_STOREIND, GT_OBJ, GT_STORE_OBJ, GT_BLK, GT_STORE_BLK,
+ GT_STORE_DYN_BLK, GT_NULLCHECK));
+ return (GT_IND <= gtOper) && (gtOper <= GT_NULLCHECK);
}
static bool OperIsArrLength(genTreeOps gtOper)
}
return WALK_SKIP_SUBTREES;
- case GT_ADDR:
- newState.isUnderIndir = false;
- // We'll assume p in "**p = " can be vulnerable because by changing 'p', someone
- // could control where **p stores to.
- {
- comp->fgWalkTreePre(&tree->AsOp()->gtOp1, comp->gsMarkPtrsAndAssignGroups, (void*)&newState);
- }
- return WALK_SKIP_SUBTREES;
-
case GT_ASG:
{
GenTreeOp* asg = tree->AsOp();
}
else
{
- assert((newobjThis->gtOper == GT_ADDR) && (newobjThis->AsOp()->gtOp1->gtOper == GT_LCL_VAR));
+ assert(newobjThis->OperIs(GT_LCL_VAR_ADDR));
arg = newobjThis;
// push newobj result on type stack
- unsigned tmp = arg->AsOp()->gtOp1->AsLclVarCommon()->GetLclNum();
- impPushOnStack(gtNewLclvNode(tmp, lvaGetRealType(tmp)), verMakeTypeInfo(argClass).NormaliseForStack());
+ unsigned lclNum = arg->AsLclVarCommon()->GetLclNum();
+ impPushOnStack(gtNewLclvNode(lclNum, lvaGetRealType(lclNum)),
+ verMakeTypeInfo(argClass).NormaliseForStack());
}
}
else
{
assert(varTypeIsStruct(structVal) || eeIsValueClass(structHnd));
- var_types type = structVal->TypeGet();
-
+ var_types type = structVal->TypeGet();
genTreeOps oper = structVal->gtOper;
- if (oper == GT_OBJ && willDeref)
- {
- assert(structVal->AsObj()->GetLayout()->GetClassHandle() == structHnd);
- return (structVal->AsObj()->Addr());
- }
- else if (oper == GT_CALL || oper == GT_RET_EXPR || oper == GT_OBJ || oper == GT_MKREFANY ||
- structVal->OperIsSimdOrHWintrinsic() || structVal->IsCnsVec())
+ if (oper == GT_CALL || oper == GT_RET_EXPR || (oper == GT_OBJ && !willDeref) || oper == GT_MKREFANY ||
+ structVal->OperIsSimdOrHWintrinsic() || structVal->IsCnsVec())
{
unsigned tmpNum = lvaGrabTemp(true DEBUGARG("struct address for call/obj"));
impAssignTempGen(tmpNum, structVal, structHnd, curLevel);
- // The 'return value' is now the temp itself
-
- type = genActualType(lvaTable[tmpNum].TypeGet());
- GenTree* temp = gtNewLclvNode(tmpNum, type);
- temp = gtNewOperNode(GT_ADDR, TYP_BYREF, temp);
- return temp;
+ // The 'return value' is now the address of the temp itself.
+ return gtNewLclVarAddrNode(tmpNum, TYP_BYREF);
}
- else if (oper == GT_COMMA)
+ if (oper == GT_COMMA)
{
assert(structVal->AsOp()->gtOp2->gtType == type); // Second thing is the struct
return (structVal);
}
- return (gtNewOperNode(GT_ADDR, TYP_BYREF, structVal));
+ return gtNewOperNode(GT_ADDR, TYP_BYREF, structVal);
}
//------------------------------------------------------------------------
if ((tree->gtFlags & spillFlags) != 0 ||
(spillGlobEffects && // Only consider the following when spillGlobEffects == true
- !impIsAddressInLocal(tree) && // No need to spill the GT_ADDR node on a local.
!impIsAddressInLocal(tree) && // No need to spill LCL_ADDR nodes.
gtHasLocalsWithAddrOp(tree))) // Spill if we still see GT_LCL_VAR that contains lvHasLdAddrOp or
// lvAddrTaken flag.
{
// - Pointer to block of int32 dimensions: address of lvaNewObjArrayArgs temp.
//
- node = gtNewLclvNode(lvaNewObjArrayArgs, TYP_BLK);
- node = gtNewOperNode(GT_ADDR, TYP_I_IMPL, node);
+ node = gtNewLclVarAddrNode(lvaNewObjArrayArgs);
// Pop dimension arguments from the stack one at a time and store it
// into lvaNewObjArrayArgs temp.
for (int i = pCallInfo->sig.numArgs - 1; i >= 0; i--)
{
- GenTree* arg = impImplicitIorI4Cast(impPopStack().val, TYP_INT);
-
- GenTree* dest = gtNewLclvNode(lvaNewObjArrayArgs, TYP_BLK);
- dest = gtNewOperNode(GT_ADDR, TYP_I_IMPL, dest);
- dest = gtNewOperNode(GT_ADD, TYP_I_IMPL, dest,
- new (this, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, sizeof(INT32) * i));
- dest = gtNewOperNode(GT_IND, TYP_INT, dest);
-
- node = gtNewOperNode(GT_COMMA, node->TypeGet(), gtNewAssignNode(dest, arg), node);
+ GenTree* arg = impImplicitIorI4Cast(impPopStack().val, TYP_INT);
+ GenTree* dest = gtNewLclFldNode(lvaNewObjArrayArgs, TYP_INT, sizeof(INT32) * i);
+ node = gtNewOperNode(GT_COMMA, node->TypeGet(), gtNewAssignNode(dest, arg), node);
}
node =
lclNum = impInlineFetchLocal(lclNum DEBUGARG("Inline ldloca(s) first use temp"));
assert(!lvaGetDesc(lclNum)->lvNormalizeOnLoad());
- op1 = gtNewLclvNode(lclNum, lvaGetActualType(lclNum));
-
+ op1 = gtNewLclVarAddrNode(lclNum, TYP_BYREF);
goto _PUSH_ADRVAR;
}
return;
}
- assert(op1->gtOper == GT_LCL_VAR);
-
+ op1->ChangeType(TYP_BYREF);
+ op1->SetOper(GT_LCL_VAR_ADDR);
goto _PUSH_ADRVAR;
}
goto ADRVAR;
ADRVAR:
-
- op1 = impCreateLocalNode(lclNum DEBUGARG(opcodeOffs + sz + 1));
+ // Note that this is supposed to create the transient type "*"
+ // which may be used as a TYP_I_IMPL. However we catch places
+ // where it is used as a TYP_I_IMPL and change the node if needed.
+ // Thus we are pessimistic and may report byrefs in the GC info
+ // where it was not absolutely needed, but doing otherwise would
+ // require careful rethinking of the importer routines which use
+ // the IL validity model (e. g. "impGetByRefResultType").
+ op1 = gtNewLclVarAddrNode(lclNum, TYP_BYREF);
_PUSH_ADRVAR:
- assert(op1->gtOper == GT_LCL_VAR);
-
- /* Note that this is supposed to create the transient type "*"
- which may be used as a TYP_I_IMPL. However we catch places
- where it is used as a TYP_I_IMPL and change the node if needed.
- Thus we are pessimistic and may report byrefs in the GC info
- where it was not absolutely needed, but it is safer this way.
- */
- op1 = gtNewOperNode(GT_ADDR, TYP_BYREF, op1);
-
- // &aliasedVar doesnt need GTF_GLOB_REF, though alisasedVar does
- assert((op1->gtFlags & GTF_GLOB_REF) == 0);
+ assert(op1->OperIs(GT_LCL_VAR_ADDR));
tiRetVal = typeInfo(TI_BYTE).MakeByRef();
impPushOnStack(op1, tiRetVal);
assertImp((info.compMethodInfo->args.callConv & CORINFO_CALLCONV_MASK) == CORINFO_CALLCONV_VARARG);
- /* The ARGLIST cookie is a hidden 'last' parameter, we have already
- adjusted the arg count cos this is like fetching the last param */
- assertImp(0 < numArgs);
- lclNum = lvaVarargsHandleArg;
- op1 = gtNewLclvNode(lclNum, TYP_I_IMPL DEBUGARG(opcodeOffs + sz + 1));
- op1 = gtNewOperNode(GT_ADDR, TYP_BYREF, op1);
+ // The ARGLIST cookie is a hidden 'last' parameter, we have already
+ // adjusted the arg count because this is like fetching the last param.
+ assertImp(numArgs > 0);
+ op1 = gtNewLclVarAddrNode(lvaVarargsHandleArg, TYP_BYREF);
impPushOnStack(op1, tiRetVal);
break;
lclDsc->lvHasLdAddrOp = true;
// Obtain the address of the temp
- newObjThisPtr =
- gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(lclNum, lvaTable[lclNum].TypeGet()));
+ newObjThisPtr = gtNewLclVarAddrNode(lclNum, TYP_BYREF);
}
else
{
op = op->AsField()->GetFldObj();
}
- if (op->OperIs(GT_ADDR) && op->AsUnOp()->gtOp1->OperIs(GT_LCL_VAR))
+ if (op->OperIs(GT_LCL_VAR_ADDR))
{
if (lclVarTreeOut != nullptr)
{
- *lclVarTreeOut = op->AsUnOp()->gtOp1;
+ *lclVarTreeOut = const_cast<GenTree*>(op);
}
return true;
{
LclVarDsc* varDsc = lvaGetDesc(lclVarTree->AsLclVarCommon());
- if (varTypeIsStruct(lclVarTree))
+ if (varTypeIsStruct(varDsc))
{
inlCurArgInfo->argIsByRefToStructLocal = true;
#ifdef FEATURE_SIMD
if (clsFlags & CORINFO_FLG_VALUECLASS)
{
- assert(newobjThis->gtOper == GT_ADDR && newobjThis->AsOp()->gtOp1->gtOper == GT_LCL_VAR);
+ assert(newobjThis->OperIs(GT_LCL_VAR_ADDR));
- unsigned tmp = newobjThis->AsOp()->gtOp1->AsLclVarCommon()->GetLclNum();
- impPushOnStack(gtNewLclvNode(tmp, lvaGetRealType(tmp)), verMakeTypeInfo(clsHnd).NormaliseForStack());
+ unsigned lclNum = newobjThis->AsLclVarCommon()->GetLclNum();
+ impPushOnStack(gtNewLclvNode(lclNum, lvaGetRealType(lclNum)),
+ verMakeTypeInfo(clsHnd).NormaliseForStack());
}
else
{
{
static bool IsArgsFieldInit(GenTree* tree, unsigned index, unsigned lvaNewObjArrayArgs)
{
- return (tree->OperGet() == GT_ASG) && IsArgsFieldIndir(tree->gtGetOp1(), index, lvaNewObjArrayArgs) &&
- IsArgsAddr(tree->gtGetOp1()->gtGetOp1()->gtGetOp1(), lvaNewObjArrayArgs);
+ return tree->OperIs(GT_ASG) && IsArgsField(tree->gtGetOp1(), index, lvaNewObjArrayArgs);
}
- static bool IsArgsFieldIndir(GenTree* tree, unsigned index, unsigned lvaNewObjArrayArgs)
+ static bool IsArgsField(GenTree* tree, unsigned index, unsigned lvaNewObjArrayArgs)
{
- return (tree->OperGet() == GT_IND) && (tree->gtGetOp1()->OperGet() == GT_ADD) &&
- (tree->gtGetOp1()->gtGetOp2()->IsIntegralConst(sizeof(INT32) * index)) &&
- IsArgsAddr(tree->gtGetOp1()->gtGetOp1(), lvaNewObjArrayArgs);
- }
-
- static bool IsArgsAddr(GenTree* tree, unsigned lvaNewObjArrayArgs)
- {
- return (tree->OperGet() == GT_ADDR) && (tree->gtGetOp1()->OperGet() == GT_LCL_VAR) &&
- (tree->gtGetOp1()->AsLclVar()->GetLclNum() == lvaNewObjArrayArgs);
+ return tree->OperIs(GT_LCL_FLD) && (tree->AsLclFld()->GetLclNum() == lvaNewObjArrayArgs) &&
+ (tree->AsLclFld()->GetLclOffs() == sizeof(INT32) * index);
}
static bool IsComma(GenTree* tree)
argIndex++;
}
- assert((comma != nullptr) && Match::IsArgsAddr(comma, lvaNewObjArrayArgs));
+ assert((comma != nullptr) && comma->OperIs(GT_LCL_VAR_ADDR) &&
+ (comma->AsLclVarCommon()->GetLclNum() == lvaNewObjArrayArgs));
if (argIndex != numArgs)
{
unsigned rawHandleSlot = lvaGrabTemp(true DEBUGARG("rawHandle"));
impAssignTempGen(rawHandleSlot, rawHandle, clsHnd, CHECK_SPILL_NONE);
- GenTree* lclVar = gtNewLclvNode(rawHandleSlot, TYP_I_IMPL);
- GenTree* lclVarAddr = gtNewOperNode(GT_ADDR, TYP_I_IMPL, lclVar);
+ GenTree* lclVarAddr = gtNewLclVarAddrNode(rawHandleSlot);
var_types resultType = JITtype2varType(sig->retType);
if (resultType == TYP_STRUCT)
{
if (optimizedTheBox)
{
- assert(localCopyThis->OperIs(GT_ADDR));
+ assert(localCopyThis->OperIs(GT_LCL_VAR_ADDR));
// We may end up inlining this call, so the local copy must be marked as "aliased",
// making sure the inlinee importer will know when to spill references to its value.
- lvaGetDesc(localCopyThis->AsUnOp()->gtOp1->AsLclVar())->lvHasLdAddrOp = true;
+ lvaGetDesc(localCopyThis->AsLclVar())->lvHasLdAddrOp = true;
#if FEATURE_TAILCALL_OPT
if (call->IsImplicitTailCall())
boxAsgStmt->SetRootNode(boxTempAsg);
}
- JITDUMP("\nImporting KEEPALIVE(BOX) as KEEPALIVE(ADDR(LCL_VAR V%02u))", boxTempNum);
-
- GenTree* boxTemp = gtNewLclvNode(boxTempNum, boxSrc->TypeGet());
- GenTree* boxTempAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, boxTemp);
-
- return gtNewKeepAliveNode(boxTempAddr);
+ JITDUMP("\nImporting KEEPALIVE(BOX) as KEEPALIVE(LCL_VAR_ADDR V%02u)", boxTempNum);
+ objToKeepAlive = gtNewLclVarAddrNode(boxTempNum);
}
}
// actually produce values in IR) in order to support the invariant that every
// node produces a value.
//
- // The existence of GT_ADDR nodes and their use together with GT_FIELD to form
- // FIELD/ADDR/FIELD/ADDR/LCL_VAR sequences complicate things a bit. A typical
- // GT_FIELD node acts like an indirection and should produce an unknown value,
- // local address analysis doesn't know or care what value the field stores.
- // But a GT_FIELD can also be used as an operand for a GT_ADDR node and then
- // the GT_FIELD node does not perform an indirection, it's just represents a
- // location, similar to GT_LCL_VAR and GT_LCL_FLD.
- //
- // To avoid this issue, the semantics of GT_FIELD (and for simplicity's sake any other
- // indirection) nodes slightly deviates from the IR semantics - an indirection does not
- // actually produce an unknown value but a location value, if the indirection address
- // operand is an address value.
- //
- // The actual indirection is performed when the indirection's user node is processed:
- // - A GT_ADDR user turns the location value produced by the indirection back
- // into an address value.
- // - Any other user node performs the indirection and produces an unknown value.
+ // Each value is processed ("escaped") when visiting (in post-order) its parent,
+ // to achieve uniformity between how address and location values are handled.
//
class Value
{
}
//------------------------------------------------------------------------
- // Address: Produce an address value from a location value.
- //
- // Arguments:
- // val - the input value
- //
- // Notes:
- // - LOCATION(lclNum, offset) => ADDRESS(lclNum, offset)
- // - ADDRESS(lclNum, offset) => invalid, we should never encounter something like ADDR(ADDR(...))
- // - UNKNOWN => UNKNOWN
- //
- void Address(Value& val)
- {
- assert(!IsLocation() && !IsAddress());
- assert(!val.IsAddress());
-
- if (val.IsLocation())
- {
- m_address = true;
- m_lclNum = val.m_lclNum;
- m_offset = val.m_offset;
- }
-
- INDEBUG(val.Consume();)
- }
-
- //------------------------------------------------------------------------
// AddOffset: Produce an address value from an address value.
//
// Arguments:
MorphLocalField(node, user);
}
- if (node->OperIsLocal())
+ if (node->OperIsLocal() || node->OperIsLocalAddr())
{
unsigned const lclNum = node->AsLclVarCommon()->GetLclNum();
LclVarDsc* const varDsc = m_compiler->lvaGetDesc(lclNum);
TopValue(0).Address(node->AsLclFld());
break;
- case GT_ADDR:
- assert(TopValue(1).Node() == node);
- assert(TopValue(0).Node() == node->gtGetOp1());
-
- TopValue(1).Address(TopValue(0));
- PopValue();
- break;
-
case GT_ADD:
assert(TopValue(2).Node() == node);
assert(TopValue(1).Node() == node->gtGetOp1());
assert(node->OperIs(GT_IND, GT_FIELD, GT_FIELD_ADDR));
GenTree* objRef = node->AsUnOp()->gtOp1;
- GenTree* obj = ((objRef != nullptr) && objRef->OperIs(GT_ADDR)) ? objRef->AsOp()->gtOp1 : nullptr;
// TODO-Bug: this code does not pay attention to "GTF_IND_VOLATILE".
- if ((obj != nullptr) && obj->OperIs(GT_LCL_VAR) && varTypeIsStruct(obj))
+ if ((objRef != nullptr) && objRef->OperIs(GT_LCL_VAR_ADDR))
{
- const LclVarDsc* varDsc = m_compiler->lvaGetDesc(obj->AsLclVarCommon());
+ const LclVarDsc* varDsc = m_compiler->lvaGetDesc(objRef->AsLclVarCommon());
if (varDsc->lvPromoted)
{
return;
}
- const LclVarDsc* fieldDsc = m_compiler->lvaGetDesc(fieldLclNum);
- var_types fieldType = fieldDsc->TypeGet();
- GenTree* lclVarNode = nullptr;
- GenTreeFlags lclVarFlags = node->gtFlags & (GTF_NODE_MASK | GTF_DONT_CSE);
+ const LclVarDsc* fieldDsc = m_compiler->lvaGetDesc(fieldLclNum);
+ var_types fieldType = fieldDsc->TypeGet();
assert(fieldType != TYP_STRUCT); // Promoted LCL_VAR can't have a struct type.
if (node->OperIs(GT_FIELD_ADDR))
{
- node->ChangeOper(GT_ADDR);
- node->AsUnOp()->gtOp1 = obj;
-
- lclVarNode = obj;
+ node->ChangeOper(GT_LCL_VAR_ADDR);
+ node->AsLclVar()->SetLclNum(fieldLclNum);
}
- else if ((node->TypeGet() == fieldType) || ((user != nullptr) && user->OperIs(GT_ADDR)))
+ else if (node->TypeGet() == fieldType)
{
- if (user != nullptr)
+ GenTreeFlags lclVarFlags = node->gtFlags & (GTF_NODE_MASK | GTF_DONT_CSE);
+
+ if ((user != nullptr) && user->OperIs(GT_ASG) && (user->AsOp()->gtOp1 == node))
{
- if (user->OperIs(GT_ASG) && (user->AsOp()->gtOp1 == node))
- {
- lclVarFlags |= GTF_VAR_DEF;
- }
- else if (user->OperIs(GT_ADDR))
- {
- // TODO-ADDR: delete this quirk.
- lclVarFlags &= ~GTF_DONT_CSE;
- }
+ lclVarFlags |= GTF_VAR_DEF;
}
- lclVarNode = node;
+ node->ChangeOper(GT_LCL_VAR);
+ node->AsLclVar()->SetLclNum(fieldLclNum);
+ node->gtType = fieldType;
+ node->gtFlags = lclVarFlags;
}
- else // Here we will turn "FIELD/IND(ADDR(LCL_VAR<parent>))" into "OBJ/IND(ADDR(LCL_VAR<field>))".
+ else // Here we will turn "FIELD/IND(LCL_VAR_ADDR<parent>)" into "OBJ/IND(LCL_VAR_ADDR<field>)".
{
// This type mismatch is somewhat common due to how we retype fields of struct type that
// recursively simplify down to a primitive. E. g. for "struct { struct { int a } A, B }",
node->SetOper(GT_IND);
}
- lclVarNode = obj;
+ objRef->AsLclVar()->SetLclNum(fieldLclNum);
}
- lclVarNode->SetOper(GT_LCL_VAR);
- lclVarNode->AsLclVarCommon()->SetLclNum(fieldLclNum);
- lclVarNode->gtType = fieldType;
- lclVarNode->gtFlags = lclVarFlags;
-
JITDUMP("Replacing the field in promoted struct with local var V%02u\n", fieldLclNum);
m_stmtModified = true;
}
// But the pattern should at least subset the implicit byref cases that are
// handled in fgCanFastTailCall and fgMakeOutgoingStructArgCopy.
//
- // CALL(OBJ(ADDR(LCL_VAR...)))
+ // CALL(OBJ(LCL_VAR_ADDR...))
bool isArgToCall = false;
bool keepSearching = true;
for (int i = 0; i < m_ancestors.Height() && keepSearching; i++)
{
case 0:
{
- keepSearching = node->OperIs(GT_LCL_VAR);
+ keepSearching = node->OperIs(GT_LCL_VAR_ADDR);
}
break;
case 1:
{
- keepSearching = node->OperIs(GT_ADDR);
- }
- break;
-
- case 2:
- {
keepSearching = node->OperIs(GT_OBJ);
}
break;
- case 3:
+ case 2:
{
keepSearching = false;
isArgToCall = node->IsCall();
unsigned index = 0;
CorInfoType simdBaseJitType = CORINFO_TYPE_UNDEF;
unsigned simdSize = 0;
- GenTree* simdStructNode = getSIMDStructFromField(prevRHS, &simdBaseJitType, &index, &simdSize, true);
+ GenTree* simdLclAddr = getSIMDStructFromField(prevRHS, &simdBaseJitType, &index, &simdSize, true);
- if ((simdStructNode == nullptr) || (index != 0) || (simdBaseJitType != CORINFO_TYPE_FLOAT))
+ if ((simdLclAddr == nullptr) || (index != 0) || (simdBaseJitType != CORINFO_TYPE_FLOAT))
{
// if the RHS is not from a SIMD vector field X, then there is no need to check further.
return false;
JITDUMP("\n" FMT_BB " " FMT_STMT " (before):\n", block->bbNum, stmt->GetID());
DISPSTMT(stmt);
- assert(!simdStructNode->CanCSE() && varTypeIsSIMD(simdStructNode));
- simdStructNode->ClearDoNotCSE();
-
- tree = gtNewAssignNode(dstNode, simdStructNode);
+ tree = gtNewAssignNode(dstNode, gtNewLclvNode(simdLclAddr->AsLclVarCommon()->GetLclNum(), simdType));
stmt->SetRootNode(tree);
{
lcl = tree->AsLclVarCommon();
}
- else if (tree->OperIs(GT_ADDR))
- {
- GenTree* const addr = tree->AsOp()->gtOp1;
- if (addr->OperIs(GT_LCL_VAR, GT_LCL_FLD))
- {
- lcl = addr->AsLclVarCommon();
- }
- }
if (lcl == nullptr)
{
tree->ChangeOper(GT_LCL_FLD_ADDR);
tree->AsLclFld()->SetLclOffs(padding);
}
- else
- {
- noway_assert(tree->OperIs(GT_ADDR));
- GenTree* paddingTree = pComp->gtNewIconNode(padding);
- GenTree* newAddr = pComp->gtNewOperNode(GT_ADD, tree->gtType, tree, paddingTree);
-
- *pTree = newAddr;
-
- lcl->gtType = TYP_BLK;
- }
}
return WALK_SKIP_SUBTREES;
}
break;
- case GT_ADDR:
- {
- // For a GT_ADDR, the child node should not be evaluated into a register
- GenTree* child = tree->gtGetOp1();
- assert(!isCandidateLocalRef(child));
- assert(child->isContained());
- assert(dstCount == 1);
- srcCount = 0;
- BuildDef(tree);
- }
- break;
-
case GT_STORE_BLK:
case GT_STORE_OBJ:
case GT_STORE_DYN_BLK:
}
break;
- case GT_ADDR:
- {
- // For a GT_ADDR, the child node should not be evaluated into a register
- GenTree* child = tree->gtGetOp1();
- assert(!isCandidateLocalRef(child));
- assert(child->isContained());
- assert(dstCount == 1);
- srcCount = 0;
- BuildDef(tree);
- }
- break;
-
case GT_BLK:
// These should all be eliminated prior to Lowering.
assert(!"Non-store block node in Lowering");
case GT_COMMA:
case GT_QMARK:
case GT_COLON:
- case GT_ADDR:
srcCount = 0;
assert(dstCount == 0);
unreached();
}
break;
- case GT_ADDR:
- {
- // For a GT_ADDR, the child node should not be evaluated into a register
- GenTree* child = tree->gtGetOp1();
- assert(!isCandidateLocalRef(child));
- assert(child->isContained());
- assert(dstCount == 1);
- srcCount = 0;
- }
- break;
-
-#if !defined(FEATURE_PUT_STRUCT_ARG_STK)
case GT_OBJ:
-#endif
case GT_BLK:
// These should all be eliminated prior to Lowering.
assert(!"Non-store block node in Lowering");
// {args}
// GT_COMMA
// GT_CALL Dispatcher
-// GT_ADDR ReturnAddress
+// GT_LCL_VAR_ADDR ReturnAddress
// {CallTargetStub}
-// GT_ADDR ReturnValue
+// GT_LCL_VAR_ADDR ReturnValue
// GT_LCL ReturnValue
// whenever the call node returns a value. If the call node does not return a
// value the last comma will not be there.
lvaGetDesc(newRetLcl)->lvIsMultiRegRet = true;
}
- retValArg =
- gtNewOperNode(GT_ADDR, TYP_I_IMPL, gtNewLclvNode(newRetLcl, genActualType(lvaTable[newRetLcl].lvType)));
- retVal = gtNewLclvNode(newRetLcl, genActualType(lvaTable[newRetLcl].lvType));
+ retValArg = gtNewLclVarAddrNode(newRetLcl);
+ retVal = gtNewLclvNode(newRetLcl, genActualType(lvaTable[newRetLcl].lvType));
}
else
{
lvaSetVarAddrExposed(lvaRetAddrVar DEBUGARG(AddressExposedReason::DISPATCH_RET_BUF));
}
- GenTree* retAddrSlot = gtNewOperNode(GT_ADDR, TYP_I_IMPL, gtNewLclvNode(lvaRetAddrVar, TYP_I_IMPL));
+ GenTree* retAddrSlot = gtNewLclVarAddrNode(lvaRetAddrVar);
NewCallArg retAddrSlotArg = NewCallArg::Primitive(retAddrSlot);
NewCallArg callTargetArg = NewCallArg::Primitive(callTarget);
unsigned* simdSizeOut,
bool ignoreUsedInSIMDIntrinsic /*false*/)
{
- if (tree->OperIs(GT_FIELD))
+ if (tree->OperIs(GT_FIELD) && tree->AsField()->IsInstance())
{
GenTree* objRef = tree->AsField()->GetFldObj();
- if ((objRef != nullptr) && objRef->OperIs(GT_ADDR))
+ if (objRef->OperIs(GT_LCL_VAR_ADDR))
{
- GenTree* obj = objRef->AsOp()->gtOp1;
-
- if (isSIMDTypeLocal(obj))
+ LclVarDsc* varDsc = lvaGetDesc(objRef->AsLclVarCommon());
+ if (varTypeIsSIMD(varDsc) && (varDsc->lvIsUsedInSIMDIntrinsic() || ignoreUsedInSIMDIntrinsic))
{
- LclVarDsc* varDsc = lvaGetDesc(obj->AsLclVarCommon());
- if (varDsc->lvIsUsedInSIMDIntrinsic() || ignoreUsedInSIMDIntrinsic)
- {
- CorInfoType simdBaseJitType = varDsc->GetSimdBaseJitType();
- var_types simdBaseType = JITtype2varType(simdBaseJitType);
- unsigned fieldOffset = tree->AsField()->gtFldOffset;
- unsigned baseTypeSize = genTypeSize(simdBaseType);
+ CorInfoType simdBaseJitType = varDsc->GetSimdBaseJitType();
+ var_types simdBaseType = JITtype2varType(simdBaseJitType);
+ unsigned fieldOffset = tree->AsField()->gtFldOffset;
+ unsigned baseTypeSize = genTypeSize(simdBaseType);
- // Below condition is convervative. We don't actually need the two types to
- // match (only the tree type is relevant), but we don't have a convenient way
- // to turn the tree type into "CorInfoType".
- if ((tree->TypeGet() == simdBaseType) && ((fieldOffset % baseTypeSize) == 0))
- {
- *simdSizeOut = varDsc->lvExactSize;
- *simdBaseJitTypeOut = simdBaseJitType;
- *indexOut = fieldOffset / baseTypeSize;
+ // Below condition is conservative. We don't actually need the two types to
+ // match (only the tree type is relevant), but we don't have a convenient way
+ // to turn the tree type into "CorInfoType".
+ if ((tree->TypeGet() == simdBaseType) && ((fieldOffset % baseTypeSize) == 0))
+ {
+ *simdSizeOut = varDsc->lvExactSize;
+ *simdBaseJitTypeOut = simdBaseJitType;
+ *indexOut = fieldOffset / baseTypeSize;
- return obj;
- }
+ return objRef;
}
}
}
op1->gtFlags |= GTF_DONT_CSE;
break;
- case GT_ADDR:
- if (op1->OperIs(GT_FIELD) && op1->AsField()->IsInstance())
- {
- op1->SetOper(GT_FIELD_ADDR);
- return fgMorphField(op1, mac);
- }
-
- // Location nodes cannot be CSEd.
- op1->gtFlags |= GTF_DONT_CSE;
- break;
-
case GT_QMARK:
case GT_JTRUE:
// Propagate the new flags
tree->gtFlags |= (op1->gtFlags & GTF_ALL_EFFECT);
-
- // addresses of locals do not need GTF_GLOB_REF, even if the child has
- // it (is address exposed). Note that general addressing may still need
- // GTF_GLOB_REF, for example if the subtree has a comma that involves a
- // global reference.
- if (tree->OperIs(GT_ADDR) && ((tree->gtFlags & GTF_GLOB_REF) != 0) && tree->IsLocalAddrExpr())
- {
- tree->gtFlags &= ~GTF_GLOB_REF;
- }
} // if (op1)
/*-------------------------------------------------------------------------
case GT_BLK:
case GT_IND:
{
- // If we have IND(ADDR(X)) and X has GTF_GLOB_REF, we must set GTF_GLOB_REF on
- // the OBJ. Note that the GTF_GLOB_REF will have been cleared on ADDR(X) where X
- // is a local, even if it has been address-exposed.
- if (op1->OperIs(GT_ADDR))
- {
- tree->gtFlags |= (op1->AsUnOp()->gtGetOp1()->gtFlags & GTF_GLOB_REF);
- }
-
if (!tree->OperIs(GT_IND))
{
break;
}
break;
- case GT_ADDR:
- // Can not remove a GT_ADDR if it is currently a CSE candidate.
- if (gtIsActiveCSE_Candidate(tree))
- {
- break;
- }
-
- // Perform the transform ADDR(IND(...)) == (...).
- if (op1->OperIsIndir())
- {
- GenTree* addr = op1->AsIndir()->Addr();
-
- noway_assert(varTypeIsI(genActualType(addr)));
-
- DEBUG_DESTROY_NODE(op1);
- DEBUG_DESTROY_NODE(tree);
-
- return addr;
- }
- // Perform the transform ADDR(COMMA(x, ..., z)) == COMMA(x, ..., ADDR(z)).
- else if (op1->OperIs(GT_COMMA) && !optValnumCSE_phase)
- {
- ArrayStack<GenTree*> commas(getAllocator(CMK_ArrayStack));
- for (GenTree* comma = op1; comma != nullptr && comma->gtOper == GT_COMMA; comma = comma->gtGetOp2())
- {
- commas.Push(comma);
- }
-
- GenTree* commaNode = commas.Top();
- GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, commaNode->AsOp()->gtOp2);
- commaNode->AsOp()->gtOp2 = addr;
-
- // Retype the comma nodes to match "addr" and update their side effects.
- while (!commas.Empty())
- {
- GenTree* comma = commas.Pop();
- comma->gtType = addr->TypeGet();
-#ifdef DEBUG
- comma->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
-#endif
- gtUpdateNodeSideEffects(comma);
- }
-
- return op1;
- }
- break;
-
case GT_COLON:
if (fgGlobalMorph)
{
Compiler::fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
{
- GenTree* tree = *use;
- assert(tree != nullptr);
- assert(tree->IsLocal());
+ GenTree* tree = *use;
+ unsigned lclNum = tree->AsLclVarCommon()->GetLclNum();
+ bool lclEscapes = true;
- var_types type = tree->TypeGet();
- if ((tree->OperGet() == GT_LCL_VAR) && (type == TYP_REF || type == TYP_BYREF || type == TYP_I_IMPL))
+ if (tree->OperIs(GT_LCL_VAR) && tree->TypeIs(TYP_REF, TYP_BYREF, TYP_I_IMPL))
{
- unsigned int lclNum = tree->AsLclVar()->GetLclNum();
assert(tree == m_ancestors.Top());
- if (m_allocator->CanLclVarEscapeViaParentStack(&m_ancestors, lclNum))
+ if (!m_allocator->CanLclVarEscapeViaParentStack(&m_ancestors, lclNum))
{
- if (!m_allocator->CanLclVarEscape(lclNum))
- {
- JITDUMP("V%02u first escapes via [%06u]\n", lclNum, m_compiler->dspTreeID(tree));
- }
- m_allocator->MarkLclVarAsEscaping(lclNum);
+ lclEscapes = false;
+ }
+ }
+
+ if (lclEscapes)
+ {
+ if (!m_allocator->CanLclVarEscape(lclNum))
+ {
+ JITDUMP("V%02u first escapes via [%06u]\n", lclNum, m_compiler->dspTreeID(tree));
}
+ m_allocator->MarkLclVarAsEscaping(lclNum);
}
+
return Compiler::fgWalkResult::WALK_CONTINUE;
}
};
case GT_FIELD:
case GT_IND:
- {
- int grandParentIndex = parentIndex + 1;
- if ((parentStack->Height() > grandParentIndex) &&
- (parentStack->Top(grandParentIndex)->OperGet() == GT_ADDR))
- {
- // Check if the address of the field/ind escapes.
- parentIndex += 2;
- keepChecking = true;
- }
- else
- {
- // Address of the field/ind is not taken so the local doesn't escape.
- canLclVarEscapeViaParentStack = false;
- }
+ // Address of the field/ind is not taken so the local doesn't escape.
+ canLclVarEscapeViaParentStack = false;
break;
- }
case GT_CALL:
{
// It's either null or points to inside a stack-allocated object.
parent->gtFlags |= GTF_IND_TGT_NOT_HEAP;
}
-
- int grandParentIndex = parentIndex + 1;
-
- if (parentStack->Height() > grandParentIndex)
- {
- GenTree* grandParent = parentStack->Top(grandParentIndex);
- if (grandParent->OperGet() == GT_ADDR)
- {
- if (grandParent->TypeGet() == TYP_REF)
- {
- grandParent->ChangeType(newType);
- }
- parentIndex += 2;
- keepChecking = true;
- }
- }
break;
}
if (m_allocator->m_HeapLocalToStackLocalMap.TryGetValue(lclNum, &newLclNum))
{
newType = TYP_I_IMPL;
- tree =
- m_compiler->gtNewOperNode(GT_ADDR, newType, m_compiler->gtNewLclvNode(newLclNum, TYP_STRUCT));
- *use = tree;
+ tree = m_compiler->gtNewLclVarAddrNode(newLclNum);
+ *use = tree;
}
else
{
//
// call PPHelper(&ppCounter, ilOffset)
GenTree* ilOffsetNode = compiler->gtNewIconNode(ilOffset, TYP_INT);
- GenTree* ppCounterRef = compiler->gtNewLclvNode(ppCounterLclNum, TYP_INT);
- GenTree* ppCounterAddr = compiler->gtNewOperNode(GT_ADDR, TYP_I_IMPL, ppCounterRef);
+ GenTree* ppCounterAddr = compiler->gtNewLclVarAddrNode(ppCounterLclNum);
GenTreeCall* helperCall =
compiler->gtNewHelperCallNode(CORINFO_HELP_PATCHPOINT, TYP_VOID, ppCounterAddr, ilOffsetNode);
}
}
-void Rationalizer::RewriteAddress(LIR::Use& use)
-{
- assert(use.IsInitialized());
-
- GenTreeUnOp* address = use.Def()->AsUnOp();
- assert(address->OperGet() == GT_ADDR);
-
- GenTree* location = address->gtGetOp1();
- genTreeOps locationOp = location->OperGet();
-
- if (location->IsLocal())
- {
-// We are changing the child from GT_LCL_VAR TO GT_LCL_VAR_ADDR.
-// Therefore gtType of the child needs to be changed to a TYP_BYREF
-#ifdef DEBUG
- if (locationOp == GT_LCL_VAR)
- {
- JITDUMP("Rewriting GT_ADDR(GT_LCL_VAR) to GT_LCL_VAR_ADDR:\n");
- }
- else
- {
- assert(locationOp == GT_LCL_FLD);
- JITDUMP("Rewriting GT_ADDR(GT_LCL_FLD) to GT_LCL_FLD_ADDR:\n");
- }
-#endif // DEBUG
-
- location->SetOper(addrForm(locationOp));
- location->gtType = TYP_BYREF;
- copyFlags(location, address, GTF_ALL_EFFECT);
-
- use.ReplaceWith(location);
- BlockRange().Remove(address);
- }
- else if (location->OperIsIndir())
- {
- use.ReplaceWith(location->gtGetOp1());
- BlockRange().Remove(location);
- BlockRange().Remove(address);
-
- JITDUMP("Rewriting GT_ADDR(GT_IND(X)) to X:\n");
- }
- else
- {
- unreached();
- }
-
- DISPTREERANGE(BlockRange(), use.Def());
- JITDUMP("\n");
-}
-
Compiler::fgWalkResult Rationalizer::RewriteNode(GenTree** useEdge, Compiler::GenTreeStack& parentStack)
{
assert(useEdge != nullptr);
RewriteAssignment(use);
break;
- case GT_ADDR:
- RewriteAddress(use);
- break;
-
case GT_IND:
case GT_BLK:
case GT_OBJ:
// Other transformations
void RewriteAssignment(LIR::Use& use);
- void RewriteAddress(LIR::Use& use);
#ifdef TARGET_ARM64
void RewriteSubLshDiv(GenTree** use);
tree = gtNewOperNode(GT_IND, type, tree);
}
- if (tree->OperIsIndir() && tree->AsIndir()->Addr()->OperIs(GT_ADDR))
+ if (tree->OperIsIndir() && tree->AsIndir()->Addr()->OperIs(GT_LCL_VAR_ADDR))
{
- GenTree* location = tree->AsIndir()->Addr()->gtGetOp1();
- if (location->OperIs(GT_LCL_VAR) && location->TypeIs(type))
+ GenTreeLclVar* lclAddr = tree->AsIndir()->Addr()->AsLclVar();
+ LclVarDsc* varDsc = lvaGetDesc(lclAddr);
+ if (varDsc->TypeGet() == type)
{
assert(type != TYP_STRUCT);
- tree = location;
+ lclAddr->ChangeType(type);
+ lclAddr->SetOper(GT_LCL_VAR);
+
+ tree = lclAddr;
}
}
}
else if (tree->gtType == TYP_BYREF)
{
- assert(tree->IsLocal() || (tree->OperGet() == GT_RET_EXPR) || (tree->OperGet() == GT_CALL) ||
- ((tree->gtOper == GT_ADDR) && varTypeIsSIMD(tree->gtGetOp1())));
+ assert(tree->IsLocal() || tree->OperIs(GT_RET_EXPR, GT_CALL) ||
+ (tree->OperIs(GT_LCL_VAR_ADDR) && varTypeIsSIMD(lvaGetDesc(tree->AsLclVar()))));
}
return tree;
if (opcode == CEE_NEWOBJ)
{
op1 = newobjThis;
- assert(newobjThis->gtOper == GT_ADDR && newobjThis->AsOp()->gtOp1->gtOper == GT_LCL_VAR);
+ assert(newobjThis->OperIs(GT_LCL_VAR_ADDR));
// push newobj result on type stack
- unsigned tmp = op1->AsOp()->gtOp1->AsLclVarCommon()->GetLclNum();
- impPushOnStack(gtNewLclvNode(tmp, lvaGetRealType(tmp)), verMakeTypeInfo(clsHnd).NormaliseForStack());
+ unsigned lclNum = op1->AsLclVarCommon()->GetLclNum();
+ impPushOnStack(gtNewLclvNode(lclNum, lvaGetRealType(lclNum)), verMakeTypeInfo(clsHnd).NormaliseForStack());
}
else
{
// is used in a SIMD intrinsic.
// Arguments:
// tree - GenTree*
-
+//
void Compiler::setLclRelatedToSIMDIntrinsic(GenTree* tree)
{
- assert(tree->OperIsLocal());
+ assert(tree->OperIs(GT_LCL_VAR, GT_LCL_VAR_ADDR));
LclVarDsc* lclVarDsc = lvaGetDesc(tree->AsLclVarCommon());
lclVarDsc->lvUsedInSIMDIntrinsic = true;
}
GenTree* op2ObjRef = op2->AsField()->GetFldObj();
while (op1ObjRef != nullptr && op2ObjRef != nullptr)
{
-
if (op1ObjRef->OperGet() != op2ObjRef->OperGet())
{
break;
}
- else if (op1ObjRef->OperGet() == GT_ADDR)
- {
- op1ObjRef = op1ObjRef->AsOp()->gtOp1;
- op2ObjRef = op2ObjRef->AsOp()->gtOp1;
- }
- if (op1ObjRef->OperIsLocal() && op2ObjRef->OperIsLocal() &&
- op1ObjRef->AsLclVarCommon()->GetLclNum() == op2ObjRef->AsLclVarCommon()->GetLclNum())
+ if (op1ObjRef->OperIs(GT_LCL_VAR, GT_LCL_VAR_ADDR) &&
+ (op1ObjRef->AsLclVarCommon()->GetLclNum() == op2ObjRef->AsLclVarCommon()->GetLclNum()))
{
return true;
}
- else if (op1ObjRef->OperGet() == GT_FIELD && op2ObjRef->OperGet() == GT_FIELD &&
- op1ObjRef->AsField()->gtFldHnd == op2ObjRef->AsField()->gtFldHnd)
+
+ if (op1ObjRef->OperIs(GT_FIELD) && (op1ObjRef->AsField()->gtFldHnd == op2ObjRef->AsField()->gtFldHnd))
{
op1ObjRef = op1ObjRef->AsField()->GetFldObj();
op2ObjRef = op2ObjRef->AsField()->GetFldObj();
if (tree->OperIs(GT_FIELD))
{
GenTree* objRef = tree->AsField()->GetFldObj();
- if (objRef != nullptr && objRef->gtOper == GT_ADDR)
+ if ((objRef != nullptr) && objRef->OperIs(GT_LCL_VAR_ADDR))
{
- GenTree* obj = objRef->AsOp()->gtOp1;
-
// If the field is directly from a struct, then in this case,
// we should set this struct's lvUsedInSIMDIntrinsic as true,
// so that this sturct won't be promoted.
// TODO-CQ:
// In future, we should optimize this case so that if there is a nested field like s1.s2.x and s1.s2.x's
// address is used for initializing the vector, then s1 can be promoted but s2 can't.
- if (varTypeIsSIMD(obj) && obj->OperIsLocal())
+ if (varTypeIsSIMD(lvaGetDesc(objRef->AsLclVar())))
{
- setLclRelatedToSIMDIntrinsic(obj);
+ setLclRelatedToSIMDIntrinsic(objRef);
}
}
GenTree* expr = stmt->GetRootNode();
if (expr->OperGet() == GT_ASG && expr->TypeGet() == TYP_FLOAT)
{
- GenTree* curDst = expr->AsOp()->gtOp1;
- GenTree* curSrc = expr->AsOp()->gtOp2;
- unsigned index = 0;
- CorInfoType simdBaseJitType = CORINFO_TYPE_UNDEF;
- unsigned simdSize = 0;
- GenTree* srcSimdStructNode = getSIMDStructFromField(curSrc, &simdBaseJitType, &index, &simdSize, true);
+ GenTree* curDst = expr->AsOp()->gtOp1;
+ GenTree* curSrc = expr->AsOp()->gtOp2;
+ unsigned index = 0;
+ CorInfoType simdBaseJitType = CORINFO_TYPE_UNDEF;
+ unsigned simdSize = 0;
+ GenTree* srcSimdLclAddr = getSIMDStructFromField(curSrc, &simdBaseJitType, &index, &simdSize, true);
- if (srcSimdStructNode == nullptr || simdBaseJitType != CORINFO_TYPE_FLOAT)
+ if (srcSimdLclAddr == nullptr || simdBaseJitType != CORINFO_TYPE_FLOAT)
{
fgPreviousCandidateSIMDFieldAsgStmt = nullptr;
}
- else if (index == 0 && isSIMDTypeLocal(srcSimdStructNode))
+ else if (index == 0)
{
fgPreviousCandidateSIMDFieldAsgStmt = stmt;
}
if (index == (simdSize / genTypeSize(simdBaseType) - 1))
{
// Successfully found the pattern, mark the lclvar as UsedInSIMDIntrinsic
- if (srcSimdStructNode->OperIsLocal())
- {
- setLclRelatedToSIMDIntrinsic(srcSimdStructNode);
- }
+ setLclRelatedToSIMDIntrinsic(srcSimdLclAddr);
- if (curDst->OperGet() == GT_FIELD)
+ if (curDst->OperIs(GT_FIELD) && curDst->AsField()->IsInstance())
{
GenTree* objRef = curDst->AsField()->GetFldObj();
- if (objRef != nullptr && objRef->gtOper == GT_ADDR)
+ if (objRef->OperIs(GT_LCL_VAR_ADDR) && varTypeIsStruct(lvaGetDesc(objRef->AsLclVar())))
{
- GenTree* obj = objRef->AsOp()->gtOp1;
- if (varTypeIsStruct(obj) && obj->OperIsLocal())
- {
- setLclRelatedToSIMDIntrinsic(obj);
- }
+ setLclRelatedToSIMDIntrinsic(objRef);
}
}
}
if (initFromFirstArgIndir)
{
simdTree = op2;
- if (op1->AsOp()->gtOp1->OperIsLocal())
+ if (op1->OperIs(GT_LCL_VAR_ADDR))
{
// label the dst struct's lclvar is used for SIMD intrinsic,
// so that this dst struct won't be promoted.
- setLclRelatedToSIMDIntrinsic(op1->AsOp()->gtOp1);
+ setLclRelatedToSIMDIntrinsic(op1);
}
}
else
unsigned int lclNum = lclVarTree->AsLclVarCommon()->GetLclNum();
LclVarDsc* varDsc = compiler->lvaGetDesc(lclNum);
-#ifdef DEBUG
-#if !defined(TARGET_AMD64)
- // There are no addr nodes on ARM and we are experimenting with encountering vars in 'random' order.
- // Struct fields are not traversed in a consistent order, so ignore them when
- // verifying that we see the var nodes in execution order
- if (ForCodeGen)
- {
- if (tree->OperIsIndir())
- {
- assert(indirAddrLocal != NULL);
- }
- else if (tree->gtNext != NULL && tree->gtNext->gtOper == GT_ADDR &&
- ((tree->gtNext->gtNext == NULL || !tree->gtNext->gtNext->OperIsIndir())))
- {
- assert(tree->IsLocal()); // Can only take the address of a local.
- // The ADDR might occur in a context where the address it contributes is eventually
- // dereferenced, so we can't say that this is not a use or def.
- }
- }
-#endif // !TARGET_AMD64
-#endif // DEBUG
-
compiler->compCurLifeTree = tree;
VarSetOps::Assign(compiler, newLife, compiler->compCurLife);
}
break;
- case GT_ADDR:
- {
- GenTree* location = tree->AsUnOp()->gtGetOp1();
-
- if (location->OperIsLocalRead())
- {
- GenTreeLclVarCommon* lclNode = location->AsLclVarCommon();
- ValueNum addrVN =
- vnStore->VNForFunc(TYP_BYREF, VNF_PtrToLoc, vnStore->VNForIntCon(lclNode->GetLclNum()),
- vnStore->VNForIntPtrCon(lclNode->GetLclOffs()));
- tree->gtVNPair.SetBoth(addrVN); // No exceptions for local addresses.
- }
- else
- {
- tree->gtVNPair = vnStore->VNPUniqueWithExc(tree->TypeGet(),
- vnStore->VNPExceptionSet(location->gtVNPair));
- }
- }
- break;
-
case GT_ARR_ADDR:
fgValueNumberArrIndexAddr(tree->AsArrAddr());
break;