{
GenTreePtr tree = *pTree;
- if (tree->OperKind() & GTK_ASGOP)
+ if (tree->OperIsAssignment())
{
GenTreePtr op1 = tree->gtOp.gtOp1;
Compiler* comp = data->compiler;
GenTreePtr tree = optAddCopyAsgnNode;
GenTreePtr op1 = tree->gtOp.gtOp1;
- noway_assert(tree && op1 && (tree->OperKind() & GTK_ASGOP) && (op1->gtOper == GT_LCL_VAR) &&
+ noway_assert(tree && op1 && tree->OperIsAssignment() && (op1->gtOper == GT_LCL_VAR) &&
(op1->gtLclVarCommon.gtLclNum == lclNum));
/* TODO-Review: BB_UNITY_WEIGHT is not the correct block weight */
case GT_RSH:
case GT_RSZ:
case GT_NEG:
+#ifdef LEGACY_BACKEND
case GT_CHS:
+#endif
case GT_CAST:
case GT_INTRINSIC:
break;
if (jumpKind == EJ_lo)
{
- if ((tree->OperGet() != GT_SUB) && (tree->gtOper != GT_ASG_SUB))
+ if ((tree->OperGet() != GT_SUB)
+#ifdef LEGACY_BACKEND
+ && (tree->gtOper != GT_ASG_SUB)
+#endif
+ )
{
jumpKind = EJ_hs;
}
{
chars += printf("[LOGOP]");
}
+#ifdef LEGACY_BACKEND
if (kind & GTK_ASGOP)
{
chars += printf("[ASGOP]");
}
+#endif
if (kind & GTK_COMMUTE)
{
chars += printf("[COMMUTE]");
case GT_CAST:
case GT_ADD:
case GT_SUB:
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
case GT_ASG_SUB:
+#endif
if (tree->gtFlags & GTF_OVERFLOW)
{
chars += printf("[OVERFLOW]");
bool fgFoldConditional(BasicBlock* block);
+#ifdef LEGACY_BACKEND
void fgMorphStmts(BasicBlock* block, bool* mult, bool* lnot, bool* loadw);
+#else
+ void fgMorphStmts(BasicBlock* block, bool* lnot, bool* loadw);
+#endif
void fgMorphBlocks();
bool fgMorphBlockStmt(BasicBlock* block, GenTreeStmt* stmt DEBUGARG(const char* msg));
// lowering that is distributed between fgMorph and the lowering phase of LSRA.
void fgSimpleLowering();
+#ifdef LEGACY_BACKEND
bool fgShouldCreateAssignOp(GenTreePtr tree, bool* bReverse);
+#endif
GenTreePtr fgInitThisClass();
inline bool GenTree::gtOverflow() const
{
-#if !defined(_TARGET_64BIT_) && !defined(LEGACY_BACKEND)
- assert(gtOper == GT_MUL || gtOper == GT_CAST || gtOper == GT_ADD || gtOper == GT_SUB || gtOper == GT_ASG_ADD ||
- gtOper == GT_ASG_SUB || gtOper == GT_ADD_LO || gtOper == GT_SUB_LO || gtOper == GT_ADD_HI ||
- gtOper == GT_SUB_HI);
-#else
- assert(gtOper == GT_MUL || gtOper == GT_CAST || gtOper == GT_ADD || gtOper == GT_SUB || gtOper == GT_ASG_ADD ||
- gtOper == GT_ASG_SUB);
-#endif
+ assert(OperMayOverflow());
- if (gtFlags & GTF_OVERFLOW)
+ if ((gtFlags & GTF_OVERFLOW) != 0)
{
assert(varTypeIsIntegral(TypeGet()));
inline bool GenTree::gtOverflowEx() const
{
- if (gtOper == GT_MUL || gtOper == GT_CAST || gtOper == GT_ADD || gtOper == GT_SUB ||
-#if !defined(_TARGET_64BIT_) && !defined(LEGACY_BACKEND)
- gtOper == GT_ADD_HI || gtOper == GT_SUB_HI ||
-#endif
- gtOper == GT_ASG_ADD || gtOper == GT_ASG_SUB)
- {
- return gtOverflow();
- }
- return false;
+ // True iff this node's oper is one that may overflow AND the node actually
+ // has the GTF_OVERFLOW flag set (checked by gtOverflow()).
+ return OperMayOverflow() && gtOverflow();
}
/*
assert(lpIterTree);
- assert(lpIterTree->OperKind() & GTK_ASGOP); // +=, -=, etc or = +, = -, etc
+ assert(lpIterTree->OperIsAssignment());
if (lpIterTree->OperGet() == GT_ASG)
{
loResult->gtFlags |= GTF_SET_FLAGS;
hiResult->gtFlags |= GTF_USE_FLAGS;
- if (loResult->gtOverflow())
+ if ((loResult->gtFlags & GTF_OVERFLOW) != 0)
{
hiResult->gtFlags |= GTF_OVERFLOW | GTF_EXCEPT;
loResult->gtFlags &= ~(GTF_OVERFLOW | GTF_EXCEPT);
jumpKind = isUnsignedOverflow ? EJ_lo : EJ_vs;
if (jumpKind == EJ_lo)
{
- if ((dst->OperGet() != GT_SUB) && (dst->OperGet() != GT_ASG_SUB) && (dst->OperGet() != GT_SUB_HI))
+ if ((dst->OperGet() != GT_SUB) &&
+#ifdef LEGACY_BACKEND
+ (dst->OperGet() != GT_ASG_SUB) &&
+#endif
+ (dst->OperGet() != GT_SUB_HI))
{
jumpKind = EJ_hs;
}
{
noway_assert(tree->gtOper != GT_STMT);
- genTreeOps oper = tree->OperGet();
- unsigned kind = tree->OperKind();
- unsigned treeFlags = tree->gtFlags & GTF_ALL_EFFECT;
- unsigned chkFlags = 0;
+ const genTreeOps oper = tree->OperGet();
+ const unsigned kind = tree->OperKind();
+ unsigned treeFlags = tree->gtFlags & GTF_ALL_EFFECT;
+ unsigned chkFlags = 0;
if (tree->OperMayThrow(this))
{
/* For a GT_ASG(GT_IND(x), y) we are interested in the side effects of x */
GenTreePtr op1p;
- if ((kind & GTK_ASGOP) && (op1->gtOper == GT_IND))
+ if (GenTree::OperIsAssignment(oper) && (op1->gtOper == GT_IND))
{
op1p = op1->gtOp.gtOp1;
}
case GT_MUL:
case GT_ADD:
case GT_SUB:
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
case GT_ASG_SUB:
+#endif
case GT_CAST:
if (tree->gtOverflow())
{
#include "gtlist.h"
};
+#ifdef LEGACY_BACKEND
/*****************************************************************************/
// static
genTreeOps GenTree::OpAsgToOper(genTreeOps op)
unreached(); // Precondition implies we don't get here.
}
}
+#endif // LEGACY_BACKEND
/*****************************************************************************
*
return false;
}
- if (kind & GTK_ASGOP)
+ if (GenTree::OperIsAssignment(oper))
{
// 'tree' is the gtOp1 of an assignment node. So we can handle
// the case where defOnly is either true or false.
/* Figure out what kind of a node we have */
- genTreeOps oper = tree->OperGet();
- unsigned kind = tree->OperKind();
+ const genTreeOps oper = tree->OperGet();
+ const unsigned kind = tree->OperKind();
/* Assume no fixed registers will be trashed */
case GT_ADD:
case GT_SUB:
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
case GT_ASG_SUB:
-
+#endif
if (isflt)
{
/* FP instructions are a bit more expensive */
/* Assignments need a bit of special handling */
- if (kind & GTK_ASGOP)
+ if (GenTree::OperIsAssignment(oper))
{
/* Process the target */
#endif // FEATURE_STACK_FP_X87
bool bReverseInAssignment = false;
- if (kind & GTK_ASGOP)
+ if (GenTree::OperIsAssignment(oper))
{
GenTreePtr op1Val = op1;
case GT_RSZ:
case GT_ROL:
case GT_ROR:
+#ifdef LEGACY_BACKEND
case GT_ASG_LSH:
case GT_ASG_RSH:
case GT_ASG_RSZ:
-
+#endif
/* Variable sized shifts are more expensive and use REG_SHIFT */
if (!op2->IsCnsIntOrI())
bool GenTree::OperRequiresAsgFlag()
{
- return ((OperKind() & GTK_ASGOP) || (gtOper == GT_XADD) || (gtOper == GT_XCHG) || (gtOper == GT_LOCKADD) ||
+ return (OperIsAssignment() || (gtOper == GT_XADD) || (gtOper == GT_XCHG) || (gtOper == GT_LOCKADD) ||
(gtOper == GT_CMPXCHG) || (gtOper == GT_MEMORYBARRIER));
}
break;
case GT_ADD:
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
if (val == 0)
{
goto DONE_FOLD;
break;
case GT_MUL:
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
+#endif
if (val == 1)
{
goto DONE_FOLD;
case GT_DIV:
case GT_UDIV:
+#ifdef LEGACY_BACKEND
case GT_ASG_DIV:
+#endif
if ((op2 == cons) && (val == 1) && !(op1->OperKind() & GTK_CONST))
{
goto DONE_FOLD;
break;
case GT_SUB:
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
if ((op2 == cons) && (val == 0) && !(op1->OperKind() & GTK_CONST))
{
goto DONE_FOLD;
case GT_RSZ:
case GT_ROL:
case GT_ROR:
+#ifdef LEGACY_BACKEND
case GT_ASG_LSH:
case GT_ASG_RSH:
case GT_ASG_RSZ:
+#endif
if (val == 0)
{
if (op2 == cons)
// a use, update the flags appropriately
if (op->gtOper == GT_LCL_VAR)
{
- assert((tree->OperKind() & GTK_ASGOP) || (op->gtFlags & (GTF_VAR_USEASG | GTF_VAR_DEF)) == 0);
+ assert(tree->OperIsAssignment() || (op->gtFlags & (GTF_VAR_USEASG | GTF_VAR_DEF)) == 0);
op->gtFlags &= ~(GTF_VAR_USEASG | GTF_VAR_DEF);
}
break;
case GT_NEG:
+#ifdef LEGACY_BACKEND
case GT_CHS:
+#endif
i1 = -i1;
break;
break;
case GT_NEG:
+#ifdef LEGACY_BACKEND
case GT_CHS:
+#endif
lval1 = -lval1;
break;
switch (tree->gtOper)
{
case GT_NEG:
+#ifdef LEGACY_BACKEND
case GT_CHS:
+#endif
d1 = -d1;
break;
{
if (flags & GTF_ASG)
{
- if ((tree->OperKind() & GTK_ASGOP))
+ if (tree->OperIsAssignment())
{
return true;
}
*pOper = rhs->gtOper;
}
}
+#ifdef LEGACY_BACKEND
else
{
lclNum = lhsLclNum;
*pOper = GenTree::OpAsgToOper(gtOper);
*pOtherTree = gtOp.gtOp2;
}
+#endif
}
}
return lclNum;
GTK_BINOP = 0x0008, // binary operator
GTK_RELOP = 0x0010, // comparison operator
GTK_LOGOP = 0x0020, // logical operator
+#ifdef LEGACY_BACKEND
GTK_ASGOP = 0x0040, // assignment operator
+#endif
GTK_KINDMASK = 0x007F, // operator kind mask
+ // Returns true if 'gtOper' is an assignment operator. On the legacy backend
+ // this is any oper with the GTK_ASGOP kind bit (GT_ASG plus the op= forms);
+ // on RyuJIT the only assignment oper is GT_ASG.
static bool OperIsAssignment(genTreeOps gtOper)
{
+#ifdef LEGACY_BACKEND
return (OperKind(gtOper) & GTK_ASGOP) != 0;
+#else
+ return gtOper == GT_ASG;
+#endif
}
bool OperIsAssignment() const
return OperIsAssignment(gtOper);
}
+ // Returns true if a node with oper 'gtOper' may legally carry the
+ // GTF_OVERFLOW flag: ADD, SUB, MUL and CAST, plus the legacy backend's
+ // ASG_ADD/ASG_SUB forms or, for 32-bit RyuJIT targets, the decomposed
+ // long-arithmetic high halves ADD_HI/SUB_HI.
+ static bool OperMayOverflow(genTreeOps gtOper)
+ {
+ return ((gtOper == GT_ADD) || (gtOper == GT_SUB) || (gtOper == GT_MUL) || (gtOper == GT_CAST)
+#ifdef LEGACY_BACKEND
+ || (gtOper == GT_ASG_ADD) || (gtOper == GT_ASG_SUB)
+#elif !defined(_TARGET_64BIT_)
+ || (gtOper == GT_ADD_HI) || (gtOper == GT_SUB_HI)
+#endif
+ );
+ }
+
+ // Convenience overload: tests this node's own oper.
+ bool OperMayOverflow() const
+ {
+ return OperMayOverflow(gtOper);
+ }
+
static bool OperIsIndir(genTreeOps gtOper)
{
return gtOper == GT_IND || gtOper == GT_STOREIND || gtOper == GT_NULLCHECK || OperIsBlk(gtOper);
return OperIsBoundsCheck(OperGet());
}
+#ifdef LEGACY_BACKEND
// Requires that "op" is an op= operator. Returns
// the corresponding "op".
static genTreeOps OpAsgToOper(genTreeOps op);
+#endif
#ifdef DEBUG
bool NullOp1Legal() const
case GT_ROR:
case GT_INDEX:
case GT_ASG:
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
case GT_ASG_SUB:
case GT_ASG_MUL:
case GT_ASG_LSH:
case GT_ASG_RSH:
case GT_ASG_RSZ:
+#endif
case GT_EQ:
case GT_NE:
case GT_LT:
GTNODE(COPY , GenTreeCopyOrReload,0,GTK_UNOP) // Copies a variable from its current location to a register that satisfies
// code generation constraints. The child is the actual lclVar node.
GTNODE(RELOAD , GenTreeCopyOrReload,0,GTK_UNOP)
+#ifdef LEGACY_BACKEND
GTNODE(CHS , GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR) // GT_CHS is actually unary -- op2 is ignored.
// Changing to unary presently causes problems, though -- take a little work to fix.
+#endif
GTNODE(ARR_LENGTH , GenTreeArrLen ,0,GTK_UNOP|GTK_EXOP) // array-length
// the div into a MULHI + some adjustments. In codegen, we only use the
// results of the high register, and we drop the low results.
+#ifndef LEGACY_BACKEND
+GTNODE(ASG , GenTreeOp ,0,GTK_BINOP|GTK_NOTLIR)
+#else
GTNODE(ASG , GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
GTNODE(ASG_ADD , GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
GTNODE(ASG_SUB , GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
GTNODE(ASG_LSH , GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
GTNODE(ASG_RSH , GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
GTNODE(ASG_RSZ , GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-
+#endif
GTNODE(EQ , GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
GTNODE(NE , GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
GTNODE(LT , GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
switch (oper)
{
case GT_ADD:
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
return type == TYP_DOUBLE ? INS_addsd : INS_addss;
- break;
case GT_SUB:
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
return type == TYP_DOUBLE ? INS_subsd : INS_subss;
- break;
case GT_MUL:
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
+#endif
return type == TYP_DOUBLE ? INS_mulsd : INS_mulss;
- break;
case GT_DIV:
+#ifdef LEGACY_BACKEND
case GT_ASG_DIV:
+#endif
return type == TYP_DOUBLE ? INS_divsd : INS_divss;
case GT_AND:
return type == TYP_DOUBLE ? INS_andpd : INS_andps;
switch (oper)
{
case GT_ADD:
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
return INS_vadd;
- break;
case GT_SUB:
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
return INS_vsub;
- break;
case GT_MUL:
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
+#endif
return INS_vmul;
break;
case GT_DIV:
+#ifdef LEGACY_BACKEND
case GT_ASG_DIV:
+#endif
return INS_vdiv;
case GT_NEG:
return INS_vneg;
//=============================================================================
-#define OPT_MULT_ADDSUB 1 // optimize consecutive "lclVar += or -= icon"
-#define OPT_BOOL_OPS 1 // optimize boolean operations
+#define OPT_BOOL_OPS 1 // optimize boolean operations
//=============================================================================
/* Is this an assigment? */
- if (tree->OperKind() & GTK_ASGOP)
+ if (tree->OperIsAssignment())
{
GenTreePtr op1 = tree->gtOp.gtOp1;
GenTreePtr op2 = tree->gtOp.gtOp2;
unsigned lclNum;
LclVarDsc* varDsc = nullptr;
+#ifdef LEGACY_BACKEND
/* GT_CHS is special it doesn't have a valid op2 */
if (tree->gtOper == GT_CHS)
{
}
}
else
+#endif
{
if (op2->gtOper == GT_LCL_VAR)
{
noway_assert(rhsNode);
noway_assert(tree->gtFlags & GTF_VAR_DEF);
+#ifndef LEGACY_BACKEND
+ assert(asgNode->OperIs(GT_ASG));
+#else
if (asgNode->gtOper != GT_ASG && asgNode->gtOverflowEx())
{
// asgNode may be <op_ovf>= (with GTF_OVERFLOW). In that case, we need to keep the <op_ovf>
}
return false;
}
-
+#endif
// Do not remove if this local variable represents
// a promoted struct field of an address exposed local.
if (varDsc->lvIsStructField && lvaTable[varDsc->lvParentLcl].lvAddrExposed)
break;
case GT_ASG:
- case GT_ASG_ADD:
- case GT_ASG_SUB:
noway_assert(!"We should never hit any assignment operator in lowering");
info->srcCount = 0;
break;
break;
case GT_ASG:
- case GT_ASG_ADD:
- case GT_ASG_SUB:
noway_assert(!"We should never hit any assignment operator in lowering");
info->srcCount = 0;
break;
break;
case GT_ASG:
- case GT_ASG_ADD:
- case GT_ASG_SUB:
noway_assert(!"We should never hit any assignment operator in lowering");
info->srcCount = 0;
break;
}
#endif
+#ifdef LEGACY_BACKEND
__fallthrough;
case GT_ASG_ADD:
case GT_ASG_RSH:
case GT_ASG_RSZ:
case GT_CHS:
+#endif
// We can't CSE the LHS of an assignment. Only r-values can be CSEed.
// Previously, the "lhs" (addr) of a block op was CSE'd. So, to duplicate the former
}
fgAssignSetVarDef(tree);
+#ifdef LEGACY_BACKEND
__fallthrough;
case GT_ASG_ADD:
case GT_ASG_LSH:
case GT_ASG_RSH:
case GT_ASG_RSZ:
+#endif
/* We can't CSE the LHS of an assignment */
/* We also must set in the pre-morphing phase, otherwise assertionProp doesn't see it */
break;
+#ifdef LEGACY_BACKEND
case GT_CHS:
+#endif
case GT_NOT:
case GT_NEG:
case GT_COMMA:
/* Special case: trees that don't produce a value */
- if ((op2->OperKind() & GTK_ASGOP) || (op2->OperGet() == GT_COMMA && op2->TypeGet() == TYP_VOID) ||
- fgIsThrow(op2))
+ if (op2->OperIsAssignment() || (op2->OperGet() == GT_COMMA && op2->TypeGet() == TYP_VOID) || fgIsThrow(op2))
{
typ = tree->gtType = TYP_VOID;
}
switch (oper)
{
+#ifdef LEGACY_BACKEND
genTreeOps cmop;
bool dstIsSafeLclVar;
+#endif
case GT_ASG:
- /* We'll convert "a = a <op> x" into "a <op>= x" */
- /* and also "a = x <op> a" into "a <op>= x" for communative ops */
- CLANG_FORMAT_COMMENT_ANCHOR;
-
- if (typ == TYP_LONG)
- {
- break;
- }
-
if (varTypeIsStruct(typ) && !tree->IsPhiDefn())
{
if (tree->OperIsCopyBlkOp())
}
}
+ if (typ == TYP_LONG)
+ {
+ break;
+ }
+
/* Make sure we're allowed to do this */
if (optValnumCSE_phase)
break;
}
+#ifdef LEGACY_BACKEND
+ /* We'll convert "a = a <op> x" into "a <op>= x" */
+ /* and also "a = x <op> a" into "a <op>= x" for commutative ops */
+
/* Are we assigning to a GT_LCL_VAR ? */
dstIsSafeLclVar = (op1->gtOper == GT_LCL_VAR);
}
if (!dstIsSafeLclVar)
+#endif // LEGACY_BACKEND
{
if (op2->gtFlags & GTF_ASG)
{
/* Special case: a cast that can be thrown away */
+ // TODO-Cleanup: fgMorphSmp does a similar optimization. However, it removes only
+ // one cast and sometimes there is another one after it that gets removed by this
+ // code. fgMorphSmp should be improved to remove all redundant casts so this code
+ // can be removed.
+
if (op1->gtOper == GT_IND && op2->gtOper == GT_CAST && !op2->gtOverflow())
{
var_types srct;
}
}
+#ifdef LEGACY_BACKEND
/* Make sure we have the operator range right */
static_assert(GT_SUB == GT_ADD + 1, "bad oper value");
default:
break;
}
-
+#endif // LEGACY_BACKEND
break;
case GT_MUL:
* for reentrant calls.
*/
+#ifdef LEGACY_BACKEND
void Compiler::fgMorphStmts(BasicBlock* block, bool* mult, bool* lnot, bool* loadw)
+#else
+void Compiler::fgMorphStmts(BasicBlock* block, bool* lnot, bool* loadw)
+#endif
{
fgRemoveRestOfBlock = false;
compCurBB = block;
- *mult = *lnot = *loadw = false;
+ *lnot = *loadw = false;
+#ifdef LEGACY_BACKEND
+ *mult = false;
+#endif
fgCurrentlyInUseArgTemps = hashBv::Create(this);
continue;
}
-#if OPT_MULT_ADDSUB
-
+#ifdef LEGACY_BACKEND
/* Note whether we have two or more +=/-= operators in a row */
if (tree->gtOper == GT_ASG_ADD || tree->gtOper == GT_ASG_SUB)
}
}
-#endif
-
/* Note "x = a[i] & icon" followed by "x |= a[i] << 8" */
if (tree->gtOper == GT_ASG_OR && prev && prev->gtOper == GT_ASG)
{
*loadw = true;
}
+#endif // LEGACY_BACKEND
}
if (fgRemoveRestOfBlock)
do
{
-#if OPT_MULT_ADDSUB
+#ifdef LEGACY_BACKEND
bool mult = false;
#endif
GenTreePtr tree;
+#ifndef LEGACY_BACKEND
+ fgMorphStmts(block, &lnot, &loadw);
+#else
fgMorphStmts(block, &mult, &lnot, &loadw);
-#if OPT_MULT_ADDSUB
-
if (mult && (opts.compFlags & CLFLG_TREETRANS) && !opts.compDbgCode && !opts.MinOpts())
{
for (tree = block->bbTreeList; tree; tree = tree->gtNext)
}
}
-#endif
+#endif // LEGACY_BACKEND
/* Are we using a single return block? */
}
}
+#ifdef LEGACY_BACKEND
/** This predicate decides whether we will fold a tree with the structure:
* x = x <op> y where x could be any arbitrary expression into
* x <op>= y.
#if CPU_LOAD_STORE_ARCH
/* In the case of a load/store architecture, there's no gain by doing any of this, we bail. */
return false;
-#elif !defined(LEGACY_BACKEND)
- return false;
-#else // defined(LEGACY_BACKEND)
-
+#else
GenTreePtr op1 = tree->gtOp.gtOp1;
GenTreePtr op2 = tree->gtGetOp2();
genTreeOps cmop = op2->OperGet();
}
}
return false;
-#endif // defined(LEGACY_BACKEND)
+#endif // !CPU_LOAD_STORE_ARCH
}
+#endif // LEGACY_BACKEND
#ifdef FEATURE_SIMD
switch (iterOper)
{
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
case GT_SUB:
iterInc = -iterInc;
__fallthrough;
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
case GT_ADD:
if (constInitX != constLimitX)
{
*iterCount = loopCount;
return true;
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
- case GT_MUL:
case GT_ASG_DIV:
- case GT_DIV:
case GT_ASG_RSH:
- case GT_RSH:
case GT_ASG_LSH:
- case GT_LSH:
case GT_ASG_UDIV:
+#endif
+ case GT_MUL:
+ case GT_DIV:
+ case GT_RSH:
+ case GT_LSH:
case GT_UDIV:
return false;
case GT_LT:
switch (iterOper)
{
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
case GT_SUB:
iterInc = -iterInc;
__fallthrough;
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
case GT_ADD:
if (constInitX < constLimitX)
{
*iterCount = loopCount;
return true;
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
- case GT_MUL:
case GT_ASG_DIV:
- case GT_DIV:
case GT_ASG_RSH:
- case GT_RSH:
case GT_ASG_LSH:
- case GT_LSH:
case GT_ASG_UDIV:
+#endif
+ case GT_MUL:
+ case GT_DIV:
+ case GT_RSH:
+ case GT_LSH:
case GT_UDIV:
return false;
case GT_LE:
switch (iterOper)
{
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
case GT_SUB:
iterInc = -iterInc;
__fallthrough;
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
case GT_ADD:
if (constInitX <= constLimitX)
{
*iterCount = loopCount;
return true;
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
- case GT_MUL:
case GT_ASG_DIV:
- case GT_DIV:
case GT_ASG_RSH:
- case GT_RSH:
case GT_ASG_LSH:
- case GT_LSH:
case GT_ASG_UDIV:
+#endif
+ case GT_MUL:
+ case GT_DIV:
+ case GT_RSH:
+ case GT_LSH:
case GT_UDIV:
return false;
case GT_GT:
switch (iterOper)
{
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
case GT_SUB:
iterInc = -iterInc;
__fallthrough;
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
case GT_ADD:
if (constInitX > constLimitX)
{
*iterCount = loopCount;
return true;
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
- case GT_MUL:
case GT_ASG_DIV:
- case GT_DIV:
case GT_ASG_RSH:
- case GT_RSH:
case GT_ASG_LSH:
- case GT_LSH:
case GT_ASG_UDIV:
+#endif
+ case GT_MUL:
+ case GT_DIV:
+ case GT_RSH:
+ case GT_LSH:
case GT_UDIV:
return false;
case GT_GE:
switch (iterOper)
{
+#ifdef LEGACY_BACKEND
case GT_ASG_SUB:
+#endif
case GT_SUB:
iterInc = -iterInc;
__fallthrough;
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
+#endif
case GT_ADD:
if (constInitX >= constLimitX)
{
*iterCount = loopCount;
return true;
+#ifdef LEGACY_BACKEND
case GT_ASG_MUL:
- case GT_MUL:
case GT_ASG_DIV:
- case GT_DIV:
case GT_ASG_RSH:
- case GT_RSH:
case GT_ASG_LSH:
- case GT_LSH:
case GT_ASG_UDIV:
+#endif
+ case GT_MUL:
+ case GT_DIV:
+ case GT_RSH:
+ case GT_LSH:
case GT_UDIV:
return false;
oper = tree->OperGet();
kind = tree->OperKind();
- if (kind & GTK_ASGOP)
+ if (GenTree::OperIsAssignment(oper))
{
noway_assert(doit == false);
return false;
{
GenTreePtr tree = *pTree;
- if (tree->OperKind() & GTK_ASGOP)
+ if (tree->OperIsAssignment())
{
GenTreePtr dest = tree->gtOp.gtOp1;
genTreeOps destOper = dest->OperGet();
GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
// If we encounter an assignment to a local variable,
- if ((tree->OperKind() & GTK_ASGOP) && tree->gtOp.gtOp1->gtOper == GT_LCL_VAR)
+ if (tree->OperIsAssignment() && tree->gtOp.gtOp1->gtOper == GT_LCL_VAR)
{
// And the assigned variable equals the input local,
if (tree->gtOp.gtOp1->gtLclVarCommon.gtLclNum == LclNum)
}
// TODO-CQ: CLONE: Mark increasing or decreasing loops.
- if ((pLoop->lpIterOper() != GT_ASG_ADD && pLoop->lpIterOper() != GT_ADD) || (pLoop->lpIterConst() != 1))
+ if ((
+#ifdef LEGACY_BACKEND
+ pLoop->lpIterOper() != GT_ASG_ADD &&
+#endif
+ pLoop->lpIterOper() != GT_ADD) ||
+ (pLoop->lpIterConst() != 1))
{
JITDUMP("> Loop iteration operator not matching\n");
return false;
return false;
}
- if (!(((pLoop->lpTestOper() == GT_LT || pLoop->lpTestOper() == GT_LE) &&
- (pLoop->lpIterOper() == GT_ADD || pLoop->lpIterOper() == GT_ASG_ADD)) ||
- ((pLoop->lpTestOper() == GT_GT || pLoop->lpTestOper() == GT_GE) &&
- (pLoop->lpIterOper() == GT_SUB || pLoop->lpIterOper() == GT_ASG_SUB))))
+ if (!(((pLoop->lpTestOper() == GT_LT || pLoop->lpTestOper() == GT_LE) && (pLoop->lpIterOper() == GT_ADD
+#ifdef LEGACY_BACKEND
+ || pLoop->lpIterOper() == GT_ASG_ADD
+#endif
+ )) ||
+ ((pLoop->lpTestOper() == GT_GT || pLoop->lpTestOper() == GT_GE) && (pLoop->lpIterOper() == GT_SUB
+#ifdef LEGACY_BACKEND
+ || pLoop->lpIterOper() == GT_ASG_SUB
+#endif
+ ))))
{
JITDUMP("> Loop test (%s) doesn't agree with the direction (%s) of the pLoop->\n",
GenTree::OpName(pLoop->lpTestOper()), GenTree::OpName(pLoop->lpIterOper()));
return false;
}
GenTreePtr asg = loc->parent;
- assert(asg->OperKind() & GTK_ASGOP);
+ assert(asg->OperIsAssignment());
switch (asg->OperGet())
{
case GT_ASG:
return IsMonotonicallyIncreasing(asg->gtGetOp2(), path);
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
return IsBinOpMonotonicallyIncreasing(asg->gtGetOp1(), asg->gtGetOp2(), GT_ADD, path);
+#endif
default:
+#ifndef LEGACY_BACKEND
+ unreached();
+#endif
// All other 'asg->OperGet()' kinds, return false
break;
}
}
#endif
GenTreePtr asg = loc->parent;
- assert(asg->OperKind() & GTK_ASGOP);
+ assert(asg->OperIsAssignment());
switch (asg->OperGet())
{
// If the operator of the definition is assignment, then compute the range of the rhs.
return range;
}
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
// If the operator of the definition is +=, then compute the range of the operands of +.
// Note that gtGetOp1 will return op1 to be the lhs; in the formulation of ssa, we have
// a side table for defs and the lhs of a += is considered to be a use for SSA numbering.
return ComputeRangeForBinOp(loc->block, loc->stmt, asg->gtGetOp1(), asg->gtGetOp2(), GT_ADD, path,
monotonic DEBUGARG(indent));
+#endif
default:
+#ifndef LEGACY_BACKEND
+ unreached();
+#endif
// All other 'asg->OperGet()' kinds, return Limit::keUnknown
break;
}
}
// Get the parent node which is an asg.
GenTreePtr asg = loc->parent;
- assert(asg->OperKind() & GTK_ASGOP);
+ assert(asg->OperIsAssignment());
switch (asg->OperGet())
{
case GT_ASG:
return DoesOverflow(loc->block, loc->stmt, asg->gtGetOp2(), path);
+#ifdef LEGACY_BACKEND
case GT_ASG_ADD:
// For GT_ASG_ADD, op2 is use, op1 is also use since we side table for defs in useasg case.
return DoesBinOpOverflow(loc->block, loc->stmt, asg->gtGetOp1(), asg->gtGetOp2(), path);
+#endif
default:
+#ifndef LEGACY_BACKEND
+ unreached();
+#endif
// All other 'asg->OperGet()' kinds, conservatively return true
break;
}
if (ssaNum != SsaConfig::RESERVED_SSA_NUM)
{
// To avoid ind(addr) use asgs
- if (loc.parent->OperKind() & GTK_ASGOP)
+ if (loc.parent->OperIsAssignment())
{
SetDef(HashCode(lclNum, ssaNum), new (m_pCompiler->getAllocator()) Location(loc));
}
}
else // Must be an "op="
{
+#ifndef LEGACY_BACKEND
+ unreached();
+#else
// If the LHS is an IND, we didn't evaluate it when we visited it previously.
// But we didn't know that the parent was an op=. We do now, so go back and evaluate it.
// (We actually check if the effective val is the IND. We will have evaluated any non-last
lhsNormVNP),
lhsExcVNP);
}
+#endif // !LEGACY_BACKEND
}
if (tree->TypeGet() != TYP_VOID)
{