/*****************************************************************************/
const unsigned short GenTree::gtOperKindTable[] = {
-#define GTNODE(en, sn, cm, ok) ok + GTK_COMMUTE *cm,
+#define GTNODE(en, sn, st, cm, ok) ok + GTK_COMMUTE *cm,
#include "gtlist.h"
};
}
static const char* nodeNames[] = {
-#define GTNODE(en, sn, cm, ok) sn,
+#define GTNODE(en, sn, st, cm, ok) sn,
#include "gtlist.h"
};
return nodeNames[op];
}
+#endif
+
+#if defined(DEBUG) || NODEBASH_STATS
+
static const char* opNames[] = {
-#define GTNODE(en, sn, cm, ok) #en,
+#define GTNODE(en, sn, st, cm, ok) #en,
#include "gtlist.h"
};
/* static */
unsigned char GenTree::s_gtNodeSizes[GT_COUNT + 1];
+#if NODEBASH_STATS
+
+unsigned char GenTree::s_gtTrueSizes[GT_COUNT+1]
+{
+ #define GTNODE(en, sn, st, cm, ok) sizeof(st),
+ #include "gtlist.h"
+};
+
+#endif//NODEBASH_STATS
+
/* static */
void GenTree::InitNodeSize()
{
// Now set all of the appropriate entries to 'large'
CLANG_FORMAT_COMMENT_ANCHOR;
+ // clang-format off
#if defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
// On ARM32, ARM64 and System V for struct returning
// there is code that does GT_ASG-tree.CopyObj call.
// CopyObj is a large node and the GT_ASG is small, which triggers an exception.
- GenTree::s_gtNodeSizes[GT_ASG] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_RETURN] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_ASG] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_RETURN] = TREE_NODE_SZ_LARGE;
#endif // defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
GenTree::s_gtNodeSizes[GT_CALL] = TREE_NODE_SZ_LARGE;
#ifdef FEATURE_SIMD
GenTree::s_gtNodeSizes[GT_SIMD_CHK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_SIMD
- GenTree::s_gtNodeSizes[GT_ARR_ELEM] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_ARR_INDEX] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_ARR_OFFSET] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_RET_EXPR] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_OBJ] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_FIELD] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_STMT] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_CMPXCHG] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_QMARK] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_LEA] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_STORE_OBJ] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_DYN_BLK] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_STORE_DYN_BLK] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_INTRINSIC] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_ALLOCOBJ] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_ARR_ELEM] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_ARR_INDEX] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_ARR_OFFSET] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_RET_EXPR] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_OBJ] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_FIELD] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_STMT] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_CMPXCHG] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_QMARK] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_LEA] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_STORE_OBJ] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_DYN_BLK] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_STORE_DYN_BLK] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_INTRINSIC] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_ALLOCOBJ] = TREE_NODE_SZ_LARGE;
#if USE_HELPERS_FOR_INT_DIV
- GenTree::s_gtNodeSizes[GT_DIV] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_UDIV] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_MOD] = TREE_NODE_SZ_LARGE;
- GenTree::s_gtNodeSizes[GT_UMOD] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_DIV] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_UDIV] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_MOD] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_UMOD] = TREE_NODE_SZ_LARGE;
#endif
#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
- GenTree::s_gtNodeSizes[GT_PUTARG_STK] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_PUTARG_STK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
assert(GenTree::s_gtNodeSizes[GT_RETURN] == GenTree::s_gtNodeSizes[GT_ASG]);
assert(sizeof(GenTreeLclFld) <= GenTree::s_gtNodeSizes[GT_LCL_FLD]);
assert(sizeof(GenTreeLclVar) <= GenTree::s_gtNodeSizes[GT_LCL_VAR]);
- static_assert_no_msg(sizeof(GenTree) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeUnOp) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeOp) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeVal) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTree) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeUnOp) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeOp) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeVal) <= TREE_NODE_SZ_SMALL);
static_assert_no_msg(sizeof(GenTreeIntConCommon) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreePhysReg) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreePhysReg) <= TREE_NODE_SZ_SMALL);
#ifndef LEGACY_BACKEND
- static_assert_no_msg(sizeof(GenTreeJumpTable) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeJumpTable) <= TREE_NODE_SZ_SMALL);
#endif // !LEGACY_BACKEND
- static_assert_no_msg(sizeof(GenTreeIntCon) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeLngCon) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeDblCon) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeStrCon) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeIntCon) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeLngCon) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeDblCon) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeStrCon) <= TREE_NODE_SZ_SMALL);
static_assert_no_msg(sizeof(GenTreeLclVarCommon) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeLclVar) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeLclFld) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeRegVar) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeCast) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeBox) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeField) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeArgList) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeColon) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeCall) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeCmpXchg) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeFptrVal) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeQmark) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeIntrinsic) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeIndex) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeArrLen) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeBoundsChk) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeArrElem) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeArrIndex) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeArrOffs) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeIndir) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeStoreInd) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeAddrMode) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeObj) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeBlk) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeRetExpr) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeStmt) <= TREE_NODE_SZ_LARGE); // *** large node
- static_assert_no_msg(sizeof(GenTreeClsVar) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeArgPlace) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeLabel) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreePhiArg) <= TREE_NODE_SZ_SMALL);
- static_assert_no_msg(sizeof(GenTreeAllocObj) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeLclVar) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeLclFld) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeRegVar) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeJumpCC) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeCast) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeBox) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeField) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeArgList) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeColon) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeCall) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeCmpXchg) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeFptrVal) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeQmark) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeIntrinsic) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeIndex) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeArrLen) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeBoundsChk) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeArrElem) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeArrIndex) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeArrOffs) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeIndir) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeStoreInd) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeAddrMode) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeObj) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeBlk) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeRetExpr) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeStmt) <= TREE_NODE_SZ_LARGE); // *** large node
+ static_assert_no_msg(sizeof(GenTreeClsVar) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeArgPlace) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeLabel) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreePhiArg) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeAllocObj) <= TREE_NODE_SZ_LARGE); // *** large node
#ifndef FEATURE_UNIX_AMD64_STRUCT_PASSING
- static_assert_no_msg(sizeof(GenTreePutArgStk) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreePutArgStk) <= TREE_NODE_SZ_SMALL);
#else // FEATURE_UNIX_AMD64_STRUCT_PASSING
- static_assert_no_msg(sizeof(GenTreePutArgStk) <= TREE_NODE_SZ_LARGE);
+ static_assert_no_msg(sizeof(GenTreePutArgStk) <= TREE_NODE_SZ_LARGE);
#endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
#ifdef FEATURE_SIMD
- static_assert_no_msg(sizeof(GenTreeSIMD) <= TREE_NODE_SZ_SMALL);
+ static_assert_no_msg(sizeof(GenTreeSIMD) <= TREE_NODE_SZ_SMALL);
#endif // FEATURE_SIMD
+ // clang-format on
}
size_t GenTree::GetNodeSize() const
}
#endif
+/*****************************************************************************
+ *
+ * When 'NODEBASH_STATS' is enabled in "jit.h" we record all instances of
+ * an existing GenTree node having its operator changed. This can be useful
+ * for two (related) things - to see what is being bashed (and what isn't),
+ * and to verify that the existing choices for what nodes are marked 'large'
+ * are reasonable (to minimize "wasted" space).
+ *
+ * And yes, the hash function / logic is simplistic, but it is conflict-free
+ * and transparent for what we need.
+ */
+
+#if NODEBASH_STATS
+
+#define BASH_HASH_SIZE 211
+
+// Returns the full (pre-modulo) hash for an old->new oper pair. Callers
+// reduce it modulo BASH_HASH_SIZE to pick a bucket; keeping the full hash
+// in the bucket is what lets RecordOperBashing detect bucket conflicts.
+// Note: must have an explicit return type -- C++ has no implicit int.
+inline unsigned hashme(genTreeOps op1, genTreeOps op2) { return (op1 * 104729) ^ (op2 * 56569); }
+
+struct BashHashDsc
+{
+    unsigned __int32 bhFullHash; // the hash value (unique for all old->new pairs)
+    unsigned __int32 bhCount;    // the same old->new bashings seen so far
+    unsigned __int8  bhOperOld;  // original gtOper
+    unsigned __int8  bhOperNew;  // new gtOper
+};
+
+static BashHashDsc BashHash[BASH_HASH_SIZE];
+
+// Records one oper bashing (operOld changed to operNew) in the stats table.
+void GenTree::RecordOperBashing(genTreeOps operOld, genTreeOps operNew)
+{
+    unsigned     hash = hashme(operOld, operNew);
+    BashHashDsc* desc = BashHash + (hash % BASH_HASH_SIZE);
+
+    // Compare against the full hash (not the bucket index) so that two
+    // distinct pairs landing in the same bucket are detected as a conflict.
+    if (desc->bhFullHash != hash)
+    {
+        noway_assert(desc->bhCount == 0); // if this ever fires, need to fix the hash fn
+        desc->bhFullHash = hash;
+    }
+
+    desc->bhCount += 1;
+    desc->bhOperOld = operOld;
+    desc->bhOperNew = operNew;
+}
+
+// Dumps the recorded oper-bashing statistics to 'f'. For every populated
+// hash bucket, prints the old and new oper names, the true node sizes of
+// each (from s_gtTrueSizes), a marker 'X' when the new oper's true size
+// exceeds the old one's (i.e. the bashing outgrew the original node), and
+// the occurrence count; finishes with the grand total of all bashings.
+void GenTree::ReportOperBashing(FILE *f)
+{
+    unsigned total = 0;
+
+    fflush(f);
+
+    fprintf(f, "\n");
+    fprintf(f, "Bashed gtOper stats:\n");
+    fprintf(f, "\n");
+    fprintf(f, "    Old operator        New operator     #bytes old->new     Count\n");
+    fprintf(f, "    ---------------------------------------------------------------\n");
+
+    for (unsigned h = 0; h < BASH_HASH_SIZE; h++)
+    {
+        unsigned count = BashHash[h].bhCount;
+        if (count == 0)
+            continue;
+
+        unsigned opOld = BashHash[h].bhOperOld;
+        unsigned opNew = BashHash[h].bhOperNew;
+
+        fprintf(f, "    GT_%-13s -> GT_%-13s [size: %3u->%3u] %c %7u\n", OpName((genTreeOps)opOld),
+                OpName((genTreeOps)opNew),
+                s_gtTrueSizes[opOld],
+                s_gtTrueSizes[opNew],
+                (s_gtTrueSizes[opOld] < s_gtTrueSizes[opNew]) ? 'X' : ' ',
+                count);
+        total += count;
+    }
+    fprintf(f, "\n");
+    fprintf(f, "Total bashings: %u\n", total);
+    fprintf(f, "\n");
+
+    fflush(f);
+}
+
+#endif// NODEBASH_STATS
+
#else // SMALL_TREE_NODES
#ifdef DEBUG
}
}
-//
+//
//------------------------------------------------------------------------
// gtBlockOpInit: Initializes a BlkOp GenTree
//
// dst - the target (destination) we want to either initialize or copy to.
// src - the init value for InitBlk or the source struct for CpBlk/CpObj.
// isVolatile - specifies whether this node is a volatile memory operation.
-//
+//
// Assumptions:
// 'result' is an assignment that is newly constructed.
// If 'dst' is TYP_STRUCT, then it must be a block node or lclVar.
/*****************************************************************************/
void GenTree::CopyTo(class Compiler* comp, const GenTree& gt)
{
- gtOper = gt.gtOper;
+ SetOperRaw(gt.OperGet());
+
gtType = gt.gtType;
gtAssertionNum = gt.gtAssertionNum;
// Don't fold conversions of +inf/-inf to integral value on all platforms
// as the value returned by JIT helper doesn't match with the C compiler's cast result.
- // We want the behavior to be same with or without folding.
+ // We want the behavior to be same with or without folding.
return tree;
}
- if (d1 <= -1.0 && varTypeIsUnsigned(tree->CastToType()))
+ if (d1 <= -1.0 && varTypeIsUnsigned(tree->CastToType()))
{
// Don't fold conversions of these cases becasue the result is unspecified per ECMA spec
// and the native math doing the fold doesn't match the run-time computation on all platforms.
// We want the behavior to be same with or without folding.
return tree;
}
-
+
switch (tree->CastToType())
{
case TYP_BYTE:
// effect of this instruction, change it into a GT_LOCKADD node (the add only)
if (oper == GT_XADD)
{
- expr->gtOper = GT_LOCKADD;
+ expr->SetOperRaw(GT_LOCKADD);
expr->gtType = TYP_VOID;
}
#endif
/*****************************************************************************/
//
-// Node enum
-// , "Node name"
-// ,commutative
-// ,operKind
+// Node enum
+// ,"Node name"
+// ,GenTree struct flavor
+// ,commutative
+// ,operKind
-GTNODE(NONE , "<none>" ,0,GTK_SPECIAL)
+GTNODE(NONE , "<none>" ,char ,0,GTK_SPECIAL)
//-----------------------------------------------------------------------------
// Leaf nodes (i.e. these nodes have no sub-operands):
//-----------------------------------------------------------------------------
-GTNODE(LCL_VAR , "lclVar" ,0,GTK_LEAF|GTK_LOCAL) // local variable
-GTNODE(LCL_FLD , "lclFld" ,0,GTK_LEAF|GTK_LOCAL) // field in a non-primitive variable
-GTNODE(LCL_VAR_ADDR , "&lclVar" ,0,GTK_LEAF) // address of local variable
-GTNODE(LCL_FLD_ADDR , "&lclFld" ,0,GTK_LEAF) // address of field in a non-primitive variable
-GTNODE(STORE_LCL_VAR , "st.lclVar" ,0,GTK_UNOP|GTK_LOCAL|GTK_NOVALUE) // store to local variable
-GTNODE(STORE_LCL_FLD , "st.lclFld" ,0,GTK_UNOP|GTK_LOCAL|GTK_NOVALUE) // store to field in a non-primitive variable
-GTNODE(CATCH_ARG , "catchArg" ,0,GTK_LEAF) // Exception object in a catch block
-GTNODE(LABEL , "codeLabel" ,0,GTK_LEAF) // Jump-target
-GTNODE(FTN_ADDR , "ftnAddr" ,0,GTK_LEAF) // Address of a function
-GTNODE(RET_EXPR , "retExpr" ,0,GTK_LEAF) // Place holder for the return expression from an inline candidate
+GTNODE(LCL_VAR , "lclVar" ,GenTreeLclVar ,0,GTK_LEAF|GTK_LOCAL) // local variable
+GTNODE(LCL_FLD , "lclFld" ,GenTreeLclFld ,0,GTK_LEAF|GTK_LOCAL) // field in a non-primitive variable
+GTNODE(LCL_VAR_ADDR , "&lclVar" ,GenTreeLclVar ,0,GTK_LEAF) // address of local variable
+GTNODE(LCL_FLD_ADDR , "&lclFld" ,GenTreeLclFld ,0,GTK_LEAF) // address of field in a non-primitive variable
+GTNODE(STORE_LCL_VAR , "st.lclVar" ,GenTreeLclVar ,0,GTK_UNOP|GTK_LOCAL|GTK_NOVALUE) // store to local variable
+GTNODE(STORE_LCL_FLD , "st.lclFld" ,GenTreeLclFld ,0,GTK_UNOP|GTK_LOCAL|GTK_NOVALUE) // store to field in a non-primitive variable
+GTNODE(CATCH_ARG , "catchArg" ,GenTree ,0,GTK_LEAF) // Exception object in a catch block
+GTNODE(LABEL , "codeLabel" ,GenTreeLabel ,0,GTK_LEAF) // Jump-target
+GTNODE(FTN_ADDR , "ftnAddr" ,GenTreeFptrVal ,0,GTK_LEAF) // Address of a function
+GTNODE(RET_EXPR , "retExpr" ,GenTreeRetExpr ,0,GTK_LEAF) // Place holder for the return expression from an inline candidate
//-----------------------------------------------------------------------------
// Constant nodes:
//-----------------------------------------------------------------------------
-GTNODE(CNS_INT , "const" ,0,GTK_LEAF|GTK_CONST)
-GTNODE(CNS_LNG , "lconst" ,0,GTK_LEAF|GTK_CONST)
-GTNODE(CNS_DBL , "dconst" ,0,GTK_LEAF|GTK_CONST)
-GTNODE(CNS_STR , "sconst" ,0,GTK_LEAF|GTK_CONST)
+GTNODE(CNS_INT , "const" ,GenTreeIntCon ,0,GTK_LEAF|GTK_CONST)
+GTNODE(CNS_LNG , "lconst" ,GenTreeLngCon ,0,GTK_LEAF|GTK_CONST)
+GTNODE(CNS_DBL , "dconst" ,GenTreeDblCon ,0,GTK_LEAF|GTK_CONST)
+GTNODE(CNS_STR , "sconst" ,GenTreeStrCon ,0,GTK_LEAF|GTK_CONST)
//-----------------------------------------------------------------------------
// Unary operators (1 operand):
//-----------------------------------------------------------------------------
-GTNODE(NOT , "~" ,0,GTK_UNOP)
-GTNODE(NOP , "nop" ,0,GTK_UNOP)
-GTNODE(NEG , "unary -" ,0,GTK_UNOP)
-GTNODE(COPY , "copy" ,0,GTK_UNOP) // Copies a variable from its current location to a register that satisfies
- // code generation constraints. The child is the actual lclVar node.
-GTNODE(RELOAD , "reload" ,0,GTK_UNOP)
-GTNODE(CHS , "flipsign" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR) // GT_CHS is actually unary -- op2 is ignored.
- // Changing to unary presently causes problems, though -- take a little work to fix.
-
-GTNODE(ARR_LENGTH , "arrLen" ,0,GTK_UNOP|GTK_EXOP) // array-length
-
-GTNODE(INTRINSIC , "intrinsic" ,0,GTK_BINOP|GTK_EXOP) // intrinsics
-
-GTNODE(LOCKADD , "lockAdd" ,0,GTK_BINOP|GTK_NOVALUE)
-GTNODE(XADD , "XAdd" ,0,GTK_BINOP)
-GTNODE(XCHG , "Xchg" ,0,GTK_BINOP)
-GTNODE(CMPXCHG , "cmpxchg" ,0,GTK_SPECIAL)
-GTNODE(MEMORYBARRIER , "memoryBarrier" ,0,GTK_LEAF|GTK_NOVALUE)
-
-GTNODE(CAST , "cast" ,0,GTK_UNOP|GTK_EXOP) // conversion to another type
-GTNODE(CKFINITE , "ckfinite" ,0,GTK_UNOP) // Check for NaN
-GTNODE(LCLHEAP , "lclHeap" ,0,GTK_UNOP) // alloca()
-GTNODE(JMP , "jump" ,0,GTK_LEAF|GTK_NOVALUE) // Jump to another function
-
-
-GTNODE(ADDR , "addr" ,0,GTK_UNOP) // address of
-GTNODE(IND , "indir" ,0,GTK_UNOP) // load indirection
-GTNODE(STOREIND , "storeIndir" ,0,GTK_BINOP|GTK_NOVALUE) // store indirection
-
- // TODO-Cleanup: GT_ARR_BOUNDS_CHECK should be made a GTK_BINOP now that it has only two child nodes
-GTNODE(ARR_BOUNDS_CHECK , "arrBndsChk" ,0,GTK_SPECIAL|GTK_NOVALUE) // array bounds check
-GTNODE(OBJ , "obj" ,0,GTK_UNOP|GTK_EXOP) // Object that MAY have gc pointers, and thus includes the relevant gc layout info.
-GTNODE(STORE_OBJ , "storeObj" ,0,GTK_BINOP|GTK_EXOP|GTK_NOVALUE) // Object that MAY have gc pointers, and thus includes the relevant gc layout info.
-GTNODE(BLK , "blk" ,0,GTK_UNOP) // Block/object with no gc pointers, and with a known size (e.g. a struct with no gc fields)
-GTNODE(STORE_BLK , "storeBlk" ,0,GTK_BINOP|GTK_NOVALUE) // Block/object with no gc pointers, and with a known size (e.g. a struct with no gc fields)
-GTNODE(DYN_BLK , "DynBlk" ,0,GTK_SPECIAL) // Dynamically sized block object
-GTNODE(STORE_DYN_BLK , "storeDynBlk" ,0,GTK_SPECIAL|GTK_NOVALUE) // Dynamically sized block object
-GTNODE(BOX , "box" ,0,GTK_UNOP|GTK_EXOP|GTK_NOTLIR)
+GTNODE(NOT , "~" ,GenTreeOp ,0,GTK_UNOP)
+GTNODE(NOP , "nop" ,GenTree ,0,GTK_UNOP)
+GTNODE(NEG , "unary -" ,GenTreeOp ,0,GTK_UNOP)
+GTNODE(COPY , "copy" ,GenTreeCopyOrReload,0,GTK_UNOP) // Copies a variable from its current location to a register that satisfies
+ // code generation constraints. The child is the actual lclVar node.
+GTNODE(RELOAD , "reload" ,GenTreeCopyOrReload,0,GTK_UNOP)
+GTNODE(CHS , "flipsign" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR) // GT_CHS is actually unary -- op2 is ignored.
+ // Changing to unary presently causes problems, though -- take a little work to fix.
+
+GTNODE(ARR_LENGTH , "arrLen" ,GenTreeArrLen ,0,GTK_UNOP|GTK_EXOP) // array-length
+
+GTNODE(INTRINSIC , "intrinsic" ,GenTreeIntrinsic ,0,GTK_BINOP|GTK_EXOP) // intrinsics
+
+GTNODE(LOCKADD , "lockAdd" ,GenTreeOp ,0,GTK_BINOP|GTK_NOVALUE)
+GTNODE(XADD , "XAdd" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(XCHG , "Xchg" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(CMPXCHG , "cmpxchg" ,GenTreeCmpXchg ,0,GTK_SPECIAL)
+GTNODE(MEMORYBARRIER , "memoryBarrier",GenTree ,0,GTK_LEAF|GTK_NOVALUE)
+
+GTNODE(CAST , "cast" ,GenTreeCast ,0,GTK_UNOP|GTK_EXOP) // conversion to another type
+GTNODE(CKFINITE , "ckfinite" ,GenTreeOp ,0,GTK_UNOP) // Check for NaN
+GTNODE(LCLHEAP , "lclHeap" ,GenTreeOp ,0,GTK_UNOP) // alloca()
+GTNODE(JMP , "jump" ,GenTreeVal ,0,GTK_LEAF|GTK_NOVALUE) // Jump to another function
+
+GTNODE(ADDR , "addr" ,GenTreeOp ,0,GTK_UNOP) // address of
+GTNODE(IND , "indir" ,GenTreeOp ,0,GTK_UNOP) // load indirection
+GTNODE(STOREIND , "storeIndir" ,GenTreeStoreInd ,0,GTK_BINOP|GTK_NOVALUE) // store indirection
+
+ // TODO-Cleanup: GT_ARR_BOUNDS_CHECK should be made a GTK_BINOP now that it has only two child nodes
+GTNODE(ARR_BOUNDS_CHECK , "arrBndsChk" ,GenTreeBoundsChk ,0,GTK_SPECIAL|GTK_NOVALUE)// array bounds check
+GTNODE(OBJ , "obj" ,GenTreeObj ,0,GTK_UNOP|GTK_EXOP) // Object that MAY have gc pointers, and thus includes the relevant gc layout info.
+GTNODE(STORE_OBJ , "storeObj" ,GenTreeBlk ,0,GTK_BINOP|GTK_EXOP|GTK_NOVALUE) // Object that MAY have gc pointers, and thus includes the relevant gc layout info.
+GTNODE(BLK , "blk" ,GenTreeBlk ,0,GTK_UNOP) // Block/object with no gc pointers, and with a known size (e.g. a struct with no gc fields)
+GTNODE(STORE_BLK , "storeBlk" ,GenTreeBlk ,0,GTK_BINOP|GTK_NOVALUE) // Block/object with no gc pointers, and with a known size (e.g. a struct with no gc fields)
+GTNODE(DYN_BLK , "DynBlk" ,GenTreeBlk ,0,GTK_SPECIAL) // Dynamically sized block object
+GTNODE(STORE_DYN_BLK , "storeDynBlk" ,GenTreeBlk ,0,GTK_SPECIAL|GTK_NOVALUE)// Dynamically sized block object
+GTNODE(BOX , "box" ,GenTreeBox ,0,GTK_UNOP|GTK_EXOP|GTK_NOTLIR)
#ifdef FEATURE_SIMD
-GTNODE(SIMD_CHK , "simdChk" ,0,GTK_SPECIAL|GTK_NOVALUE) // Compare whether an index is less than the given SIMD vector length, and call CORINFO_HELP_RNGCHKFAIL if not.
- // TODO-CQ: In future may want to add a field that specifies different exceptions but we'll
- // need VM assistance for that.
- // TODO-CQ: It would actually be very nice to make this an unconditional throw, and expose the control flow that
- // does the compare, so that it can be more easily optimized. But that involves generating qmarks at import time...
+GTNODE(SIMD_CHK , "simdChk" ,GenTreeBoundsChk ,0,GTK_SPECIAL|GTK_NOVALUE)// Compare whether an index is less than the given SIMD vector length, and call CORINFO_HELP_RNGCHKFAIL if not.
+ // TODO-CQ: In future may want to add a field that specifies different exceptions but we'll
+ // need VM assistance for that.
+ // TODO-CQ: It would actually be very nice to make this an unconditional throw, and expose the control flow that
+ // does the compare, so that it can be more easily optimized. But that involves generating qmarks at import time...
#endif // FEATURE_SIMD
-GTNODE(ALLOCOBJ , "allocObj" ,0,GTK_UNOP|GTK_EXOP) // object allocator
+GTNODE(ALLOCOBJ , "allocObj" ,GenTreeAllocObj ,0,GTK_UNOP|GTK_EXOP) // object allocator
//-----------------------------------------------------------------------------
// Binary operators (2 operands):
//-----------------------------------------------------------------------------
-GTNODE(ADD , "+" ,1,GTK_BINOP)
-GTNODE(SUB , "-" ,0,GTK_BINOP)
-GTNODE(MUL , "*" ,1,GTK_BINOP)
-GTNODE(DIV , "/" ,0,GTK_BINOP)
-GTNODE(MOD , "%" ,0,GTK_BINOP)
+GTNODE(ADD , "+" ,GenTreeOp ,1,GTK_BINOP)
+GTNODE(SUB , "-" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(MUL , "*" ,GenTreeOp ,1,GTK_BINOP)
+GTNODE(DIV , "/" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(MOD , "%" ,GenTreeOp ,0,GTK_BINOP)
-GTNODE(UDIV , "un-/" ,0,GTK_BINOP)
-GTNODE(UMOD , "un-%" ,0,GTK_BINOP)
+GTNODE(UDIV , "un-/" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(UMOD , "un-%" ,GenTreeOp ,0,GTK_BINOP)
-GTNODE(OR , "|" ,1,GTK_BINOP|GTK_LOGOP)
-GTNODE(XOR , "^" ,1,GTK_BINOP|GTK_LOGOP)
-GTNODE(AND , "&" ,1,GTK_BINOP|GTK_LOGOP)
+GTNODE(OR , "|" ,GenTreeOp ,1,GTK_BINOP|GTK_LOGOP)
+GTNODE(XOR , "^" ,GenTreeOp ,1,GTK_BINOP|GTK_LOGOP)
+GTNODE(AND , "&" ,GenTreeOp ,1,GTK_BINOP|GTK_LOGOP)
-GTNODE(LSH , "<<" ,0,GTK_BINOP)
-GTNODE(RSH , ">>" ,0,GTK_BINOP)
-GTNODE(RSZ , ">>>" ,0,GTK_BINOP)
-GTNODE(ROL , "rol" ,0,GTK_BINOP)
-GTNODE(ROR , "ror" ,0,GTK_BINOP)
-GTNODE(MULHI , "mulhi" ,1,GTK_BINOP) // returns high bits (top N bits of the 2N bit result of an NxN multiply)
- // GT_MULHI is used in division by a constant (fgMorphDivByConst). We turn
- // the div into a MULHI + some adjustments. In codegen, we only use the
- // results of the high register, and we drop the low results.
+GTNODE(LSH , "<<" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(RSH , ">>" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(RSZ , ">>>" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(ROL , "rol" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(ROR , "ror" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(MULHI , "mulhi" ,GenTreeOp ,1,GTK_BINOP) // returns high bits (top N bits of the 2N bit result of an NxN multiply)
+ // GT_MULHI is used in division by a constant (fgMorphDivByConst). We turn
+ // the div into a MULHI + some adjustments. In codegen, we only use the
+ // results of the high register, and we drop the low results.
-GTNODE(ASG , "=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_ADD , "+=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_SUB , "-=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_MUL , "*=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_DIV , "/=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_MOD , "%=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG , "=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_ADD , "+=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_SUB , "-=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_MUL , "*=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_DIV , "/=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_MOD , "%=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_UDIV , "/=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_UMOD , "%=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_UDIV , "/=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_UMOD , "%=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_OR , "|=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_XOR , "^=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_AND , "&=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_LSH , "<<=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_RSH , ">>=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(ASG_RSZ , ">>>=" ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_OR , "|=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_XOR , "^=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_AND , "&=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_LSH , "<<=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_RSH , ">>=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
+GTNODE(ASG_RSZ , ">>>=" ,GenTreeOp ,0,GTK_BINOP|GTK_ASGOP|GTK_NOTLIR)
-GTNODE(EQ , "==" ,0,GTK_BINOP|GTK_RELOP)
-GTNODE(NE , "!=" ,0,GTK_BINOP|GTK_RELOP)
-GTNODE(LT , "<" ,0,GTK_BINOP|GTK_RELOP)
-GTNODE(LE , "<=" ,0,GTK_BINOP|GTK_RELOP)
-GTNODE(GE , ">=" ,0,GTK_BINOP|GTK_RELOP)
-GTNODE(GT , ">" ,0,GTK_BINOP|GTK_RELOP)
+GTNODE(EQ , "==" ,GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
+GTNODE(NE , "!=" ,GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
+GTNODE(LT , "<" ,GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
+GTNODE(LE , "<=" ,GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
+GTNODE(GE , ">=" ,GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
+GTNODE(GT , ">" ,GenTreeOp ,0,GTK_BINOP|GTK_RELOP)
-GTNODE(COMMA , "comma" ,0,GTK_BINOP|GTK_NOTLIR)
+GTNODE(COMMA , "comma" ,GenTreeOp ,0,GTK_BINOP|GTK_NOTLIR)
-GTNODE(QMARK , "qmark" ,0,GTK_BINOP|GTK_EXOP|GTK_NOTLIR)
-GTNODE(COLON , "colon" ,0,GTK_BINOP|GTK_NOTLIR)
+GTNODE(QMARK , "qmark" ,GenTreeQmark ,0,GTK_BINOP|GTK_EXOP|GTK_NOTLIR)
+GTNODE(COLON , "colon" ,GenTreeColon ,0,GTK_BINOP|GTK_NOTLIR)
-GTNODE(INDEX , "[]" ,0,GTK_BINOP|GTK_EXOP|GTK_NOTLIR) // SZ-array-element
+GTNODE(INDEX , "[]" ,GenTreeIndex ,0,GTK_BINOP|GTK_EXOP|GTK_NOTLIR) // SZ-array-element
-GTNODE(MKREFANY , "mkrefany" ,0,GTK_BINOP)
+GTNODE(MKREFANY , "mkrefany" ,GenTreeOp ,0,GTK_BINOP)
-GTNODE(LEA , "lea" ,0,GTK_BINOP|GTK_EXOP)
+GTNODE(LEA , "lea" ,GenTreeAddrMode ,0,GTK_BINOP|GTK_EXOP)
#if !defined(LEGACY_BACKEND) && !defined(_TARGET_64BIT_)
// A GT_LONG node simply represents the long value produced by the concatenation
// of its two (lower and upper half) operands. Some GT_LONG nodes are transient,
// during the decomposing of longs; others are handled by codegen as operands of
// nodes such as calls, returns and stores of long lclVars.
-GTNODE(LONG , "gt_long" ,0,GTK_BINOP)
+GTNODE(LONG , "gt_long" ,GenTreeOp ,0,GTK_BINOP)
// The following are nodes representing x86 specific long operators, including
// high operators of a 64-bit operations that requires a carry/borrow, which are
// named GT_XXX_HI for consistency, low operators of 64-bit operations that need
// to not be modified in phases post-decompose, and operators that return 64-bit
// results in one instruction.
-GTNODE(ADD_LO , "+Lo" ,1,GTK_BINOP)
-GTNODE(ADD_HI , "+Hi" ,1,GTK_BINOP)
-GTNODE(SUB_LO , "-Lo" ,0,GTK_BINOP)
-GTNODE(SUB_HI , "-Hi" ,0,GTK_BINOP)
-GTNODE(DIV_HI , "/Hi" ,0,GTK_BINOP)
-GTNODE(MOD_HI , "%Hi" ,0,GTK_BINOP)
-GTNODE(MUL_LONG , "*long" ,1,GTK_BINOP) // A mul that returns the 2N bit result of an NxN multiply. This op
- // is used for x86 multiplies that take two ints and return a long
- // result. All other multiplies with long results are morphed into
- // helper calls. It is similar to GT_MULHI, the difference being that
- // GT_MULHI drops the lo part of the result, whereas GT_MUL_LONG keeps
- // both parts of the result.
+GTNODE(ADD_LO , "+Lo" ,GenTreeOp ,1,GTK_BINOP)
+GTNODE(ADD_HI , "+Hi" ,GenTreeOp ,1,GTK_BINOP)
+GTNODE(SUB_LO , "-Lo" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(SUB_HI , "-Hi" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(DIV_HI , "/Hi" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(MOD_HI , "%Hi" ,GenTreeOp ,0,GTK_BINOP)
+GTNODE(MUL_LONG , "*long" ,GenTreeOp ,1,GTK_BINOP) // A mul that returns the 2N bit result of an NxN multiply. This op
+ // is used for x86 multiplies that take two ints and return a long
+ // result. All other multiplies with long results are morphed into
+ // helper calls. It is similar to GT_MULHI, the difference being that
+ // GT_MULHI drops the lo part of the result, whereas GT_MUL_LONG keeps
+ // both parts of the result.
#endif // !defined(LEGACY_BACKEND) && !defined(_TARGET_64BIT_)
#ifdef FEATURE_SIMD
-GTNODE(SIMD , "simd" ,0,GTK_BINOP|GTK_EXOP) // SIMD functions/operators/intrinsics
+GTNODE(SIMD , "simd" ,GenTreeSIMD ,0,GTK_BINOP|GTK_EXOP) // SIMD functions/operators/intrinsics
#endif // FEATURE_SIMD
//-----------------------------------------------------------------------------
// Other nodes that look like unary/binary operators:
//-----------------------------------------------------------------------------
-// The following are both conditional branches. GT_JTRUE has a single operand that computes a condition. GT_JCC
-// implicitly reads the condition bits from a previous operation. The latter is allowed only in the LIR form
-// used in the RyuJIT backend.
-GTNODE(JTRUE , "jmpTrue" ,0,GTK_UNOP|GTK_NOVALUE)
-GTNODE(JCC , "jcc" ,0,GTK_LEAF|GTK_NOVALUE)
+// The following are both conditional branches. GT_JTRUE has a single operand that computes a condition. GT_JCC
+// implicitly reads the condition bits from a previous operation. The latter is allowed only in the LIR form
+// used in the RyuJIT backend.
+GTNODE(JTRUE , "jmpTrue" ,GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE)
+GTNODE(JCC , "jcc" ,GenTreeJumpCC ,0,GTK_LEAF|GTK_NOVALUE)
-GTNODE(LIST , "<list>" ,0,GTK_BINOP)
+GTNODE(LIST , "<list>" ,GenTreeOp ,0,GTK_BINOP)
//-----------------------------------------------------------------------------
// Other nodes that have special structure:
//-----------------------------------------------------------------------------
-GTNODE(FIELD , "field" ,0,GTK_SPECIAL) // Member-field
-GTNODE(ARR_ELEM , "arrMD&" ,0,GTK_SPECIAL) // Multi-dimensional array-element address
-GTNODE(ARR_INDEX , "arrMDIdx" ,0,GTK_BINOP|GTK_EXOP) // Effective, bounds-checked index for one dimension of a multi-dimensional array element
-GTNODE(ARR_OFFSET , "arrMDOffs" ,0,GTK_SPECIAL) // Flattened offset of multi-dimensional array element
-GTNODE(CALL , "call()" ,0,GTK_SPECIAL)
+GTNODE(FIELD , "field" ,GenTreeField ,0,GTK_SPECIAL) // Member-field
+GTNODE(ARR_ELEM , "arrMD&" ,GenTreeArrElem ,0,GTK_SPECIAL) // Multi-dimensional array-element address
+GTNODE(ARR_INDEX , "arrMDIdx" ,GenTreeArrIndex ,0,GTK_BINOP|GTK_EXOP) // Effective, bounds-checked index for one dimension of a multi-dimensional array element
+GTNODE(ARR_OFFSET , "arrMDOffs" ,GenTreeArrOffs ,0,GTK_SPECIAL) // Flattened offset of multi-dimensional array element
+GTNODE(CALL , "call()" ,GenTreeCall ,0,GTK_SPECIAL)
//-----------------------------------------------------------------------------
// Statement operator nodes:
//-----------------------------------------------------------------------------
-GTNODE(BEG_STMTS , "begStmts" ,0,GTK_SPECIAL|GTK_NOVALUE) // used only temporarily in importer by impBegin/EndTreeList()
-GTNODE(STMT , "stmtExpr" ,0,GTK_SPECIAL|GTK_NOVALUE) // top-level list nodes in bbTreeList
+GTNODE(BEG_STMTS , "begStmts" ,GenTree ,0,GTK_SPECIAL|GTK_NOVALUE) // used only temporarily in importer by impBegin/EndTreeList()
+GTNODE(STMT , "stmtExpr" ,GenTreeStmt ,0,GTK_SPECIAL|GTK_NOVALUE) // top-level list nodes in bbTreeList
-GTNODE(RETURN , "return" ,0,GTK_UNOP|GTK_NOVALUE) // return from current function
-GTNODE(SWITCH , "switch" ,0,GTK_UNOP|GTK_NOVALUE) // switch
+GTNODE(RETURN , "return" ,GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE) // return from current function
+GTNODE(SWITCH , "switch" ,GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE) // switch
-GTNODE(NO_OP , "no_op" ,0,GTK_LEAF|GTK_NOVALUE) // nop!
+GTNODE(NO_OP , "no_op" ,GenTree ,0,GTK_LEAF|GTK_NOVALUE) // nop!
-GTNODE(START_NONGC, "start_nongc",0,GTK_LEAF|GTK_NOVALUE) // starts a new instruction group that will be non-gc interruptible
+GTNODE(START_NONGC , "start_nongc" ,GenTree ,0,GTK_LEAF|GTK_NOVALUE) // starts a new instruction group that will be non-gc interruptible
-GTNODE(PROF_HOOK , "prof_hook" ,0,GTK_LEAF|GTK_NOVALUE) // profiler Enter/Leave/TailCall hook
+GTNODE(PROF_HOOK , "prof_hook" ,GenTree ,0,GTK_LEAF|GTK_NOVALUE) // profiler Enter/Leave/TailCall hook
-GTNODE(RETFILT , "retfilt", 0,GTK_UNOP|GTK_NOVALUE) // end filter with TYP_I_IMPL return value
+GTNODE(RETFILT , "retfilt" ,GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE) // end filter with TYP_I_IMPL return value
#if !FEATURE_EH_FUNCLETS
-GTNODE(END_LFIN , "endLFin" ,0,GTK_LEAF|GTK_NOVALUE) // end locally-invoked finally
+GTNODE(END_LFIN , "endLFin" ,GenTreeVal ,0,GTK_LEAF|GTK_NOVALUE) // end locally-invoked finally
#endif // !FEATURE_EH_FUNCLETS
//-----------------------------------------------------------------------------
// Nodes used for optimizations.
//-----------------------------------------------------------------------------
-GTNODE(PHI , "phi" ,0,GTK_UNOP) // phi node for ssa.
-GTNODE(PHI_ARG , "phiArg" ,0,GTK_LEAF|GTK_LOCAL) // phi(phiarg, phiarg, phiarg)
+GTNODE(PHI , "phi" ,GenTreeOp ,0,GTK_UNOP) // phi node for ssa.
+GTNODE(PHI_ARG , "phiArg" ,GenTreePhiArg ,0,GTK_LEAF|GTK_LOCAL) // phi(phiarg, phiarg, phiarg)
//-----------------------------------------------------------------------------
// Nodes used by Lower to generate a closer CPU representation of other nodes
//-----------------------------------------------------------------------------
-GTNODE(JMPTABLE , "jumpTable" , 0, GTK_LEAF) // Generates the jump table for switches
-GTNODE(SWITCH_TABLE, "tableSwitch", 0, GTK_BINOP|GTK_NOVALUE) // Jump Table based switch construct
+GTNODE(JMPTABLE , "jumpTable" ,GenTreeJumpTable ,0, GTK_LEAF) // Generates the jump table for switches
+GTNODE(SWITCH_TABLE , "tableSwitch" ,GenTreeOp ,0, GTK_BINOP|GTK_NOVALUE) // Jump Table based switch construct
//-----------------------------------------------------------------------------
// Nodes used only within the code generator:
//-----------------------------------------------------------------------------
-GTNODE(REG_VAR , "regVar" ,0,GTK_LEAF|GTK_LOCAL) // register variable
-GTNODE(CLS_VAR , "clsVar" ,0,GTK_LEAF) // static data member
-GTNODE(CLS_VAR_ADDR , "&clsVar" ,0,GTK_LEAF) // static data member address
-GTNODE(STORE_CLS_VAR, "st.clsVar" ,0,GTK_LEAF|GTK_NOVALUE) // store to static data member
-GTNODE(ARGPLACE , "argPlace" ,0,GTK_LEAF) // placeholder for a register arg
-GTNODE(NULLCHECK , "nullcheck" ,0,GTK_UNOP|GTK_NOVALUE) // null checks the source
-GTNODE(PHYSREG , "physregSrc" ,0,GTK_LEAF) // read from a physical register
-GTNODE(PHYSREGDST , "physregDst" ,0,GTK_UNOP|GTK_NOVALUE) // write to a physical register
-GTNODE(EMITNOP , "emitnop" ,0,GTK_LEAF|GTK_NOVALUE) // emitter-placed nop
-GTNODE(PINVOKE_PROLOG,"pinvoke_prolog",0,GTK_LEAF|GTK_NOVALUE) // pinvoke prolog seq
-GTNODE(PINVOKE_EPILOG,"pinvoke_epilog",0,GTK_LEAF|GTK_NOVALUE) // pinvoke epilog seq
-GTNODE(PUTARG_REG , "putarg_reg" ,0,GTK_UNOP) // operator that places outgoing arg in register
-GTNODE(PUTARG_STK , "putarg_stk" ,0,GTK_UNOP) // operator that places outgoing arg in stack
-GTNODE(RETURNTRAP , "returnTrap" ,0,GTK_UNOP|GTK_NOVALUE) // a conditional call to wait on gc
-GTNODE(SWAP , "swap" ,0,GTK_BINOP|GTK_NOVALUE) // op1 and op2 swap (registers)
-GTNODE(IL_OFFSET , "il_offset" ,0,GTK_LEAF|GTK_NOVALUE) // marks an IL offset for debugging purposes
+GTNODE(REG_VAR , "regVar" ,GenTreeLclVar ,0,GTK_LEAF|GTK_LOCAL) // register variable
+GTNODE(CLS_VAR , "clsVar" ,GenTreeClsVar ,0,GTK_LEAF) // static data member
+GTNODE(CLS_VAR_ADDR , "&clsVar" ,GenTreeClsVar ,0,GTK_LEAF) // static data member address
+GTNODE(STORE_CLS_VAR , "st.clsVar" ,GenTreeClsVar ,0,GTK_LEAF|GTK_NOVALUE) // store to static data member
+GTNODE(ARGPLACE , "argPlace" ,GenTreeArgPlace ,0,GTK_LEAF) // placeholder for a register arg
+GTNODE(NULLCHECK , "nullcheck" ,GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE) // null checks the source
+GTNODE(PHYSREG , "physregSrc" ,GenTreePhysReg ,0,GTK_LEAF) // read from a physical register
+GTNODE(PHYSREGDST , "physregDst" ,GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE) // write to a physical register
+GTNODE(EMITNOP , "emitnop" ,GenTree ,0,GTK_LEAF|GTK_NOVALUE) // emitter-placed nop
+GTNODE(PINVOKE_PROLOG ,"pinvoke_prolog",GenTree ,0,GTK_LEAF|GTK_NOVALUE) // pinvoke prolog seq
+GTNODE(PINVOKE_EPILOG ,"pinvoke_epilog",GenTree ,0,GTK_LEAF|GTK_NOVALUE) // pinvoke epilog seq
+GTNODE(PUTARG_REG , "putarg_reg" ,GenTreeOp ,0,GTK_UNOP) // operator that places outgoing arg in register
+GTNODE(PUTARG_STK , "putarg_stk" ,GenTreePutArgStk ,0,GTK_UNOP) // operator that places outgoing arg in stack
+GTNODE(RETURNTRAP , "returnTrap" ,GenTreeOp ,0,GTK_UNOP|GTK_NOVALUE) // a conditional call to wait on gc
+GTNODE(SWAP , "swap" ,GenTreeOp ,0,GTK_BINOP|GTK_NOVALUE) // op1 and op2 swap (registers)
+GTNODE(IL_OFFSET , "il_offset" ,GenTreeStmt ,0,GTK_LEAF|GTK_NOVALUE) // marks an IL offset for debugging purposes
/*****************************************************************************/
#undef GTNODE