1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
10 XX This is the node in the semantic tree graph. It represents the operation XX
11 XX corresponding to the node, and other information during code-gen. XX
13 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
17 /*****************************************************************************/
20 /*****************************************************************************/
22 #include "vartype.h" // For "var_types"
23 #include "target.h" // For "regNumber"
24 #include "ssaconfig.h" // For "SsaConfig::RESERVED_SSA_NUM"
26 #include "valuenumtype.h"
28 #include "jithashtable.h"
31 #include "namedintrinsiclist.h"
33 // Debugging GenTree is much easier if we add a magic virtual function to make the debugger able to figure out what type
34 // it's got. This is enabled by default in DEBUG. To enable it in RET builds (temporarily!), you need to change the
35 // build to define DEBUGGABLE_GENTREE=1, as well as pass /OPT:NOICF to the linker (or else all the vtables get merged,
36 // making the debugging value supplied by them useless). See protojit.nativeproj for a commented example of setting the
37 // build flags correctly.
38 #ifndef DEBUGGABLE_GENTREE
40 #define DEBUGGABLE_GENTREE 1
42 #define DEBUGGABLE_GENTREE 0
44 #endif // !DEBUGGABLE_GENTREE
46 // The SpecialCodeKind enum is used to indicate the type of special (unique)
47 // target block that will be targeted by an instruction.
49 // GenTreeBoundsChk nodes (SCK_RNGCHK_FAIL, SCK_ARG_EXCPN, SCK_ARG_RNG_EXCPN)
50 // - these nodes have a field (gtThrowKind) to indicate which kind
51 // GenTreeOps nodes, for which codegen will generate the branch
52 // - it will use the appropriate kind based on the opcode, though it's not
53 // clear why SCK_OVERFLOW == SCK_ARITH_EXCPN
54 // SCK_PAUSE_EXEC is not currently used.
59 SCK_RNGCHK_FAIL, // target when range check fails
60 SCK_PAUSE_EXEC, // target to stop (e.g. to allow GC)
61 SCK_DIV_BY_ZERO, // target for divide by zero (Not used on X86/X64)
62 SCK_ARITH_EXCPN, // target on arithmetic exception
63 SCK_OVERFLOW = SCK_ARITH_EXCPN, // target on overflow
64 SCK_ARG_EXCPN, // target on ArgumentException (currently used only for SIMD intrinsics)
65 SCK_ARG_RNG_EXCPN, // target on ArgumentOutOfRangeException (currently used only for SIMD intrinsics)
69 /*****************************************************************************/
71 enum genTreeOps : BYTE
73 #define GTNODE(en, st, cm, ok) GT_##en,
79 // GT_CNS_NATIVELONG is the gtOper symbol for GT_CNS_LNG or GT_CNS_INT, depending on the target.
80 // For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes
81 GT_CNS_NATIVELONG = GT_CNS_INT,
83 // For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
84 // In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG
85 GT_CNS_NATIVELONG = GT_CNS_LNG,
89 /*****************************************************************************
91 * The following enum defines a set of bit flags that can be used
92 * to classify expression tree nodes. Note that some operators will
93 * have more than one bit set, as follows:
95 * GTK_CONST implies GTK_LEAF
96 * GTK_RELOP implies GTK_BINOP
97 * GTK_LOGOP implies GTK_BINOP
102 GTK_SPECIAL = 0x0000, // unclassified operator (special handling reqd)
104 GTK_CONST = 0x0001, // constant operator
105 GTK_LEAF = 0x0002, // leaf operator
106 GTK_UNOP = 0x0004, // unary operator
107 GTK_BINOP = 0x0008, // binary operator
108 GTK_RELOP = 0x0010, // comparison operator
109 GTK_LOGOP = 0x0020, // logical operator
110 #ifdef LEGACY_BACKEND
111 GTK_ASGOP = 0x0040, // assignment operator
114 GTK_KINDMASK = 0x007F, // operator kind mask
116 GTK_COMMUTE = 0x0080, // commutative operator
118 GTK_EXOP = 0x0100, // Indicates that an oper for a node type that extends GenTreeOp (or GenTreeUnOp)
119 // by adding non-node fields to unary or binary operator.
121 GTK_LOCAL = 0x0200, // is a local access (load, store, phi)
123 GTK_NOVALUE = 0x0400, // node does not produce a value
124 GTK_NOTLIR = 0x0800, // node is not allowed in LIR
126 GTK_NOCONTAIN = 0x1000, // this node is a value, but may not be contained
128 /* Define composite value(s) */
130 GTK_SMPOP = (GTK_UNOP | GTK_BINOP | GTK_RELOP | GTK_LOGOP)
133 /*****************************************************************************/
135 #define SMALL_TREE_NODES 1
137 /*****************************************************************************/
139 enum gtCallTypes : BYTE
141 CT_USER_FUNC, // User function
142 CT_HELPER, // Jit-helper
143 CT_INDIRECT, // Indirect call
145 CT_COUNT // fake entry (must be last)
148 /*****************************************************************************/
152 struct InlineCandidateInfo;
154 typedef unsigned short AssertionIndex;
156 static const AssertionIndex NO_ASSERTION_INDEX = 0;
158 //------------------------------------------------------------------------
159 // GetAssertionIndex: return 1-based AssertionIndex from 0-based int index.
162 // index - 0-based index
164 // 1-based AssertionIndex.
165 inline AssertionIndex GetAssertionIndex(unsigned index)
167 return (AssertionIndex)(index + 1);
172 // true if the assertion holds on the bbNext edge instead of the bbJumpDest edge (for GT_JTRUE nodes)
173 unsigned short m_isNextEdgeAssertion : 1;
174 // 1-based index of the assertion
175 unsigned short m_assertionIndex : 15;
177 AssertionInfo(bool isNextEdgeAssertion, AssertionIndex assertionIndex)
178 : m_isNextEdgeAssertion(isNextEdgeAssertion), m_assertionIndex(assertionIndex)
180 assert(m_assertionIndex == assertionIndex);
184 AssertionInfo() : AssertionInfo(false, 0)
188 AssertionInfo(AssertionIndex assertionIndex) : AssertionInfo(false, assertionIndex)
192 static AssertionInfo ForNextEdge(AssertionIndex assertionIndex)
194 // Ignore the edge information if there's no assertion
195 bool isNextEdge = (assertionIndex != NO_ASSERTION_INDEX);
196 return AssertionInfo(isNextEdge, assertionIndex);
201 m_isNextEdgeAssertion = 0;
202 m_assertionIndex = NO_ASSERTION_INDEX;
205 bool HasAssertion() const
207 return m_assertionIndex != NO_ASSERTION_INDEX;
210 AssertionIndex GetAssertionIndex() const
212 return m_assertionIndex;
215 bool IsNextEdgeAssertion() const
217 return m_isNextEdgeAssertion;
221 /*****************************************************************************/
223 // GT_FIELD nodes will be lowered into more "code-gen-able" representations, like
224 // GT_IND's of addresses, or GT_LCL_FLD nodes. We'd like to preserve the more abstract
225 // information, and will therefore annotate such lowered nodes with FieldSeq's. A FieldSeq
226 // represents a (possibly) empty sequence of fields. The fields are in the order
227 // in which they are dereferenced. The first field may be an object field or a struct field;
228 // all subsequent fields must be struct fields.
231 CORINFO_FIELD_HANDLE m_fieldHnd;
232 FieldSeqNode* m_next;
234 FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next) : m_fieldHnd(fieldHnd), m_next(next)
238 // returns true when this is the pseudo #FirstElem field sequence
239 bool IsFirstElemFieldSeq();
241 // returns true when this is the pseudo #ConstantIndex field sequence
242 bool IsConstantIndexFieldSeq();
244 // returns true when this is the pseudo #FirstElem field sequence or the pseudo #ConstantIndex field sequence
245 bool IsPseudoField();
247 // Make sure this provides methods that allow it to be used as a KeyFuncs type in SimplerHash.
248 static int GetHashCode(FieldSeqNode fsn)
250 return static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_fieldHnd)) ^
251 static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_next));
254 static bool Equals(FieldSeqNode fsn1, FieldSeqNode fsn2)
256 return fsn1.m_fieldHnd == fsn2.m_fieldHnd && fsn1.m_next == fsn2.m_next;
260 // This class canonicalizes field sequences.
263 typedef JitHashTable<FieldSeqNode, /*KeyFuncs*/ FieldSeqNode, FieldSeqNode*> FieldSeqNodeCanonMap;
265 CompAllocator* m_alloc;
266 FieldSeqNodeCanonMap* m_canonMap;
268 static FieldSeqNode s_notAField; // No value, just exists to provide an address.
270 // Dummy variables to provide the addresses for the "pseudo field handle" statics below.
271 static int FirstElemPseudoFieldStruct;
272 static int ConstantIndexPseudoFieldStruct;
275 FieldSeqStore(CompAllocator* alloc);
277 // Returns the (canonical in the store) singleton field sequence for the given handle.
278 FieldSeqNode* CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd);
280 // This is a special distinguished FieldSeqNode indicating that a constant does *not*
281 // represent a valid field sequence. This is "infectious", in the sense that appending it
282 // (on either side) to any field sequence yields the "NotAField()" sequence.
283 static FieldSeqNode* NotAField()
288 // Returns the (canonical in the store) field sequence representing the concatenation of
289 // the sequences represented by "a" and "b". Assumes that "a" and "b" are canonical; that is,
290 // they are the results of CreateSingleton, NotAField, or Append calls. If either of the arguments
291 // are the "NotAField" value, so is the result.
292 FieldSeqNode* Append(FieldSeqNode* a, FieldSeqNode* b);
294 // We have a few "pseudo" field handles:
296 // This treats the constant offset of the first element of something as if it were a field.
297 // Works for method table offsets of boxed structs, or first elem offset of arrays/strings.
298 static CORINFO_FIELD_HANDLE FirstElemPseudoField;
300 // If there is a constant index, we make a pseudo field to correspond to the constant added to
301 // offset of the indexed field. This keeps the field sequence structure "normalized", especially in the
302 // case where the element type is a struct, so we might add a further struct field offset.
303 static CORINFO_FIELD_HANDLE ConstantIndexPseudoField;
305 static bool IsPseudoField(CORINFO_FIELD_HANDLE hnd)
307 return hnd == FirstElemPseudoField || hnd == ConstantIndexPseudoField;
311 class GenTreeUseEdgeIterator;
312 class GenTreeOperandIterator;
314 /*****************************************************************************/
316 typedef struct GenTree* GenTreePtr;
318 // Forward declarations of the subtypes
319 #define GTSTRUCT_0(fn, en) struct GenTree##fn;
320 #define GTSTRUCT_1(fn, en) struct GenTree##fn;
321 #define GTSTRUCT_2(fn, en, en2) struct GenTree##fn;
322 #define GTSTRUCT_3(fn, en, en2, en3) struct GenTree##fn;
323 #define GTSTRUCT_4(fn, en, en2, en3, en4) struct GenTree##fn;
324 #define GTSTRUCT_N(fn, ...) struct GenTree##fn;
325 #define GTSTRUCT_2_SPECIAL(fn, en, en2) GTSTRUCT_2(fn, en, en2)
326 #define GTSTRUCT_3_SPECIAL(fn, en, en2, en3) GTSTRUCT_3(fn, en, en2, en3)
327 #include "gtstructs.h"
329 /*****************************************************************************/
332 #include <pshpack4.h>
337 // We use GT_STRUCT_0 only for the category of simple ops.
338 #define GTSTRUCT_0(fn, en) \
339 GenTree##fn* As##fn() \
341 assert(OperIsSimple()); \
342 return reinterpret_cast<GenTree##fn*>(this); \
344 const GenTree##fn* As##fn() const \
346 assert(OperIsSimple()); \
347 return reinterpret_cast<const GenTree##fn*>(this); \
349 GenTree##fn& As##fn##Ref() \
353 __declspec(property(get = As##fn##Ref)) GenTree##fn& gt##fn;
355 #define GTSTRUCT_N(fn, ...) \
356 GenTree##fn* As##fn() \
358 assert(OperIs(__VA_ARGS__)); \
359 return reinterpret_cast<GenTree##fn*>(this); \
361 const GenTree##fn* As##fn() const \
363 assert(OperIs(__VA_ARGS__)); \
364 return reinterpret_cast<const GenTree##fn*>(this); \
366 GenTree##fn& As##fn##Ref() \
370 __declspec(property(get = As##fn##Ref)) GenTree##fn& gt##fn;
372 #define GTSTRUCT_1(fn, en) GTSTRUCT_N(fn, en)
373 #define GTSTRUCT_2(fn, en, en2) GTSTRUCT_N(fn, en, en2)
374 #define GTSTRUCT_3(fn, en, en2, en3) GTSTRUCT_N(fn, en, en2, en3)
375 #define GTSTRUCT_4(fn, en, en2, en3, en4) GTSTRUCT_N(fn, en, en2, en3, en4)
376 #define GTSTRUCT_2_SPECIAL(fn, en, en2) GTSTRUCT_2(fn, en, en2)
377 #define GTSTRUCT_3_SPECIAL(fn, en, en2, en3) GTSTRUCT_3(fn, en, en2, en3)
379 #include "gtstructs.h"
381 genTreeOps gtOper; // enum subtype BYTE
382 var_types gtType; // enum subtype BYTE
384 genTreeOps OperGet() const
388 var_types TypeGet() const
394 genTreeOps gtOperSave; // Only used to save gtOper when we destroy a node, to aid debugging.
// Helpers for interpreting gtCSEnum: a zero value means the node is not part of a CSE;
// a positive value marks a CSE use, a negative value marks a CSE def, and
// GET_CSE_INDEX recovers the absolute index in either case.
401 #define IS_CSE_INDEX(x) (x != 0)
402 #define IS_CSE_USE(x) (x > 0)
403 #define IS_CSE_DEF(x) (x < 0)
404 #define GET_CSE_INDEX(x) ((x > 0) ? x : -x)
405 #define TO_CSE_DEF(x) (-x)
407 signed char gtCSEnum; // 0 or the CSE index (negated if def)
408 // valid only for CSE expressions
410 #endif // FEATURE_ANYCSE
412 unsigned char gtLIRFlags; // Used for nodes that are in LIR. See LIR::Flags in lir.h for the various flags.
415 AssertionInfo gtAssertionInfo; // valid only for non-GT_STMT nodes
417 bool GeneratesAssertion() const
419 return gtAssertionInfo.HasAssertion();
422 void ClearAssertion()
424 gtAssertionInfo.Clear();
427 AssertionInfo GetAssertionInfo() const
429 return gtAssertionInfo;
432 void SetAssertionInfo(AssertionInfo info)
434 gtAssertionInfo = info;
438 #if FEATURE_STACK_FP_X87
439 unsigned char gtFPlvl; // x87 stack depth at this node
440 void gtCopyFPlvl(GenTree* other)
442 gtFPlvl = other->gtFPlvl;
444 void gtSetFPlvl(unsigned level)
446 noway_assert(FitsIn<unsigned char>(level));
447 gtFPlvl = (unsigned char)level;
449 #else // FEATURE_STACK_FP_X87
451 void gtCopyFPlvl(GenTree* other)
454 void gtSetFPlvl(unsigned level)
458 #endif // FEATURE_STACK_FP_X87
461 // Cost metrics on the node. Don't allow direct access to the variable for setting.
466 // You are not allowed to read the cost values before they have been set in gtSetEvalOrder().
467 // Keep track of whether the costs have been initialized, and assert if they are read before being initialized.
468 // Obviously, this information does need to be initialized when a node is created.
469 // This is public so the dumpers can see it.
471 bool gtCostsInitialized;
474 #define MAX_COST UCHAR_MAX
475 #define IND_COST_EX 3 // execution cost for an indirection
477 __declspec(property(get = GetCostEx)) unsigned char gtCostEx; // estimate of expression execution cost
479 __declspec(property(get = GetCostSz)) unsigned char gtCostSz; // estimate of expression code size cost
481 unsigned char GetCostEx() const
483 assert(gtCostsInitialized);
486 unsigned char GetCostSz() const
488 assert(gtCostsInitialized);
492 // Set the costs. They are always both set at the same time.
493 // Don't use the "put" property: force calling this function, to make it more obvious in the few places
494 // that set the values.
495 // Note that costs are only set in gtSetEvalOrder() and its callees.
496 void SetCosts(unsigned costEx, unsigned costSz)
498 assert(costEx != (unsigned)-1); // looks bogus
499 assert(costSz != (unsigned)-1); // looks bogus
500 INDEBUG(gtCostsInitialized = true;)
502 _gtCostEx = (costEx > MAX_COST) ? MAX_COST : (unsigned char)costEx;
503 _gtCostSz = (costSz > MAX_COST) ? MAX_COST : (unsigned char)costSz;
506 // Optimized copy function, to avoid the SetCosts() function comparisons, and make it more clear that a node copy is
508 void CopyCosts(const GenTree* const tree)
510 INDEBUG(gtCostsInitialized =
511 tree->gtCostsInitialized;) // If the 'tree' costs aren't initialized, we'll hit an assert below.
512 _gtCostEx = tree->gtCostEx;
513 _gtCostSz = tree->gtCostSz;
516 // Same as CopyCosts, but avoids asserts if the costs we are copying have not been initialized.
517 // This is because the importer, for example, clones nodes, before these costs have been initialized.
518 // Note that we directly access the 'tree' costs, not going through the accessor functions (either
519 // directly or through the properties).
520 void CopyRawCosts(const GenTree* const tree)
522 INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;)
523 _gtCostEx = tree->_gtCostEx;
524 _gtCostSz = tree->_gtCostSz;
528 unsigned char _gtCostEx; // estimate of expression execution cost
529 unsigned char _gtCostSz; // estimate of expression code size cost
532 // Register or register pair number of the node.
534 CLANG_FORMAT_COMMENT_ANCHOR;
540 GT_REGTAG_NONE, // Nothing has been assigned to _gtRegNum/_gtRegPair
541 GT_REGTAG_REG, // _gtRegNum has been assigned
542 #if CPU_LONG_USES_REGPAIR
543 GT_REGTAG_REGPAIR // _gtRegPair has been assigned
546 genRegTag GetRegTag() const
548 #if CPU_LONG_USES_REGPAIR
549 assert(gtRegTag == GT_REGTAG_NONE || gtRegTag == GT_REGTAG_REG || gtRegTag == GT_REGTAG_REGPAIR);
551 assert(gtRegTag == GT_REGTAG_NONE || gtRegTag == GT_REGTAG_REG);
557 genRegTag gtRegTag; // What is in _gtRegNum/_gtRegPair?
562 // These store the register assigned to the node. If a register is not assigned, _gtRegNum is set to REG_NA
563 // or _gtRegPair is set to REG_PAIR_NONE, depending on the node type.
564 // For the LEGACY_BACKEND,these are valid only if GTF_REG_VAL is set in gtFlags.
565 regNumberSmall _gtRegNum; // which register the value is in
566 regPairNoSmall _gtRegPair; // which register pair the value is in
570 // The register number is stored in a small format (8 bits), but the getters return and the setters take
571 // a full-size (unsigned) format, to localize the casts here.
573 __declspec(property(get = GetRegNum, put = SetRegNum)) regNumber gtRegNum;
575 bool canBeContained() const;
577 // for codegen purposes, is this node a subnode of its parent
578 bool isContained() const;
580 bool isContainedIndir() const;
582 bool isIndirAddrMode();
584 bool isIndir() const;
586 bool isContainedIntOrIImmed() const
588 return isContained() && IsCnsIntOrI() && !isUsedFromSpillTemp();
591 bool isContainedFltOrDblImmed() const
593 return isContained() && (OperGet() == GT_CNS_DBL);
596 bool isLclField() const
598 return OperGet() == GT_LCL_FLD || OperGet() == GT_STORE_LCL_FLD;
601 bool isUsedFromSpillTemp() const;
603 // Indicates whether it is a memory op.
604 // Right now it includes Indir and LclField ops.
605 bool isMemoryOp() const
607 return isIndir() || isLclField();
610 bool isUsedFromMemory() const
612 return ((isContained() && (isMemoryOp() || (OperGet() == GT_LCL_VAR) || (OperGet() == GT_CNS_DBL))) ||
613 isUsedFromSpillTemp());
616 bool isLclVarUsedFromMemory() const
618 return (OperGet() == GT_LCL_VAR) && (isContained() || isUsedFromSpillTemp());
621 bool isLclFldUsedFromMemory() const
623 return isLclField() && (isContained() || isUsedFromSpillTemp());
626 bool isUsedFromReg() const
628 return !isContained() && !isUsedFromSpillTemp();
631 regNumber GetRegNum() const
633 assert((gtRegTag == GT_REGTAG_REG) || (gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE case,
634 // and fix everyplace that reads undefined
636 regNumber reg = (regNumber)_gtRegNum;
637 assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads
639 (reg >= REG_FIRST && reg <= REG_COUNT));
643 void SetRegNum(regNumber reg)
645 assert(reg >= REG_FIRST && reg <= REG_COUNT);
646 // Make sure the upper bits of _gtRegPair are clear
647 _gtRegPair = (regPairNoSmall)0;
648 _gtRegNum = (regNumberSmall)reg;
649 INDEBUG(gtRegTag = GT_REGTAG_REG;)
650 assert(_gtRegNum == reg);
653 #if CPU_LONG_USES_REGPAIR
654 __declspec(property(get = GetRegPair, put = SetRegPair)) regPairNo gtRegPair;
656 regPairNo GetRegPair() const
658 assert((gtRegTag == GT_REGTAG_REGPAIR) || (gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE
659 // case, and fix everyplace that reads
661 regPairNo regPair = (regPairNo)_gtRegPair;
662 assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads
664 (regPair >= REG_PAIR_FIRST && regPair <= REG_PAIR_LAST) ||
665 (regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
669 void SetRegPair(regPairNo regPair)
671 assert((regPair >= REG_PAIR_FIRST && regPair <= REG_PAIR_LAST) ||
672 (regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
673 _gtRegPair = (regPairNoSmall)regPair;
674 INDEBUG(gtRegTag = GT_REGTAG_REGPAIR;)
675 assert(_gtRegPair == regPair);
679 // Copy the _gtRegNum/_gtRegPair/gtRegTag fields
680 void CopyReg(GenTreePtr from);
681 bool gtHasReg() const;
683 int GetRegisterDstCount() const;
685 regMaskTP gtGetRegMask() const;
687 unsigned gtFlags; // see GTF_xxxx below
690 unsigned gtDebugFlags; // see GTF_DEBUG_xxx below
691 #endif // defined(DEBUG)
693 ValueNumPair gtVNPair;
695 regMaskSmall gtRsvdRegs; // set of fixed trashed registers
697 #ifndef LEGACY_BACKEND
698 unsigned AvailableTempRegCount(regMaskTP mask = (regMaskTP)-1) const;
699 regNumber GetSingleTempReg(regMaskTP mask = (regMaskTP)-1);
700 regNumber ExtractTempReg(regMaskTP mask = (regMaskTP)-1);
701 #endif // !LEGACY_BACKEND
703 #ifdef LEGACY_BACKEND
704 regMaskSmall gtUsedRegs; // set of used (trashed) registers
705 #endif // LEGACY_BACKEND
707 void SetVNsFromNode(GenTreePtr tree)
709 gtVNPair = tree->gtVNPair;
712 ValueNum GetVN(ValueNumKind vnk) const
714 if (vnk == VNK_Liberal)
716 return gtVNPair.GetLiberal();
720 assert(vnk == VNK_Conservative);
721 return gtVNPair.GetConservative();
724 void SetVN(ValueNumKind vnk, ValueNum vn)
726 if (vnk == VNK_Liberal)
728 return gtVNPair.SetLiberal(vn);
732 assert(vnk == VNK_Conservative);
733 return gtVNPair.SetConservative(vn);
736 void SetVNs(ValueNumPair vnp)
742 gtVNPair = ValueNumPair(); // Initializes both elements to "NoVN".
747 //---------------------------------------------------------------------
749 // GenTree flags stored in gtFlags.
751 //---------------------------------------------------------------------
753 //---------------------------------------------------------------------
754 // The first set of flags can be used with a large set of nodes, and
755 // thus they must all have distinct values. That is, one can test any
756 // expression node for one of these flags.
757 //---------------------------------------------------------------------
759 #define GTF_ASG 0x00000001 // sub-expression contains an assignment
760 #define GTF_CALL 0x00000002 // sub-expression contains a func. call
761 #define GTF_EXCEPT 0x00000004 // sub-expression might throw an exception
762 #define GTF_GLOB_REF 0x00000008 // sub-expression uses global variable(s)
763 #define GTF_ORDER_SIDEEFF 0x00000010 // sub-expression has a re-ordering side effect
765 // If you set these flags, make sure that code:gtExtractSideEffList knows how to find the tree,
766 // otherwise the C# (run csc /o-) code:
767 // var v = side_eff_operation
768 // with no use of v will drop your tree on the floor.
769 #define GTF_PERSISTENT_SIDE_EFFECTS (GTF_ASG | GTF_CALL)
770 #define GTF_SIDE_EFFECT (GTF_PERSISTENT_SIDE_EFFECTS | GTF_EXCEPT)
771 #define GTF_GLOB_EFFECT (GTF_SIDE_EFFECT | GTF_GLOB_REF)
772 #define GTF_ALL_EFFECT (GTF_GLOB_EFFECT | GTF_ORDER_SIDEEFF)
774 // The extra flag GTF_IS_IN_CSE is used to tell the consumer of these flags
775 // that we are calling in the context of performing a CSE, thus we
776 // should allow the run-once side effects of running a class constructor.
778 // The only requirement of this flag is that it not overlap any of the
779 // side-effect flags. The actual bit used is otherwise arbitrary.
780 #define GTF_IS_IN_CSE GTF_BOOLEAN
781 #define GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE (GTF_ASG | GTF_CALL | GTF_IS_IN_CSE)
783 // Can any side-effects be observed externally, say by a caller method?
784 // For assignments, only assignments to global memory can be observed
785 // externally, whereas simple assignments to local variables can not.
787 // Be careful when using this inside a "try" protected region as the
788 // order of assignments to local variables would need to be preserved
789 // wrt side effects if the variables are alive on entry to the
790 // "catch/finally" region. In such cases, even assignments to locals
791 // will have to be restricted.
792 #define GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(flags) \
793 (((flags) & (GTF_CALL | GTF_EXCEPT)) || (((flags) & (GTF_ASG | GTF_GLOB_REF)) == (GTF_ASG | GTF_GLOB_REF)))
795 #define GTF_REVERSE_OPS 0x00000020 // operand op2 should be evaluated before op1 (normally, op1 is evaluated first and op2 is evaluated second)
797 #ifdef LEGACY_BACKEND
798 #define GTF_REG_VAL 0x00000040 // operand is sitting in a register (or part of a TYP_LONG operand is sitting in a register)
799 #else // !LEGACY_BACKEND
800 #define GTF_CONTAINED 0x00000040 // This node is contained (executed as part of its parent)
801 #endif // !LEGACY_BACKEND
803 #define GTF_SPILLED 0x00000080 // the value has been spilled
805 #ifdef LEGACY_BACKEND
806 #define GTF_SPILLED_OPER 0x00000100 // op1 has been spilled
807 #define GTF_SPILLED_OP2 0x00000200 // op2 has been spilled
808 #define GTF_ZSF_SET 0x00000400 // the zero(ZF) and sign(SF) flags set to the operand
809 #else // !LEGACY_BACKEND
810 #define GTF_NOREG_AT_USE 0x00000100 // tree node is in memory at the point of use
811 #endif // !LEGACY_BACKEND
813 #define GTF_SET_FLAGS 0x00000800 // Requires that codegen for this node set the flags. Use gtSetFlags() to check this flag.
814 #define GTF_USE_FLAGS 0x00001000 // Indicates that this node uses the flags bits.
816 #define GTF_MAKE_CSE 0x00002000 // Hoisted expression: try hard to make this into CSE (see optPerformHoistExpr)
817 #define GTF_DONT_CSE 0x00004000 // Don't bother CSE'ing this expr
818 #define GTF_COLON_COND 0x00008000 // This node is conditionally executed (part of ? :)
820 #define GTF_NODE_MASK (GTF_COLON_COND)
822 #define GTF_BOOLEAN 0x00040000 // value is known to be 0/1
823 #if CPU_HAS_BYTE_REGS && defined(LEGACY_BACKEND)
824 #define GTF_SMALL_OK 0x00080000 // actual small int sufficient
827 #define GTF_UNSIGNED 0x00100000 // With GT_CAST: the source operand is an unsigned type
828 // With operators: the specified node is an unsigned operator
829 #define GTF_LATE_ARG 0x00200000 // The specified node is evaluated to a temp in the arg list, and this temp is added to gtCallLateArgs.
830 #define GTF_SPILL 0x00400000 // Needs to be spilled here
832 #define GTF_COMMON_MASK 0x007FFFFF // mask of all the flags above
834 #define GTF_REUSE_REG_VAL 0x00800000 // This is set by the register allocator on nodes whose value already exists in the
835 // register assigned to this node, so the code generator does not have to generate
836 // code to produce the value. It is currently used only on constant nodes.
837 // It CANNOT be set on var (GT_LCL*) nodes, or on indir (GT_IND or GT_STOREIND) nodes, since
838 // it is not needed for lclVars and is highly unlikely to be useful for indir nodes.
840 //---------------------------------------------------------------------
841 // The following flags can be used only with a small set of nodes, and
842 // thus their values need not be distinct (other than within the set
843 // that goes with a particular node/nodes, of course). That is, one can
844 // only test for one of these flags if the 'gtOper' value is tested as
845 // well to make sure it's the right operator for the particular flag.
846 //---------------------------------------------------------------------
848 // NB: GTF_VAR_* and GTF_REG_* share the same namespace of flags, because
849 // GT_LCL_VAR nodes may be changed to GT_REG_VAR nodes without resetting
850 // the flags. These are also used by GT_LCL_FLD.
851 #define GTF_VAR_DEF 0x80000000 // GT_LCL_VAR -- this is a definition
852 #define GTF_VAR_USEASG 0x40000000 // GT_LCL_VAR -- this is a use/def for a x<op>=y
853 #define GTF_VAR_CAST 0x10000000 // GT_LCL_VAR -- has been explicitly cast (variable node may not be type of local)
854 #define GTF_VAR_ITERATOR 0x08000000 // GT_LCL_VAR -- this is an iterator reference in the loop condition
855 #define GTF_VAR_CLONED 0x01000000 // GT_LCL_VAR -- this node has been cloned or is a clone
856 // Relevant for inlining optimizations (see fgInlinePrependStatements)
858 // TODO-Cleanup: Currently, GTF_REG_BIRTH is used only by stackfp
859 // We should consider using it more generally for VAR_BIRTH, instead of
860 // GTF_VAR_DEF && !GTF_VAR_USEASG
861 #define GTF_REG_BIRTH 0x04000000 // GT_REG_VAR -- enregistered variable born here
862 #define GTF_VAR_DEATH 0x02000000 // GT_LCL_VAR, GT_REG_VAR -- variable dies here (last use)
864 #define GTF_VAR_ARR_INDEX 0x00000020 // The variable is part of (the index portion of) an array index expression.
865 // Shares a value with GTF_REVERSE_OPS, which is meaningless for local var.
867 #define GTF_LIVENESS_MASK (GTF_VAR_DEF | GTF_VAR_USEASG | GTF_REG_BIRTH | GTF_VAR_DEATH)
869 #define GTF_CALL_UNMANAGED 0x80000000 // GT_CALL -- direct call to unmanaged code
870 #define GTF_CALL_INLINE_CANDIDATE 0x40000000 // GT_CALL -- this call has been marked as an inline candidate
872 #define GTF_CALL_VIRT_KIND_MASK 0x30000000 // GT_CALL -- mask of the below call kinds
873 #define GTF_CALL_NONVIRT 0x00000000 // GT_CALL -- a non virtual call
874 #define GTF_CALL_VIRT_STUB 0x10000000 // GT_CALL -- a stub-dispatch virtual call
875 #define GTF_CALL_VIRT_VTABLE 0x20000000 // GT_CALL -- a vtable-based virtual call
877 #define GTF_CALL_NULLCHECK 0x08000000 // GT_CALL -- must check instance pointer for null
878 #define GTF_CALL_POP_ARGS 0x04000000 // GT_CALL -- caller pop arguments?
879 #define GTF_CALL_HOISTABLE 0x02000000 // GT_CALL -- call is hoistable
880 #ifdef LEGACY_BACKEND
882 // The GTF_CALL_REG_SAVE flag indicates that the call preserves all integer registers. This is used for
883 // the PollGC helper. However, since the PollGC helper on ARM follows the standard calling convention,
884 // for that target we don't use this flag.
885 #define GTF_CALL_REG_SAVE 0x00000000
887 #define GTF_CALL_REG_SAVE 0x01000000 // GT_CALL -- This call preserves all integer regs
888 #endif // _TARGET_ARM_
889 #endif // LEGACY_BACKEND
890 // For additional flags for GT_CALL node see GTF_CALL_M_*
892 #define GTF_NOP_DEATH 0x40000000 // GT_NOP -- operand dies here
894 #define GTF_FLD_NULLCHECK 0x80000000 // GT_FIELD -- need to nullcheck the "this" pointer
895 #define GTF_FLD_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
896 #define GTF_FLD_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- field access requires preceding class/static init helper
898 #define GTF_INX_RNGCHK 0x80000000 // GT_INDEX -- the array reference should be range-checked.
899 #define GTF_INX_REFARR_LAYOUT 0x20000000 // GT_INDEX
900 #define GTF_INX_STRING_LAYOUT 0x40000000 // GT_INDEX -- this uses the special string array layout
902 #define GTF_IND_ARR_LEN 0x80000000 // GT_IND -- the indirection represents an array length (of the REF
903 // contribution to its argument).
904 #define GTF_IND_VOLATILE 0x40000000 // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
905 #define GTF_IND_NONFAULTING 0x20000000 // Operations for which OperIsIndir() is true -- An indir that cannot fault.
906 // Same as GTF_ARRLEN_NONFAULTING.
907 #define GTF_IND_TGTANYWHERE 0x10000000 // GT_IND -- the target could be anywhere
908 #define GTF_IND_TLS_REF 0x08000000 // GT_IND -- the target is accessed via TLS
909 #define GTF_IND_ASG_LHS 0x04000000 // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
910 // assignment; don't evaluate it independently.
911 #define GTF_IND_REQ_ADDR_IN_REG GTF_IND_ASG_LHS // GT_IND -- requires its addr operand to be evaluated
912 // into a register. This flag is useful in cases where it
913 // is required to generate register indirect addressing mode.
914 // One such case is virtual stub calls on xarch. This is only
915 // valid in the backend, where GTF_IND_ASG_LHS is not necessary
916 // (all such indirections will be lowered to GT_STOREIND).
917 #define GTF_IND_UNALIGNED 0x02000000 // GT_IND -- the load or store is unaligned (we assume worst case
918 // alignment of 1 byte)
919 #define GTF_IND_INVARIANT 0x01000000 // GT_IND -- the target is invariant (a prejit indirection)
920 #define GTF_IND_ARR_INDEX 0x00800000 // GT_IND -- the indirection represents an (SZ) array index
922 #define GTF_IND_FLAGS \
923 (GTF_IND_VOLATILE | GTF_IND_TGTANYWHERE | GTF_IND_NONFAULTING | GTF_IND_TLS_REF | \
924 GTF_IND_UNALIGNED | GTF_IND_INVARIANT | GTF_IND_ARR_INDEX)
926 #define GTF_CLS_VAR_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
927 #define GTF_CLS_VAR_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_FLD_INITCLASS
928 #define GTF_CLS_VAR_ASG_LHS 0x04000000 // GT_CLS_VAR -- this GT_CLS_VAR node is (the effective val) of the LHS
929 // of an assignment; don't evaluate it independently.
931 #define GTF_ADDR_ONSTACK 0x80000000 // GT_ADDR -- this expression is guaranteed to be on the stack
933 #define GTF_ADDRMODE_NO_CSE 0x80000000 // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex
936 #define GTF_MUL_64RSLT 0x40000000 // GT_MUL -- produce 64-bit result
938 #ifdef LEGACY_BACKEND
939 #define GTF_MOD_INT_RESULT 0x80000000 // GT_MOD, -- the real tree represented by this
940 // GT_UMOD node evaluates to an int even though its type is long.
941 // The result is placed in the low member of the reg pair.
942 #endif // LEGACY_BACKEND
944 #define GTF_RELOP_NAN_UN 0x80000000 // GT_<relop> -- Is branch taken if ops are NaN?
945 #define GTF_RELOP_JMP_USED 0x40000000 // GT_<relop> -- result of compare used for jump or ?:
946 #define GTF_RELOP_QMARK 0x20000000 // GT_<relop> -- the node is the condition for ?:
947 #define GTF_RELOP_ZTT 0x08000000 // GT_<relop> -- Loop test cloned for converting while-loops into do-while
948 // with explicit "loop test" in the header block.
950 #define GTF_JCMP_EQ 0x80000000 // GT_JCMP -- Branch on equal rather than not equal
951 #define GTF_JCMP_TST 0x40000000 // GT_JCMP -- Use bit test instruction rather than compare against zero instruction
953 #define GTF_RET_MERGED 0x80000000 // GT_RETURN -- This is a return generated during epilog merging.
955 #define GTF_QMARK_CAST_INSTOF 0x80000000 // GT_QMARK -- Is this a top (not nested) level qmark created for
956 // castclass or instanceof?
958 #define GTF_BOX_VALUE 0x80000000 // GT_BOX -- "box" is on a value type
// The handle kind of a GT_CNS_INT is stored in the high nibble of gtFlags (GTF_ICON_HDL_MASK).
960 #define GTF_ICON_HDL_MASK 0xF0000000 // Bits used by handle types below
961 #define GTF_ICON_SCOPE_HDL 0x10000000 // GT_CNS_INT -- constant is a scope handle
962 #define GTF_ICON_CLASS_HDL 0x20000000 // GT_CNS_INT -- constant is a class handle
963 #define GTF_ICON_METHOD_HDL 0x30000000 // GT_CNS_INT -- constant is a method handle
964 #define GTF_ICON_FIELD_HDL 0x40000000 // GT_CNS_INT -- constant is a field handle
965 #define GTF_ICON_STATIC_HDL 0x50000000 // GT_CNS_INT -- constant is a handle to static data
966 #define GTF_ICON_STR_HDL 0x60000000 // GT_CNS_INT -- constant is a string handle
967 #define GTF_ICON_PSTR_HDL 0x70000000 // GT_CNS_INT -- constant is a ptr to a string handle
968 #define GTF_ICON_PTR_HDL 0x80000000 // GT_CNS_INT -- constant is a ldptr handle
969 #define GTF_ICON_VARG_HDL 0x90000000 // GT_CNS_INT -- constant is a var arg cookie handle
970 #define GTF_ICON_PINVKI_HDL 0xA0000000 // GT_CNS_INT -- constant is a pinvoke calli handle
971 #define GTF_ICON_TOKEN_HDL 0xB0000000 // GT_CNS_INT -- constant is a token handle
972 #define GTF_ICON_TLS_HDL 0xC0000000 // GT_CNS_INT -- constant is a TLS ref with offset
973 #define GTF_ICON_FTN_ADDR 0xD0000000 // GT_CNS_INT -- constant is a function address
974 #define GTF_ICON_CIDMID_HDL 0xE0000000 // GT_CNS_INT -- constant is a class ID or a module ID
975 #define GTF_ICON_BBC_PTR 0xF0000000 // GT_CNS_INT -- constant is a basic block count pointer
977 #define GTF_ICON_FIELD_OFF 0x08000000 // GT_CNS_INT -- constant is a field offset
978 #define GTF_ICON_SIMD_COUNT 0x04000000 // GT_CNS_INT -- constant is Vector<T>.Count
980 #define GTF_ICON_INITCLASS 0x02000000 // GT_CNS_INT -- Constant is used to access a static that requires preceding
981 // class/static init helper. In some cases, the constant is
982 // the address of the static field itself, and in other cases
983 // there's an extra layer of indirection and it is the address
984 // of the cell that the runtime will fill in with the address
985 // of the static field; in both of those cases, the constant
986 // is what gets flagged.
988 #define GTF_BLK_VOLATILE GTF_IND_VOLATILE // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is a volatile block operation
989 #define GTF_BLK_UNALIGNED GTF_IND_UNALIGNED // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an unaligned block operation
991 #define GTF_OVERFLOW 0x10000000 // GT_ADD, GT_SUB, GT_MUL, -- Need overflow check. Use gtOverflow(Ex)() to check this flag.
992 // GT_ASG_ADD, GT_ASG_SUB,
995 #define GTF_ARR_BOUND_INBND 0x80000000 // GT_ARR_BOUNDS_CHECK -- have proved this check is always in-bounds
997 #define GTF_ARRLEN_ARR_IDX 0x80000000 // GT_ARR_LENGTH -- Length which feeds into an array index expression
998 #define GTF_ARRLEN_NONFAULTING 0x20000000 // GT_ARR_LENGTH -- An array length operation that cannot fault. Same as GTF_IND_NONFAULTING.
1000 #define GTF_FIELD_LIST_HEAD 0x80000000 // GT_FIELD_LIST -- Indicates that this is the first field in a list of
1001 // struct fields constituting a single call argument.
1003 #define GTF_SIMD12_OP 0x80000000 // GT_SIMD -- Indicates that the operands need to be handled as SIMD12
1004 // even if they have been retyped as SIMD16.
1006 #define GTF_STMT_CMPADD 0x80000000 // GT_STMT -- added by compiler
1007 #define GTF_STMT_HAS_CSE 0x40000000 // GT_STMT -- CSE def or use was substituted
1009 //---------------------------------------------------------------------
1011 // GenTree flags stored in gtDebugFlags.
1013 //---------------------------------------------------------------------
1016 #define GTF_DEBUG_NONE 0x00000000 // No debug flags.
1018 #define GTF_DEBUG_NODE_MORPHED 0x00000001 // the node has been morphed (in the global morphing phase)
1019 #define GTF_DEBUG_NODE_SMALL 0x00000002 // presumably tags the node's allocation size class -- TODO confirm
1020 #define GTF_DEBUG_NODE_LARGE 0x00000004 // presumably tags the node's allocation size class -- TODO confirm
1021 #define GTF_DEBUG_NODE_CG_PRODUCED 0x00000008 // genProduceReg has been called on this node
1022 #define GTF_DEBUG_NODE_CG_CONSUMED 0x00000010 // genConsumeReg has been called on this node
1023 #define GTF_DEBUG_NODE_LSRA_ADDED 0x00000020 // This node was added by LSRA
1025 #define GTF_DEBUG_NODE_MASK 0x0000003F // These flags are all node (rather than operation) properties.
1027 #define GTF_DEBUG_VAR_CSE_REF 0x00800000 // GT_LCL_VAR -- This is a CSE LCL_VAR node
1028 #endif // defined(DEBUG)
1030 //---------------------------------------------------------------------
1032 // end of GenTree flags definitions
1034 //---------------------------------------------------------------------
1043 unsigned gtSeqNum; // liveness traversal order within the current statement
1045 int gtUseNum; // use-ordered traversal within the function
// Per-oper table of GTK_* kind bits, indexed by gtOper.
1048 static const unsigned short gtOperKindTable[];
// Maps an oper to its GTK_* kind bits via the lookup table above.
1050 static unsigned OperKind(unsigned gtOper)
1052 assert(gtOper < GT_COUNT);
1054 return gtOperKindTable[gtOper];
1057 unsigned OperKind() const
1059 assert(gtOper < GT_COUNT);
1061 return gtOperKindTable[gtOper];
1064 static bool IsExOp(unsigned opKind)
1066 return (opKind & GTK_EXOP) != 0;
1068 // Returns the operKind with the GTK_EXOP bit removed (the
1069 // kind of operator, unary or binary, that is extended).
1070 static unsigned StripExOp(unsigned opKind)
1072 return opKind & ~GTK_EXOP;
// Whether this node produces a value (GTK_NOVALUE opers never do; some opers,
// asserted below, can be either VOID or value-producing).
1075 bool IsValue() const
1077 if ((OperKind(gtOper) & GTK_NOVALUE) != 0)
1082 if (gtType == TYP_VOID)
1084 // These are the only operators which can produce either VOID or non-VOID results.
1085 assert(OperIs(GT_NOP, GT_CALL, GT_LOCKADD, GT_FIELD_LIST, GT_COMMA) || OperIsCompare() || OperIsLong() ||
1086 OperIsSIMD() || OperIsHWIntrinsic());
1090 if (gtOper == GT_FIELD_LIST)
1092 return (gtFlags & GTF_FIELD_LIST_HEAD) != 0;
1100 if ((OperKind(gtOper) & GTK_NOTLIR) != 0)
1108 // NOPs may only be present in LIR if they do not produce a value.
1109 return IsNothingNode();
1112 // LIST nodes may not be present in a block's LIR sequence, but they may
1113 // be present as children of an LIR node.
1114 return (gtNext == nullptr) && (gtPrev == nullptr);
1117 // Only the head of the FIELD_LIST is present in the block's LIR sequence.
1118 return (((gtFlags & GTF_FIELD_LIST_HEAD) != 0) || ((gtNext == nullptr) && (gtPrev == nullptr)));
1122 // ADDR nodes may only be present in LIR if the location they refer to is not a
1123 // local, class variable, or IND node.
1124 GenTree* location = gtGetOp1();
1125 genTreeOps locationOp = location->OperGet();
1126 return !location->IsLocal() && (locationOp != GT_CLS_VAR) && (locationOp != GT_IND);
1130 // All other nodes are assumed to be correct.
1136 // These helper methods, along with the flag values they manipulate, are defined in lir.h
1138 // UnusedValue indicates that, although this node produces a value, it is unused.
1139 inline void SetUnusedValue();
1140 inline void ClearUnusedValue();
1141 inline bool IsUnusedValue() const;
1142 // RegOptional indicates that codegen can still generate code even if it isn't allocated a register.
1143 inline bool IsRegOptional() const;
1144 inline void SetRegOptional();
1145 inline void ClearRegOptional();
1147 void dumpLIRFlags();
// Exact-oper test; the variadic overload below tests against any of a list of opers.
1150 bool OperIs(genTreeOps oper) const
1152 return OperGet() == oper;
1155 template <typename... T>
1156 bool OperIs(genTreeOps oper, T... rest) const
1158 return OperIs(oper) || OperIs(rest...);
1161 static bool OperIsConst(genTreeOps gtOper)
1163 return (OperKind(gtOper) & GTK_CONST) != 0;
1166 bool OperIsConst() const
1168 return (OperKind(gtOper) & GTK_CONST) != 0;
// True when the given oper carries the GTK_LEAF kind bit.
1171 static bool OperIsLeaf(genTreeOps gtOper)
1173 return (OperKind(gtOper) & GTK_LEAF) != 0;
1176 bool OperIsLeaf() const
1178 return (OperKind(gtOper) & GTK_LEAF) != 0;
1181 static bool OperIsCompare(genTreeOps gtOper)
1183 return (OperKind(gtOper) & GTK_RELOP) != 0;
// The assert cross-checks the GTK_LOCAL kind bit against the explicit list of local opers.
1186 static bool OperIsLocal(genTreeOps gtOper)
1188 bool result = (OperKind(gtOper) & GTK_LOCAL) != 0;
1189 assert(result == (gtOper == GT_LCL_VAR || gtOper == GT_PHI_ARG || gtOper == GT_REG_VAR ||
1190 gtOper == GT_LCL_FLD || gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD));
1194 static bool OperIsLocalAddr(genTreeOps gtOper)
1196 return (gtOper == GT_LCL_VAR_ADDR || gtOper == GT_LCL_FLD_ADDR);
1199 static bool OperIsLocalField(genTreeOps gtOper)
1201 return (gtOper == GT_LCL_FLD || gtOper == GT_LCL_FLD_ADDR || gtOper == GT_STORE_LCL_FLD);
1204 inline bool OperIsLocalField() const
1206 return OperIsLocalField(gtOper);
1209 static bool OperIsScalarLocal(genTreeOps gtOper)
1211 return (gtOper == GT_LCL_VAR || gtOper == GT_REG_VAR || gtOper == GT_STORE_LCL_VAR);
1214 static bool OperIsNonPhiLocal(genTreeOps gtOper)
1216 return OperIsLocal(gtOper) && (gtOper != GT_PHI_ARG);
// A local read is any local oper that is not one of the two store forms.
1219 static bool OperIsLocalRead(genTreeOps gtOper)
1221 return (OperIsLocal(gtOper) && !OperIsLocalStore(gtOper));
1224 static bool OperIsLocalStore(genTreeOps gtOper)
1226 return (gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD);
1229 static bool OperIsAddrMode(genTreeOps gtOper)
1231 return (gtOper == GT_LEA);
1234 static bool OperIsInitVal(genTreeOps gtOper)
1236 return (gtOper == GT_INIT_VAL);
1239 bool OperIsInitVal() const
1241 return OperIsInitVal(OperGet());
1244 bool IsConstInitVal()
1246 return (gtOper == GT_CNS_INT) || (OperIsInitVal() && (gtGetOp1()->gtOper == GT_CNS_INT));
1250 bool OperIsCopyBlkOp();
1251 bool OperIsInitBlkOp();
1252 bool OperIsDynBlkOp();
// True for all block opers, including their store forms.
1254 static bool OperIsBlk(genTreeOps gtOper)
1256 return ((gtOper == GT_BLK) || (gtOper == GT_OBJ) || (gtOper == GT_DYN_BLK) || (gtOper == GT_STORE_BLK) ||
1257 (gtOper == GT_STORE_OBJ) || (gtOper == GT_STORE_DYN_BLK));
1260 bool OperIsBlk() const
1262 return OperIsBlk(OperGet());
1265 static bool OperIsDynBlk(genTreeOps gtOper)
1267 return ((gtOper == GT_DYN_BLK) || (gtOper == GT_STORE_DYN_BLK));
1270 bool OperIsDynBlk() const
1272 return OperIsDynBlk(OperGet());
1275 static bool OperIsStoreBlk(genTreeOps gtOper)
1277 return ((gtOper == GT_STORE_BLK) || (gtOper == GT_STORE_OBJ) || (gtOper == GT_STORE_DYN_BLK));
1280 bool OperIsStoreBlk() const
1282 return OperIsStoreBlk(OperGet());
// GT_PUTARG_SPLIT exists only for the non-legacy ARM backend.
1285 bool OperIsPutArgSplit() const
1287 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1288 return gtOper == GT_PUTARG_SPLIT;
1294 bool OperIsPutArgStk() const
1296 return gtOper == GT_PUTARG_STK;
1299 bool OperIsPutArgReg() const
1301 return gtOper == GT_PUTARG_REG;
1304 bool OperIsPutArg() const
1306 return OperIsPutArgStk() || OperIsPutArgReg() || OperIsPutArgSplit();
1309 bool OperIsMultiRegOp() const
1311 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1312 if ((gtOper == GT_MUL_LONG) || (gtOper == GT_PUTARG_REG) || (gtOper == GT_BITCAST))
1321 bool OperIsAddrMode() const
1323 return OperIsAddrMode(OperGet());
1326 bool OperIsLocal() const
1328 return OperIsLocal(OperGet());
1331 bool OperIsLocalAddr() const
1333 return OperIsLocalAddr(OperGet());
1336 bool OperIsScalarLocal() const
1338 return OperIsScalarLocal(OperGet());
1341 bool OperIsNonPhiLocal() const
1343 return OperIsNonPhiLocal(OperGet());
1346 bool OperIsLocalStore() const
1348 return OperIsLocalStore(OperGet());
1351 bool OperIsLocalRead() const
1353 return OperIsLocalRead(OperGet());
1356 bool OperIsCompare() const
1358 return (OperKind(gtOper) & GTK_RELOP) != 0;
// True when the given oper carries the GTK_LOGOP kind bit.
1361 static bool OperIsLogical(genTreeOps gtOper)
1363 return (OperKind(gtOper) & GTK_LOGOP) != 0;
1366 bool OperIsLogical() const
1368 return (OperKind(gtOper) & GTK_LOGOP) != 0;
1371 static bool OperIsShift(genTreeOps gtOper)
1373 return (gtOper == GT_LSH) || (gtOper == GT_RSH) || (gtOper == GT_RSZ);
// Instance form of the static OperIsShift predicate.
1376 bool OperIsShift() const
1378 return OperIsShift(OperGet());
1381 static bool OperIsRotate(genTreeOps gtOper)
1383 return (gtOper == GT_ROL) || (gtOper == GT_ROR);
1386 bool OperIsRotate() const
1388 return OperIsRotate(OperGet());
1391 static bool OperIsShiftOrRotate(genTreeOps gtOper)
1393 return OperIsShift(gtOper) || OperIsRotate(gtOper);
1396 bool OperIsShiftOrRotate() const
1398 return OperIsShiftOrRotate(OperGet());
// Add/sub/mul/div/mod, bitwise and/or/xor, and shift/rotate opers.
1401 bool OperIsArithmetic() const
1403 genTreeOps op = OperGet();
1404 return op == GT_ADD || op == GT_SUB || op == GT_MUL || op == GT_DIV || op == GT_MOD
1406 || op == GT_UDIV || op == GT_UMOD
1408 || op == GT_OR || op == GT_XOR || op == GT_AND
1410 || OperIsShiftOrRotate(op);
1413 #ifdef _TARGET_XARCH_
1414 static bool OperIsRMWMemOp(genTreeOps gtOper)
1416 // Return if binary op is one of the supported operations for RMW of memory.
1417 return (gtOper == GT_ADD || gtOper == GT_SUB || gtOper == GT_AND || gtOper == GT_OR || gtOper == GT_XOR ||
1418 gtOper == GT_NOT || gtOper == GT_NEG || OperIsShiftOrRotate(gtOper));
1420 bool OperIsRMWMemOp() const
1422 // Return if binary op is one of the supported operations for RMW of memory.
1423 return OperIsRMWMemOp(gtOper);
1425 #endif // _TARGET_XARCH_
1427 static bool OperIsUnary(genTreeOps gtOper)
1429 return (OperKind(gtOper) & GTK_UNOP) != 0;
1432 bool OperIsUnary() const
1434 return OperIsUnary(gtOper);
1437 static bool OperIsBinary(genTreeOps gtOper)
1439 return (OperKind(gtOper) & GTK_BINOP) != 0;
1442 bool OperIsBinary() const
1444 return OperIsBinary(gtOper);
1447 static bool OperIsSimple(genTreeOps gtOper)
1449 return (OperKind(gtOper) & GTK_SMPOP) != 0;
1452 static bool OperIsSpecial(genTreeOps gtOper)
1454 return ((OperKind(gtOper) & GTK_KINDMASK) == GTK_SPECIAL);
1457 bool OperIsSimple() const
1459 return OperIsSimple(gtOper);
1463 bool isCommutativeSIMDIntrinsic();
1465 bool isCommutativeSIMDIntrinsic()
1469 #endif // FEATURE_SIMD
1471 static bool OperIsCommutative(genTreeOps gtOper)
1473 return (OperKind(gtOper) & GTK_COMMUTE) != 0;
// SIMD intrinsics may be commutative even without the GTK_COMMUTE kind bit.
1476 bool OperIsCommutative()
1478 return OperIsCommutative(gtOper) || (OperIsSIMD(gtOper) && isCommutativeSIMDIntrinsic());
1481 static bool OperIsAssignment(genTreeOps gtOper)
1483 #ifdef LEGACY_BACKEND
1484 return (OperKind(gtOper) & GTK_ASGOP) != 0;
1486 return gtOper == GT_ASG;
1490 bool OperIsAssignment() const
1492 return OperIsAssignment(gtOper);
// Opers on which GTF_OVERFLOW is meaningful.
1495 static bool OperMayOverflow(genTreeOps gtOper)
1497 return ((gtOper == GT_ADD) || (gtOper == GT_SUB) || (gtOper == GT_MUL) || (gtOper == GT_CAST)
1498 #ifdef LEGACY_BACKEND
1499 || (gtOper == GT_ASG_ADD) || (gtOper == GT_ASG_SUB)
1500 #elif !defined(_TARGET_64BIT_)
1501 || (gtOper == GT_ADD_HI) || (gtOper == GT_SUB_HI)
1506 bool OperMayOverflow() const
1508 return OperMayOverflow(gtOper)
1511 static bool OperIsIndir(genTreeOps gtOper)
1513 return gtOper == GT_IND || gtOper == GT_STOREIND || gtOper == GT_NULLCHECK || OperIsBlk(gtOper);
1516 static bool OperIsIndirOrArrLength(genTreeOps gtOper)
1518 return OperIsIndir(gtOper) || (gtOper == GT_ARR_LENGTH);
1521 bool OperIsIndir() const
1523 return OperIsIndir(gtOper);
1526 bool OperIsIndirOrArrLength() const
1528 return OperIsIndirOrArrLength(gtOper);
1531 static bool OperIsImplicitIndir(genTreeOps gtOper)
1544 case GT_STORE_DYN_BLK:
1555 bool OperIsImplicitIndir() const
1557 return OperIsImplicitIndir(gtOper);
1560 bool OperIsStore() const
1562 return OperIsStore(gtOper);
1565 static bool OperIsStore(genTreeOps gtOper)
1567 return (gtOper == GT_STOREIND || gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD ||
1568 gtOper == GT_STORE_BLK || gtOper == GT_STORE_OBJ || gtOper == GT_STORE_DYN_BLK);
// Interlocked/atomic opers.
1571 static bool OperIsAtomicOp(genTreeOps gtOper)
1573 return (gtOper == GT_XADD || gtOper == GT_XCHG || gtOper == GT_LOCKADD || gtOper == GT_CMPXCHG);
1576 bool OperIsAtomicOp() const
1578 return OperIsAtomicOp(gtOper);
1581 // This is here for cleaner FEATURE_SIMD #ifdefs.
1582 static bool OperIsSIMD(genTreeOps gtOper)
1585 return gtOper == GT_SIMD;
1586 #else // !FEATURE_SIMD
1588 #endif // !FEATURE_SIMD
1591 bool OperIsSIMD() const
1593 return OperIsSIMD(gtOper);
1596 static bool OperIsHWIntrinsic(genTreeOps gtOper)
1598 #ifdef FEATURE_HW_INTRINSICS
1599 return gtOper == GT_HWIntrinsic;
1602 #endif // FEATURE_HW_INTRINSICS
1605 bool OperIsHWIntrinsic() const
1607 return OperIsHWIntrinsic(gtOper);
1610 #ifdef FEATURE_HW_INTRINSICS
1611 inline bool OperIsSimdHWIntrinsic() const;
1613 inline bool OperIsSimdHWIntrinsic() const
1619 // This is here for cleaner GT_LONG #ifdefs.
1620 static bool OperIsLong(genTreeOps gtOper)
1622 #if defined(_TARGET_64BIT_) || defined(LEGACY_BACKEND)
1625 return gtOper == GT_LONG;
1629 bool OperIsLong() const
1631 return OperIsLong(gtOper);
1634 bool OperIsFieldListHead()
1636 return (gtOper == GT_FIELD_LIST) && ((gtFlags & GTF_FIELD_LIST_HEAD) != 0);
1639 bool OperIsConditionalJump() const
1641 return (gtOper == GT_JTRUE) || (gtOper == GT_JCMP) || (gtOper == GT_JCC);
1644 static bool OperIsBoundsCheck(genTreeOps op)
1646 if (op == GT_ARR_BOUNDS_CHECK)
1651 if (op == GT_SIMD_CHK)
1655 #endif // FEATURE_SIMD
1659 bool OperIsBoundsCheck() const
1661 return OperIsBoundsCheck(OperGet());
1664 #ifdef LEGACY_BACKEND
1665 // Requires that "op" is an op= operator. Returns
1666 // the corresponding "op".
1667 static genTreeOps OpAsgToOper(genTreeOps op);
// Whether a null op1 is legal for this (simple) oper.
1671 bool NullOp1Legal() const
1673 assert(OperIsSimple(gtOper));
1680 #ifdef FEATURE_HW_INTRINSICS
1681 case GT_HWIntrinsic:
1682 #endif // FEATURE_HW_INTRINSICS
1685 return gtType == TYP_VOID;
// Whether a null op2 is legal for this (simple or block) oper.
1691 bool NullOp2Legal() const
1693 assert(OperIsSimple(gtOper) || OperIsBlk(gtOper));
1694 if (!OperIsBinary(gtOper))
1706 #endif // !FEATURE_SIMD
1708 #ifdef FEATURE_HW_INTRINSICS
1709 case GT_HWIntrinsic:
1710 #endif // FEATURE_HW_INTRINSICS
1712 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1714 #endif // !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1721 static inline bool RequiresNonNullOp2(genTreeOps oper);
1722 bool IsValidCallArgument();
1725 inline bool IsFPZero();
1726 inline bool IsIntegralConst(ssize_t constVal);
1727 inline bool IsIntegralConstVector(ssize_t constVal);
1729 inline bool IsBoxedValue();
1731 inline bool IsSIMDEqualityOrInequality() const;
// GT_LIST / GT_FIELD_LIST predicates.
1733 static bool OperIsList(genTreeOps gtOper)
1735 return gtOper == GT_LIST;
1738 bool OperIsList() const
1740 return OperIsList(gtOper);
1743 static bool OperIsFieldList(genTreeOps gtOper)
1745 return gtOper == GT_FIELD_LIST;
1748 bool OperIsFieldList() const
1750 return OperIsFieldList(gtOper);
1753 static bool OperIsAnyList(genTreeOps gtOper)
1755 return OperIsList(gtOper) || OperIsFieldList(gtOper);
1758 bool OperIsAnyList() const
1760 return OperIsAnyList(gtOper);
1763 inline GenTreePtr MoveNext();
1765 inline GenTreePtr Current();
1767 inline GenTreePtr* pCurrent();
1769 inline GenTree* gtGetOp1() const;
1771 // Directly return op2. Asserts the node is binary. Might return nullptr if the binary node allows
1772 // a nullptr op2, such as GT_LIST. This is more efficient than gtGetOp2IfPresent() if you know what
1773 // node type you have.
1774 inline GenTree* gtGetOp2() const;
1776 // The returned pointer might be nullptr if the node is not binary, or if non-null op2 is not required.
1777 inline GenTree* gtGetOp2IfPresent() const;
1779 // Given a tree node, if this is a child of that node, return the pointer to the child node so that it
1780 // can be modified; otherwise, return null.
1781 GenTreePtr* gtGetChildPointer(GenTreePtr parent) const;
1783 // Given a tree node, if this node uses that node, return the use as an out parameter and return true.
1784 // Otherwise, return false.
1785 bool TryGetUse(GenTree* def, GenTree*** use);
1788 bool TryGetUseList(GenTree* def, GenTree*** use);
1790 bool TryGetUseBinOp(GenTree* def, GenTree*** use);
1793 // Get the parent of this node, and optionally capture the pointer to the child so that it can be modified.
1794 GenTreePtr gtGetParent(GenTreePtr** parentChildPtrPtr) const;
1796 void ReplaceOperand(GenTree** useEdge, GenTree* replacement);
1798 inline GenTreePtr gtEffectiveVal(bool commaOnly = false);
1800 // Tunnel through any GT_RET_EXPRs
1801 inline GenTree* gtRetExprVal();
1803 // Return the child of this node if it is a GT_RELOAD or GT_COPY; otherwise simply return the node itself
1804 inline GenTree* gtSkipReloadOrCopy();
1806 // Returns true if it is a call node returning its value in more than one register
1807 inline bool IsMultiRegCall() const;
1809 // Returns true if it is a node returning its value in more than one register
1810 inline bool IsMultiRegNode() const;
1812 // Returns true if it is a GT_COPY or GT_RELOAD node
1813 inline bool IsCopyOrReload() const;
1815 // Returns true if it is a GT_COPY or GT_RELOAD of a multi-reg call node
1816 inline bool IsCopyOrReloadOfMultiRegCall() const;
1818 bool OperRequiresAsgFlag();
1820 bool OperMayThrow(Compiler* comp);
1822 unsigned GetScaleIndexMul();
1823 unsigned GetScaleIndexShf();
1824 unsigned GetScaledIndex();
1826 // Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer
1827 // (<= 32 bit) constant. If it returns true, it sets "*offset" to (one of the) constant value(s), and
1828 // "*addr" to the other argument.
1829 bool IsAddWithI32Const(GenTreePtr* addr, int* offset);
1832 #if SMALL_TREE_NODES
1833 static unsigned char s_gtNodeSizes[];
1834 #if NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
1835 static unsigned char s_gtTrueSizes[];
1838 static LONG s_gtNodeCounts[];
1840 #endif // SMALL_TREE_NODES
1842 static void InitNodeSize();
1844 size_t GetNodeSize() const;
1846 bool IsNodeProperlySized() const;
1848 void ReplaceWith(GenTree* src, Compiler* comp);
1850 static genTreeOps ReverseRelop(genTreeOps relop);
1852 static genTreeOps SwapRelop(genTreeOps relop);
1854 //---------------------------------------------------------------------
1856 static bool Compare(GenTreePtr op1, GenTreePtr op2, bool swapOK = false);
1858 //---------------------------------------------------------------------
1860 #if defined(DEBUG) || NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
1861 static const char* OpName(genTreeOps op);
1864 #if MEASURE_NODE_SIZE && SMALL_TREE_NODES
1865 static const char* OpStructName(genTreeOps op);
1868 //---------------------------------------------------------------------
1870 bool IsNothingNode() const;
1873 // Value number update action enumeration
1874 enum ValueNumberUpdate
1876 CLEAR_VN, // Clear value number
1877 PRESERVE_VN // Preserve value number
1880 void SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN); // set gtOper
1881 void SetOperResetFlags(genTreeOps oper); // set gtOper and reset flags
1883 void ChangeOperConst(genTreeOps oper); // ChangeOper(constOper)
1884 // set gtOper and only keep GTF_COMMON_MASK flags
1885 void ChangeOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN);
1886 void ChangeOperUnchecked(genTreeOps oper);
1887 void SetOperRaw(genTreeOps oper);
// Retypes this node; walks to the bottom of any GT_COMMA chain (asserting the old
// type matched) and retypes that node as well.
1889 void ChangeType(var_types newType)
1891 var_types oldType = gtType;
1893 GenTree* node = this;
1894 while (node->gtOper == GT_COMMA)
1896 node = node->gtGetOp2();
1897 assert(node->gtType == oldType);
1898 node->gtType = newType;
1902 #if SMALL_TREE_NODES
1904 static void RecordOperBashing(genTreeOps operOld, genTreeOps operNew);
1905 static void ReportOperBashing(FILE* fp);
1907 static void RecordOperBashing(genTreeOps operOld, genTreeOps operNew)
1910 static void ReportOperBashing(FILE* fp)
1916 bool IsLocal() const
1918 return OperIsLocal(OperGet());
1921 // Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
1922 // is not the same size as the type of the GT_LCL_VAR.
1923 bool IsPartialLclFld(Compiler* comp);
1925 // Returns "true" iff "this" defines a local variable. Requires "comp" to be the
1926 // current compilation. If returns "true", sets "*pLclVarTree" to the
1927 // tree for the local that is defined, and, if "pIsEntire" is non-null, sets "*pIsEntire" to
1928 // true or false, depending on whether the assignment writes to the entirety of the local
1929 // variable, or just a portion of it.
1930 bool DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire = nullptr);
1932 // Returns true if "this" represents the address of a local, or a field of a local. If returns true, sets
1933 // "*pLclVarTree" to the node indicating the local variable. If the address is that of a field of this node,
1934 // sets "*pFldSeq" to the field sequence representing that field, else null.
1935 bool IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq);
1937 // Simpler variant of the above which just returns the local node if this is an expression that
1938 // yields an address into a local
1939 GenTreeLclVarCommon* IsLocalAddrExpr();
1941 // Determine if this is a LclVarCommon node and return some additional info about it in the
1942 // two out parameters.
1943 bool IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq);
1945 // Determine whether this is an assignment tree of the form X = X (op) Y,
1946 // where Y is an arbitrary tree, and X is a lclVar.
1947 unsigned IsLclVarUpdateTree(GenTree** otherTree, genTreeOps* updateOper);
1949 // If returns "true", "this" may represent the address of a static or instance field
1950 // (or a field of such a field, in the case of an object field of type struct).
1951 // If returns "true", then either "*pObj" is set to the object reference,
1952 // or "*pStatic" is set to the baseAddr or offset to be added to the "*pFldSeq"
1953 // Only one of "*pObj" or "*pStatic" will be set, the other one will be null.
1954 // The boolean return value only indicates that "this" *may* be a field address
1955 // -- the field sequence must also be checked.
1956 // If it is a field address, the field sequence will be a sequence of length >= 1,
1957 // starting with an instance or static field, and optionally continuing with struct fields.
1958 bool IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic, FieldSeqNode** pFldSeq);
1960 // Requires "this" to be the address of an array (the child of a GT_IND labeled with GTF_IND_ARR_INDEX).
1961 // Sets "pArr" to the node representing the array (either an array object pointer, or perhaps a byref to some
1963 // Sets "*pArrayType" to the class handle for the array type.
1964 // Sets "*inxVN" to the value number inferred for the array index.
1965 // Sets "*pFldSeq" to the sequence, if any, of struct fields used to index into the array element.
1966 void ParseArrayAddress(
1967 Compiler* comp, struct ArrayInfo* arrayInfo, GenTreePtr* pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq);
1969 // Helper method for the above.
1970 void ParseArrayAddressWork(
1971 Compiler* comp, ssize_t inputMul, GenTreePtr* pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq);
1973 // Requires "this" to be a GT_IND. Requires the outermost caller to set "*pFldSeq" to nullptr.
1974 // Returns true if it is an array index expression, or access to a (sequence of) struct field(s)
1975 // within a struct array element. If it returns true, sets *arrayInfo to the array information, and sets *pFldSeq
1976 // to the sequence of struct field accesses.
1977 bool ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq);
1979 // Requires "this" to be the address of a (possible) array element (or struct field within that).
1980 // If it is, sets "*arrayInfo" to the array access info, "*pFldSeq" to the sequence of struct fields
1981 // accessed within the array element, and returns true. If not, returns "false".
1982 bool ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq);
1984 // Requires "this" to be an int expression. If it is a sequence of one or more integer constants added together,
1985 // returns true and sets "*pFldSeq" to the sequence of fields with which those constants are annotated.
1986 bool ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq);
1988 // Labels "*this" as an array index expression: label all constants and variables that could contribute, as part of
1989 // an affine expression, to the value of the index.
1990 void LabelIndex(Compiler* comp, bool isConst = true);
1992 // Assumes that "this" occurs in a context where it is being dereferenced as the LHS of an assignment-like
1993 // statement (assignment, initblk, or copyblk). The "width" should be the number of bytes copied by the
1994 // operation. Returns "true" if "this" is an address of (or within)
1995 // a local variable; sets "*pLclVarTree" to that local variable instance; and, if "pIsEntire" is non-null,
1996 // sets "*pIsEntire" to true if this assignment writes the full width of the local.
1997 bool DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire);
1999 #ifdef LEGACY_BACKEND
2000 bool IsRegVar() const
2002 return OperGet() == GT_REG_VAR ? true : false;
2006 return (gtFlags & GTF_REG_VAL) ? true : false;
2008 void SetInReg(bool value = true)
2012 gtFlags |= GTF_REG_VAL;
2016 gtFlags &= ~GTF_REG_VAL;
2019 regNumber GetReg() const
2021 return InReg() ? gtRegNum : REG_NA;
2024 #else // !LEGACY_BACKEND
2025 // For the non-legacy backend, these are only used for dumping.
2026 // The gtRegNum is only valid in LIR, but the dumping methods are not easily
2027 // modified to check this.
2028 CLANG_FORMAT_COMMENT_ANCHOR;
2032 return (GetRegTag() != GT_REGTAG_NONE) ? true : false;
2034 regNumber GetReg() const
2036 return (GetRegTag() != GT_REGTAG_NONE) ? gtRegNum : REG_NA;
2040 static bool IsContained(unsigned flags)
2042 return ((flags & GTF_CONTAINED) != 0);
2048 gtFlags |= GTF_CONTAINED;
2049 assert(isContained());
2052 void ClearContained()
2055 gtFlags &= ~GTF_CONTAINED;
2059 #endif // !LEGACY_BACKEND
// NOTE(review): mangled region (dropped braces/signatures); code kept byte-identical, comments only.
// Flag-test predicates and GT_CNS_INT handle-flag helpers.
2061 bool IsRegVarDeath() const
2063 assert(OperGet() == GT_REG_VAR);
2064 return (gtFlags & GTF_VAR_DEATH) ? true : false;
2066 bool IsRegVarBirth() const
2068 assert(OperGet() == GT_REG_VAR);
2069 return (gtFlags & GTF_REG_BIRTH) ? true : false;
2071 bool IsReverseOp() const
2073 return (gtFlags & GTF_REVERSE_OPS) ? true : false;
2075 bool IsUnsigned() const
2077 return ((gtFlags & GTF_UNSIGNED) != 0);
2080 inline bool IsCnsIntOrI() const;
2082 inline bool IsIntegralConst() const;
2084 inline bool IsIntCnsFitsInI32(); // Constant fits in INT32
2086 inline bool IsCnsFltOrDbl() const;
2088 inline bool IsCnsNonZeroFltOrDbl();
// True iff this GT_CNS_INT carries any GTF_ICON_*_HDL handle flag.
2090 bool IsIconHandle() const
2092 assert(gtOper == GT_CNS_INT);
2093 return (gtFlags & GTF_ICON_HDL_MASK) ? true : false;
// True iff this GT_CNS_INT carries exactly the given handle flag.
2096 bool IsIconHandle(unsigned handleType) const
2098 assert(gtOper == GT_CNS_INT);
2099 assert((handleType & GTF_ICON_HDL_MASK) != 0); // check that handleType is one of the valid GTF_ICON_* values
2100 assert((handleType & ~GTF_ICON_HDL_MASK) == 0);
2101 return (gtFlags & GTF_ICON_HDL_MASK) == handleType;
2104 // Return just the part of the flags corresponding to the GTF_ICON_*_HDL flag. For example,
2105 // GTF_ICON_SCOPE_HDL. The tree node must be a const int, but it might not be a handle, in which
2106 // case we'll return zero.
2107 unsigned GetIconHandleFlag() const
2109 assert(gtOper == GT_CNS_INT);
2110 return (gtFlags & GTF_ICON_HDL_MASK);
2113 // Mark this node as no longer being a handle; clear its GTF_ICON_*_HDL bits.
2114 void ClearIconHandleMask()
2116 assert(gtOper == GT_CNS_INT);
2117 gtFlags &= ~GTF_ICON_HDL_MASK;
2120 // Return true if the two GT_CNS_INT trees have the same handle flag (GTF_ICON_*_HDL).
2121 static bool SameIconHandleFlag(GenTree* t1, GenTree* t2)
2123 return t1->GetIconHandleFlag() == t2->GetIconHandleFlag();
2126 bool IsArgPlaceHolderNode() const
2128 return OperGet() == GT_ARGPLACE;
2132 return OperGet() == GT_CALL;
2134 bool IsStatement() const
2136 return OperGet() == GT_STMT;
2138 inline bool IsHelperCall();
2140 bool IsVarAddr() const;
2141 bool gtOverflow() const;
2142 bool gtOverflowEx() const;
2143 bool gtSetFlags() const;
2144 bool gtRequestSetFlags();
// NOTE(review): mangled region; code kept byte-identical, comments only.
2146 #ifdef LEGACY_BACKEND
2147 // Returns true if the codegen of this tree node
2148 // sets ZF and SF flags.
2149 bool gtSetZSFlags() const
2151 return (gtFlags & GTF_ZSF_SET) != 0;
2156 bool gtIsValid64RsltMul();
2157 static int gtDispFlags(unsigned flags, unsigned debugFlags);
// Cast source/target type accessors (only meaningful on GT_CAST nodes — presumably; confirm at callers).
2161 inline var_types CastFromType();
2162 inline var_types& CastToType();
2164 // Returns "true" iff "this" is a phi-related node (i.e. a GT_PHI_ARG, GT_PHI, or a PhiDefn).
2167 // Returns "true" iff "*this" is an assignment (GT_ASG) tree that defines an SSA name (lcl = phi(...));
2170 // Returns "true" iff "*this" is a statement containing an assignment that defines an SSA name (lcl = phi(...));
2171 bool IsPhiDefnStmt();
2173 // Can't use an assignment operator, because we need the extra "comp" argument
2174 // (to provide the allocator necessary for the VarSet assignment).
2175 // TODO-Cleanup: Not really needed now, w/o liveset on tree nodes
2176 void CopyTo(class Compiler* comp, const GenTree& gt);
2178 // Like the above, excepts assumes copying from small node to small node.
2179 // (Following the code it replaces, it does *not* copy the GenTree fields,
2180 // which CopyTo does.)
2181 void CopyToSmall(const GenTree& gt);
2183 // Because of the fact that we hid the assignment operator of "BitSet" (in DEBUG),
2184 // we can't synthesize an assignment operator.
2185 // TODO-Cleanup: Could change this w/o liveset on tree nodes
2186 // (This is also necessary for the VTable trick.)
// NOTE(review): mangled region; code kept byte-identical, comments only.
// Child/operand traversal API. `GetChild` is the simple positional view; the UseEdges/Operands
// iterators and VisitOperands expand lists and handle call/phi/block-op shapes specially.
2191 // Returns the number of children of the current node.
2192 unsigned NumChildren();
2194 // Requires "childNum < NumChildren()". Returns the "n"th child of "this."
2195 GenTreePtr GetChild(unsigned childNum);
2197 // Returns an iterator that will produce the use edge to each operand of this node. Differs
2198 // from the sequence of nodes produced by a loop over `GetChild` in its handling of call, phi,
2199 // and block op nodes.
2200 GenTreeUseEdgeIterator UseEdgesBegin();
2201 GenTreeUseEdgeIterator UseEdgesEnd();
2203 IteratorPair<GenTreeUseEdgeIterator> UseEdges();
2205 // Returns an iterator that will produce each operand of this node. Differs from the sequence
2206 // of nodes produced by a loop over `GetChild` in its handling of call, phi, and block op
2208 GenTreeOperandIterator OperandsBegin();
2209 GenTreeOperandIterator OperandsEnd();
2211 // Returns a range that will produce the operands of this node in use order.
2212 IteratorPair<GenTreeOperandIterator> Operands();
2214 enum class VisitResult
2220 // Visits each operand of this node. The operand must be either a lambda, function, or functor with the signature
2221 // `GenTree::VisitResult VisitorFunction(GenTree* operand)`. Here is a simple example:
2223 // unsigned operandCount = 0;
2224 // node->VisitOperands([&](GenTree* operand) -> GenTree::VisitResult)
2227 // return GenTree::VisitResult::Continue;
2230 // This function is generally more efficient than the operand iterator and should be preferred over that API for
2231 // hot code, as it affords better opportunities for inlining and achieves shorter dynamic path lengths when
2232 // deciding how operands need to be accessed.
2234 // Note that this function does not respect `GTF_REVERSE_OPS` and `gtEvalSizeFirst`. This is always safe in LIR,
2235 // but may be dangerous in HIR if for some reason you need to visit operands in the order in which they will
2237 template <typename TVisitor>
2238 void VisitOperands(TVisitor visitor);
2241 template <typename TVisitor>
2242 VisitResult VisitListOperands(TVisitor visitor);
2244 template <typename TVisitor>
2245 void VisitBinOpOperands(TVisitor visitor);
2248 bool Precedes(GenTree* other);
2250 // The maximum possible # of children of any node.
2251 static const int MAX_CHILDREN = 6;
// NOTE(review): mangled region; code kept byte-identical, comments only.
2253 bool IsReuseRegVal() const
2255 // This can be extended to non-constant nodes, but not to local or indir nodes.
2256 if (OperIsConst() && ((gtFlags & GTF_REUSE_REG_VAL) != 0))
2262 void SetReuseRegVal()
2264 assert(OperIsConst());
2265 gtFlags |= GTF_REUSE_REG_VAL;
2267 void ResetReuseRegVal()
2269 assert(OperIsConst());
2270 gtFlags &= ~GTF_REUSE_REG_VAL;
// Marks an indirection (or array-length) node as either faulting (GTF_EXCEPT) or non-faulting.
2273 void SetIndirExceptionFlags(Compiler* comp)
2275 assert(OperIsIndirOrArrLength());
2276 gtFlags |= OperMayThrow(comp) ? GTF_EXCEPT : GTF_IND_NONFAULTING;
2279 #if MEASURE_NODE_SIZE
2280 static void DumpNodeSizes(FILE* fp);
// Copy-assignment is deliberately unusable: nodes must be copied via CopyTo/CopyToSmall.
2286 GenTree& operator=(const GenTree& gt)
2288 assert(!"Don't copy");
2293 #if DEBUGGABLE_GENTREE
2294 // In DEBUG builds, add a dummy virtual method, to give the debugger run-time type information.
2295 virtual void DummyVirt()
2299 typedef void* VtablePtr;
2301 VtablePtr GetVtableForOper(genTreeOps oper);
2302 void SetVtableForOper(genTreeOps oper);
2304 static VtablePtr s_vtablesForOpers[GT_COUNT];
2305 static VtablePtr s_vtableForOp;
2306 #endif // DEBUGGABLE_GENTREE
// Placement-new: GenTree nodes are allocated from the Compiler's arena, never from the global heap.
2309 inline void* operator new(size_t sz, class Compiler*, genTreeOps oper);
2311 inline GenTree(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false));
// NOTE(review): mangled region — the iterator's enum values and data members (m_node/m_edge/m_argList/
// m_state) are partially elided by the extraction. Code kept byte-identical, comments only.
2314 //------------------------------------------------------------------------
2315 // GenTreeUseEdgeIterator: an iterator that will produce each use edge of a GenTree node in the order in which
2318 //    The use edges of a node may not correspond exactly to the nodes on the other ends of its use edges: in
2319 //    particular, GT_LIST nodes are expanded into their component parts. This differs from the behavior of
2320 //    GenTree::GetChildPointer(), which does not expand lists.
2322 //    Operand iteration is common enough in the back end of the compiler that the implementation of this type has
2323 //    traded some simplicity for speed:
2324 //    - As much work as is reasonable is done in the constructor rather than during operand iteration
2325 //    - Node-specific functionality is handled by a small class of "advance" functions called by operator++
2326 //      rather than making operator++ itself handle all nodes
2327 //    - Some specialization has been performed for specific node types/shapes (e.g. the advance function for
2328 //      binary nodes is specialized based on whether or not the node has the GTF_REVERSE_OPS flag set)
2330 //    Valid values of this type may be obtained by calling `GenTree::UseEdgesBegin` and `GenTree::UseEdgesEnd`.
2332 class GenTreeUseEdgeIterator final
2334 friend class GenTreeOperandIterator;
2335 friend GenTreeUseEdgeIterator GenTree::UseEdgesBegin();
2336 friend GenTreeUseEdgeIterator GenTree::UseEdgesEnd();
2343 CALL_CONTROL_EXPR = 3,
2349 typedef void (GenTreeUseEdgeIterator::*AdvanceFn)();
// m_advance points at the node-shape-specific stepping function selected at construction time.
2351 AdvanceFn m_advance;
2357 GenTreeUseEdgeIterator(GenTree* node);
2359 // Advance functions for special nodes
2360 void AdvanceCmpXchg();
2361 void AdvanceBoundsChk();
2362 void AdvanceArrElem();
2363 void AdvanceArrOffset();
2364 void AdvanceDynBlk();
2365 void AdvanceStoreDynBlk();
2367 template <bool ReverseOperands>
2368 void AdvanceBinOp();
2369 void SetEntryStateForBinOp();
2371 // An advance function for list-like nodes (Phi, SIMDIntrinsicInitN, FieldList)
2373 void SetEntryStateForList(GenTree* list);
2375 // The advance function for call nodes
2376 template <int state>
2382 GenTreeUseEdgeIterator();
// m_state == -1 is the "end" sentinel; dereferencing an end iterator is asserted against.
2384 inline GenTree** operator*()
2386 assert(m_state != -1);
2390 inline GenTree** operator->()
2392 assert(m_state != -1);
2396 inline bool operator==(const GenTreeUseEdgeIterator& other) const
2398 if (m_state == -1 || other.m_state == -1)
2400 return m_state == other.m_state;
2403 return (m_node == other.m_node) && (m_edge == other.m_edge) && (m_argList == other.m_argList) &&
2404 (m_state == other.m_state);
2407 inline bool operator!=(const GenTreeUseEdgeIterator& other) const
2409 return !(operator==(other));
2412 GenTreeUseEdgeIterator& operator++();
2415 //------------------------------------------------------------------------
2416 // GenTreeOperandIterator: an iterator that will produce each operand of a
2417 // GenTree node in the order in which they are
2418 // used. This uses `GenTreeUseEdgeIterator` under
2419 // the covers and comes with the same caveats
2420 // w.r.t. `GetChild`.
2422 // Note: valid values of this type may be obtained by calling
2423 // `GenTree::OperandsBegin` and `GenTree::OperandsEnd`.
2424 class GenTreeOperandIterator final
2426 friend GenTreeOperandIterator GenTree::OperandsBegin();
2427 friend GenTreeOperandIterator GenTree::OperandsEnd();
2429 GenTreeUseEdgeIterator m_useEdges;
2431 GenTreeOperandIterator(GenTree* node) : m_useEdges(node)
2436 GenTreeOperandIterator() : m_useEdges()
2440 inline GenTree* operator*()
2442 return *(*m_useEdges);
2445 inline GenTree* operator->()
2447 return *(*m_useEdges);
2450 inline bool operator==(const GenTreeOperandIterator& other) const
2452 return m_useEdges == other.m_useEdges;
2455 inline bool operator!=(const GenTreeOperandIterator& other) const
2457 return !(operator==(other));
2460 inline GenTreeOperandIterator& operator++()
2467 /*****************************************************************************/
2468 // In the current design, we never instantiate GenTreeUnOp: it exists only to be
2469 // used as a base class. For unary operators, we instantiate GenTreeOp, with a NULL second
2470 // argument. We check that this is true dynamically. We could tighten this and get static
2471 // checking, but that would entail accessing the first child of a unary operator via something
2472 // like gtUnOp.gtOp1 instead of gtOp.gtOp1.
2473 struct GenTreeUnOp : public GenTree
2478 GenTreeUnOp(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false))
2479 : GenTree(oper, type DEBUGARG(largeNode)), gtOp1(nullptr)
2483 GenTreeUnOp(genTreeOps oper, var_types type, GenTreePtr op1 DEBUGARG(bool largeNode = false))
2484 : GenTree(oper, type DEBUGARG(largeNode)), gtOp1(op1)
2486 assert(op1 != nullptr || NullOp1Legal());
2488 { // Propagate effects flags from child.
2489 gtFlags |= op1->gtFlags & GTF_ALL_EFFECT;
2493 #if DEBUGGABLE_GENTREE
2494 GenTreeUnOp() : GenTree(), gtOp1(nullptr)
2500 struct GenTreeOp : public GenTreeUnOp
2504 GenTreeOp(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2 DEBUGARG(bool largeNode = false))
2505 : GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode)), gtOp2(op2)
2507 // comparisons are always integral types
2508 assert(!GenTree::OperIsCompare(oper) || varTypeIsIntegral(type));
2509 // Binary operators, with a few exceptions, require a non-nullptr
2511 assert(op2 != nullptr || NullOp2Legal());
2512 // Unary operators, on the other hand, require a null second argument.
2513 assert(!OperIsUnary(oper) || op2 == nullptr);
2514 // Propagate effects flags from child. (UnOp handled this for first child.)
2517 gtFlags |= op2->gtFlags & GTF_ALL_EFFECT;
2521 // A small set of types are unary operators with optional arguments. We use
2522 // this constructor to build those.
2523 GenTreeOp(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false))
2524 : GenTreeUnOp(oper, type DEBUGARG(largeNode)), gtOp2(nullptr)
2526 // Unary operators with optional arguments:
2527 assert(oper == GT_NOP || oper == GT_RETURN || oper == GT_RETFILT || OperIsBlk(oper));
2530 #if DEBUGGABLE_GENTREE
2531 GenTreeOp() : GenTreeUnOp(), gtOp2(nullptr)
2537 struct GenTreeVal : public GenTree
2541 GenTreeVal(genTreeOps oper, var_types type, ssize_t val) : GenTree(oper, type), gtVal1(val)
2544 #if DEBUGGABLE_GENTREE
2545 GenTreeVal() : GenTree()
2551 struct GenTreeIntConCommon : public GenTree
2553 inline INT64 LngValue();
2554 inline void SetLngValue(INT64 val);
2555 inline ssize_t IconValue();
2556 inline void SetIconValue(ssize_t val);
2557 inline INT64 IntegralValue();
2559 GenTreeIntConCommon(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false))
2560 : GenTree(oper, type DEBUGARG(largeNode))
2564 bool FitsInI8() // IconValue() fits into 8-bit signed storage
2566 return FitsInI8(IconValue());
2569 static bool FitsInI8(ssize_t val) // Constant fits into 8-bit signed storage
2571 return (int8_t)val == val;
2574 bool FitsInI32() // IconValue() fits into 32-bit signed storage
2576 return FitsInI32(IconValue());
2579 static bool FitsInI32(ssize_t val) // Constant fits into 32-bit signed storage
2581 #ifdef _TARGET_64BIT_
2582 return (int32_t)val == val;
2588 bool ImmedValNeedsReloc(Compiler* comp);
2589 bool ImmedValCanBeFolded(Compiler* comp, genTreeOps op);
2591 #ifdef _TARGET_XARCH_
2592 bool FitsInAddrBase(Compiler* comp);
2593 bool AddrNeedsReloc(Compiler* comp);
2596 #if DEBUGGABLE_GENTREE
2597 GenTreeIntConCommon() : GenTree()
2603 // node representing a read from a physical register
2604 struct GenTreePhysReg : public GenTree
2606 // physregs need a field beyond gtRegNum because
2607 // gtRegNum indicates the destination (and can be changed)
2608 // whereas reg indicates the source
2610 GenTreePhysReg(regNumber r, var_types type = TYP_I_IMPL) : GenTree(GT_PHYSREG, type), gtSrcReg(r)
2613 #if DEBUGGABLE_GENTREE
2614 GenTreePhysReg() : GenTree()
2620 #ifndef LEGACY_BACKEND
2621 // gtJumpTable - Switch Jump Table
2623 // This node stores a DWORD constant that represents the
2624 // absolute address of a jump table for switches. The code
2625 // generator uses this table to code the destination for every case
2626 // in an array of addresses which starting position is stored in
2628 struct GenTreeJumpTable : public GenTreeIntConCommon
2630 ssize_t gtJumpTableAddr;
2632 GenTreeJumpTable(var_types type DEBUGARG(bool largeNode = false))
2633 : GenTreeIntConCommon(GT_JMPTABLE, type DEBUGARG(largeNode))
2636 #if DEBUGGABLE_GENTREE
2637 GenTreeJumpTable() : GenTreeIntConCommon()
2642 #endif // !LEGACY_BACKEND
2644 /* gtIntCon -- integer constant (GT_CNS_INT) */
2645 struct GenTreeIntCon : public GenTreeIntConCommon
2648 * This is the GT_CNS_INT struct definition.
2649 * It's used to hold for both int constants and pointer handle constants.
2650 * For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes
2651 * For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
2652 * In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG
2654 ssize_t gtIconVal; // Must overlap and have the same offset with the gtIconVal field in GenTreeLngCon below.
2656 /* The InitializeArray intrinsic needs to go back to the newarray statement
2657 to find the class handle of the array so that we can get its size. However,
2658 in ngen mode, the handle in that statement does not correspond to the compile
2659 time handle (rather it lets you get a handle at run-time). In that case, we also
2660 need to store a compile time handle, which goes in this gtCompileTimeHandle field.
2662 ssize_t gtCompileTimeHandle;
2664 // TODO-Cleanup: It's not clear what characterizes the cases where the field
2665 // above is used. It may be that its uses and those of the "gtFieldSeq" field below
2666 // are mutually exclusive, and they could be put in a union. Or else we should separate
2667 // this type into three subtypes.
2669 // If this constant represents the offset of one or more fields, "gtFieldSeq" represents that
2670 // sequence of fields.
2671 FieldSeqNode* gtFieldSeq;
2673 GenTreeIntCon(var_types type, ssize_t value DEBUGARG(bool largeNode = false))
2674 : GenTreeIntConCommon(GT_CNS_INT, type DEBUGARG(largeNode))
2676 , gtCompileTimeHandle(0)
2677 , gtFieldSeq(FieldSeqStore::NotAField())
2681 GenTreeIntCon(var_types type, ssize_t value, FieldSeqNode* fields DEBUGARG(bool largeNode = false))
2682 : GenTreeIntConCommon(GT_CNS_INT, type DEBUGARG(largeNode))
2684 , gtCompileTimeHandle(0)
2685 , gtFieldSeq(fields)
2687 assert(fields != nullptr);
2690 void FixupInitBlkValue(var_types asgType);
2692 #ifdef _TARGET_64BIT_
2693 void TruncateOrSignExtend32()
2695 if (gtFlags & GTF_UNSIGNED)
2697 gtIconVal = UINT32(gtIconVal);
2701 gtIconVal = INT32(gtIconVal);
2704 #endif // _TARGET_64BIT_
2706 #if DEBUGGABLE_GENTREE
2707 GenTreeIntCon() : GenTreeIntConCommon()
2713 /* gtLngCon -- long constant (GT_CNS_LNG) */
2715 struct GenTreeLngCon : public GenTreeIntConCommon
2717 INT64 gtLconVal; // Must overlap and have the same offset with the gtIconVal field in GenTreeIntCon above.
2720 return (INT32)(gtLconVal & 0xffffffff);
2725 return (INT32)(gtLconVal >> 32);
2728 GenTreeLngCon(INT64 val) : GenTreeIntConCommon(GT_CNS_NATIVELONG, TYP_LONG)
2732 #if DEBUGGABLE_GENTREE
2733 GenTreeLngCon() : GenTreeIntConCommon()
2739 inline INT64 GenTreeIntConCommon::LngValue()
2741 #ifndef _TARGET_64BIT_
2742 assert(gtOper == GT_CNS_LNG);
2743 return AsLngCon()->gtLconVal;
2749 inline void GenTreeIntConCommon::SetLngValue(INT64 val)
2751 #ifndef _TARGET_64BIT_
2752 assert(gtOper == GT_CNS_LNG);
2753 AsLngCon()->gtLconVal = val;
2755 // Compile time asserts that these two fields overlap and have the same offsets: gtIconVal and gtLconVal
2756 C_ASSERT(offsetof(GenTreeLngCon, gtLconVal) == offsetof(GenTreeIntCon, gtIconVal));
2757 C_ASSERT(sizeof(AsLngCon()->gtLconVal) == sizeof(AsIntCon()->gtIconVal));
2759 SetIconValue(ssize_t(val));
2763 inline ssize_t GenTreeIntConCommon::IconValue()
2765 assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target!
2766 return AsIntCon()->gtIconVal;
2769 inline void GenTreeIntConCommon::SetIconValue(ssize_t val)
2771 assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target!
2772 AsIntCon()->gtIconVal = val;
2775 inline INT64 GenTreeIntConCommon::IntegralValue()
2777 #ifdef _TARGET_64BIT_
2780 return gtOper == GT_CNS_LNG ? LngValue() : (INT64)IconValue();
2781 #endif // _TARGET_64BIT_
2784 /* gtDblCon -- double constant (GT_CNS_DBL) */
2786 struct GenTreeDblCon : public GenTree
2790 bool isBitwiseEqual(GenTreeDblCon* other)
2792 unsigned __int64 bits = *(unsigned __int64*)(>DconVal);
2793 unsigned __int64 otherBits = *(unsigned __int64*)(&(other->gtDconVal));
2794 return (bits == otherBits);
2797 GenTreeDblCon(double val) : GenTree(GT_CNS_DBL, TYP_DOUBLE), gtDconVal(val)
2800 #if DEBUGGABLE_GENTREE
2801 GenTreeDblCon() : GenTree()
2807 /* gtStrCon -- string constant (GT_CNS_STR) */
2809 struct GenTreeStrCon : public GenTree
2812 CORINFO_MODULE_HANDLE gtScpHnd;
2814 // Because this node can come from an inlined method we need to
2815 // have the scope handle, since it will become a helper call.
2816 GenTreeStrCon(unsigned sconCPX, CORINFO_MODULE_HANDLE mod DEBUGARG(bool largeNode = false))
2817 : GenTree(GT_CNS_STR, TYP_REF DEBUGARG(largeNode)), gtSconCPX(sconCPX), gtScpHnd(mod)
2820 #if DEBUGGABLE_GENTREE
2821 GenTreeStrCon() : GenTree()
2827 // Common supertype of LCL_VAR, LCL_FLD, REG_VAR, PHI_ARG
2828 // This inherits from UnOp because lclvar stores are Unops
2829 struct GenTreeLclVarCommon : public GenTreeUnOp
2832 unsigned _gtLclNum; // The local number. An index into the Compiler::lvaTable array.
2833 unsigned _gtSsaNum; // The SSA number.
2836 GenTreeLclVarCommon(genTreeOps oper, var_types type, unsigned lclNum DEBUGARG(bool largeNode = false))
2837 : GenTreeUnOp(oper, type DEBUGARG(largeNode))
2842 unsigned GetLclNum() const
2846 __declspec(property(get = GetLclNum)) unsigned gtLclNum;
2848 void SetLclNum(unsigned lclNum)
2851 _gtSsaNum = SsaConfig::RESERVED_SSA_NUM;
2854 unsigned GetSsaNum() const
2858 __declspec(property(get = GetSsaNum)) unsigned gtSsaNum;
2860 void SetSsaNum(unsigned ssaNum)
2867 return (gtSsaNum != SsaConfig::RESERVED_SSA_NUM);
2870 #if DEBUGGABLE_GENTREE
2871 GenTreeLclVarCommon() : GenTreeUnOp()
2877 // gtLclVar -- load/store/addr of local variable
2879 struct GenTreeLclVar : public GenTreeLclVarCommon
2881 IL_OFFSET gtLclILoffs; // instr offset of ref (only for debug info)
2883 GenTreeLclVar(var_types type, unsigned lclNum, IL_OFFSET ilOffs DEBUGARG(bool largeNode = false))
2884 : GenTreeLclVarCommon(GT_LCL_VAR, type, lclNum DEBUGARG(largeNode)), gtLclILoffs(ilOffs)
2888 GenTreeLclVar(genTreeOps oper, var_types type, unsigned lclNum, IL_OFFSET ilOffs DEBUGARG(bool largeNode = false))
2889 : GenTreeLclVarCommon(oper, type, lclNum DEBUGARG(largeNode)), gtLclILoffs(ilOffs)
2891 assert(OperIsLocal(oper) || OperIsLocalAddr(oper));
2894 #if DEBUGGABLE_GENTREE
2895 GenTreeLclVar() : GenTreeLclVarCommon()
2901 // gtLclFld -- load/store/addr of local variable field
2903 struct GenTreeLclFld : public GenTreeLclVarCommon
2905 unsigned gtLclOffs; // offset into the variable to access
2907 FieldSeqNode* gtFieldSeq; // This LclFld node represents some sequences of accesses.
2909 // old/FE style constructor where load/store/addr share same opcode
2910 GenTreeLclFld(var_types type, unsigned lclNum, unsigned lclOffs)
2911 : GenTreeLclVarCommon(GT_LCL_FLD, type, lclNum), gtLclOffs(lclOffs), gtFieldSeq(nullptr)
2913 assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]);
2916 GenTreeLclFld(genTreeOps oper, var_types type, unsigned lclNum, unsigned lclOffs)
2917 : GenTreeLclVarCommon(oper, type, lclNum), gtLclOffs(lclOffs), gtFieldSeq(nullptr)
2919 assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]);
2921 #if DEBUGGABLE_GENTREE
2922 GenTreeLclFld() : GenTreeLclVarCommon()
2928 struct GenTreeRegVar : public GenTreeLclVarCommon
2930 // TODO-Cleanup: Note that the base class GenTree already has a gtRegNum field.
2931 // It's not clear exactly why a GT_REG_VAR has a separate field. When
2932 // GT_REG_VAR is created, the two are identical. It appears that they may
2933 // or may not remain so. In particular, there is a comment in stackfp.cpp
2936 // There used to be an assertion: assert(src->gtRegNum == src->gtRegVar.gtRegNum, ...)
2937 // here, but there's actually no reason to assume that. AFAICT, for FP vars under stack FP,
2938 // src->gtRegVar.gtRegNum is the allocated stack pseudo-register, but src->gtRegNum is the
2939 // FP stack position into which that is loaded to represent a particular use of the variable.
2941 // It might be the case that only for stackfp do they ever differ.
2943 // The following might be possible: the GT_REG_VAR node has a last use prior to a complex
2944 // subtree being evaluated. It could then be spilled from the register. Later,
2945 // it could be unspilled into a different register, which would be recorded at
2946 // the unspill time in the GenTree::gtRegNum, whereas GenTreeRegVar::gtRegNum
2947 // is left alone. It's not clear why that is useful.
2949 // Assuming there is a particular use, like stack fp, that requires it, maybe we
2950 // can get rid of GT_REG_VAR and just leave it as GT_LCL_VAR, using the base class gtRegNum field.
2951 // If we need it for stackfp, we could add a GenTreeStackFPRegVar type, which carries both the
2952 // pieces of information, in a clearer and more specific way (in particular, with
2953 // a different member name).
2957 regNumberSmall _gtRegNum;
2960 GenTreeRegVar(var_types type, unsigned lclNum, regNumber regNum) : GenTreeLclVarCommon(GT_REG_VAR, type, lclNum)
2965 // The register number is stored in a small format (8 bits), but the getters return and the setters take
2966 // a full-size (unsigned) format, to localize the casts here.
2968 __declspec(property(get = GetRegNum, put = SetRegNum)) regNumber gtRegNum;
2970 regNumber GetRegNum() const
2972 return (regNumber)_gtRegNum;
2975 void SetRegNum(regNumber reg)
2977 _gtRegNum = (regNumberSmall)reg;
2978 assert(_gtRegNum == reg);
2981 #if DEBUGGABLE_GENTREE
2982 GenTreeRegVar() : GenTreeLclVarCommon()
2988 /* gtCast -- conversion to a different type (GT_CAST) */
2990 struct GenTreeCast : public GenTreeOp
2992 GenTreePtr& CastOp()
2996 var_types gtCastType;
2998 GenTreeCast(var_types type, GenTreePtr op, var_types castType DEBUGARG(bool largeNode = false))
2999 : GenTreeOp(GT_CAST, type, op, nullptr DEBUGARG(largeNode)), gtCastType(castType)
3002 #if DEBUGGABLE_GENTREE
3003 GenTreeCast() : GenTreeOp()
3009 // GT_BOX nodes are place markers for boxed values. The "real" tree
3010 // for most purposes is in gtBoxOp.
3011 struct GenTreeBox : public GenTreeUnOp
3013 // An expanded helper call to implement the "box" if we don't get
3014 // rid of it any other way. Must be in same position as op1.
3020 // This is the statement that contains the assignment tree when the node is an inlined GT_BOX on a value
3022 GenTreePtr gtAsgStmtWhenInlinedBoxValue;
3023 // And this is the statement that copies from the value being boxed to the box payload
3024 GenTreePtr gtCopyStmtWhenInlinedBoxValue;
3026 GenTreeBox(var_types type,
3028 GenTreePtr asgStmtWhenInlinedBoxValue,
3029 GenTreePtr copyStmtWhenInlinedBoxValue)
3030 : GenTreeUnOp(GT_BOX, type, boxOp)
3031 , gtAsgStmtWhenInlinedBoxValue(asgStmtWhenInlinedBoxValue)
3032 , gtCopyStmtWhenInlinedBoxValue(copyStmtWhenInlinedBoxValue)
3035 #if DEBUGGABLE_GENTREE
3036 GenTreeBox() : GenTreeUnOp()
3042 /* gtField -- data member ref (GT_FIELD) */
3044 struct GenTreeField : public GenTree
3046 GenTreePtr gtFldObj;
3047 CORINFO_FIELD_HANDLE gtFldHnd;
3049 bool gtFldMayOverlap;
3050 #ifdef FEATURE_READYTORUN_COMPILER
3051 CORINFO_CONST_LOOKUP gtFieldLookup;
3054 GenTreeField(var_types type) : GenTree(GT_FIELD, type)
3056 gtFldMayOverlap = false;
3058 #if DEBUGGABLE_GENTREE
3059 GenTreeField() : GenTree()
3065 // Represents the Argument list of a call node, as a Lisp-style linked list.
3066 // (Originally I had hoped that this could have *only* the m_arg/m_rest fields, but it turns out
3067 // that enough of the GenTree mechanism is used that it makes sense just to make it a subtype. But
3068 // note that in many ways, this is *not* a "real" node of the tree, but rather a mechanism for
3069 // giving call nodes a flexible number of children. GenTreeArgListNodes never evaluate to registers,
3072 // Note that while this extends GenTreeOp, it is *not* an EXOP. We don't add any new fields, and one
3073 // is free to allocate a GenTreeOp of type GT_LIST. If you use this type, you get the convenient Current/Rest
3074 // method names for the arguments.
3075 struct GenTreeArgList : public GenTreeOp
3077 GenTreePtr& Current()
3081 GenTreeArgList*& Rest()
3083 assert(gtOp2 == nullptr || gtOp2->OperIsAnyList());
3084 return *reinterpret_cast<GenTreeArgList**>(>Op2);
3087 #if DEBUGGABLE_GENTREE
3088 GenTreeArgList() : GenTreeOp()
3093 GenTreeArgList(GenTreePtr arg) : GenTreeArgList(arg, nullptr)
3097 GenTreeArgList(GenTreePtr arg, GenTreeArgList* rest) : GenTreeArgList(GT_LIST, arg, rest)
3101 GenTreeArgList(genTreeOps oper, GenTreePtr arg, GenTreeArgList* rest) : GenTreeOp(oper, TYP_VOID, arg, rest)
3103 assert(OperIsAnyList(oper));
3104 assert((arg != nullptr) && arg->IsValidCallArgument());
3105 gtFlags |= arg->gtFlags & GTF_ALL_EFFECT;
3106 if (rest != nullptr)
3108 gtFlags |= rest->gtFlags & GTF_ALL_EFFECT;
3113 // Represents a list of fields constituting a struct, when it is passed as an argument.
3114 // The first field of the struct is marked with the GTF_FIELD_LIST_HEAD flag, and
3115 // in LIR form it is the only member of the list that is threaded into the execution
3117 // It differs from the GenTreeArgList in a couple of ways:
3118 // - The entire list represents a single argument.
3119 // - It contains additional fields to provide the offset and type of the field.
3121 struct GenTreeFieldList : public GenTreeArgList
3123 unsigned gtFieldOffset;
3124 var_types gtFieldType;
3126 bool IsFieldListHead() const
3128 return (gtFlags & GTF_FIELD_LIST_HEAD) != 0;
3131 #if DEBUGGABLE_GENTREE
3132 GenTreeFieldList() : GenTreeArgList()
3137 GenTreeFieldList*& Rest()
3139 assert(gtOp2 == nullptr || gtOp2->OperGet() == GT_FIELD_LIST);
3140 return *reinterpret_cast<GenTreeFieldList**>(>Op2);
3143 GenTreeFieldList(GenTreePtr arg, unsigned fieldOffset, var_types fieldType, GenTreeFieldList* prevList)
3144 : GenTreeArgList(GT_FIELD_LIST, arg, nullptr)
3146 // While GT_FIELD_LIST can be in a GT_LIST, GT_FIELD_LISTs cannot be nested or have GT_LISTs.
3147 assert(!arg->OperIsAnyList());
3148 gtFieldOffset = fieldOffset;
3149 gtFieldType = fieldType;
3151 if (prevList == nullptr)
3153 gtFlags |= GTF_FIELD_LIST_HEAD;
3154 #ifndef LEGACY_BACKEND
3155 // A GT_FIELD_LIST head is always contained. Other nodes return false from IsValue()
3156 // and should not be marked as contained.
3162 prevList->gtOp2 = this;
3167 // There was quite a bit of confusion in the code base about which of gtOp1 and gtOp2 was the
3168 // 'then' and 'else' clause of a colon node. Adding these accessors, while not enforcing anything,
3169 // at least *allows* the programmer to be obviously correct.
3170 // However, these conventions seem backward.
3171 // TODO-Cleanup: If we could get these accessors used everywhere, then we could switch them.
3172 struct GenTreeColon : public GenTreeOp
3174 GenTreePtr& ThenNode()
3178 GenTreePtr& ElseNode()
3183 #if DEBUGGABLE_GENTREE
3184 GenTreeColon() : GenTreeOp()
3189 GenTreeColon(var_types typ, GenTreePtr thenNode, GenTreePtr elseNode) : GenTreeOp(GT_COLON, typ, elseNode, thenNode)
3194 // gtCall -- method call (GT_CALL)
3195 enum class InlineObservation;
3197 // Return type descriptor of a GT_CALL node.
3198 // x64 Unix, Arm64, Arm32 and x86 allow a value to be returned in multiple
3199 // registers. For such calls this struct provides the following info
3200 // on their return type
3201 // - type of value returned in each return register
3202 // - ABI return register numbers in which the value is returned
3203 // - count of return registers in which the value is returned
3205 // TODO-ARM: Update this to meet the needs of Arm64 and Arm32
3207 // TODO-AllArch: Right now it is used for describing multi-reg returned types.
3208 // Eventually we would want to use it for describing even single-reg
3209 // returned types (e.g. structs returned in single register x64/arm).
3210 // This would allow us not to lie or normalize single struct return
3211 // values in importer/morph.
// ReturnTypeDesc -- describes how a call's return value is split across the
// ABI return registers: the var_types of each register, lazily derivable
// register numbers/masks, and whether the return is multi-reg at all.
struct ReturnTypeDesc
    // Per-register return type; slots not used for the return hold TYP_UNKNOWN.
    var_types m_regType[MAX_RET_REG_COUNT];
    // Initialize the Return Type Descriptor for a method that returns a struct type
    void InitializeStructReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd);
    // Initialize the Return Type Descriptor for a method that returns a TYP_LONG
    // Only needed for X86
    void InitializeLongReturnType(Compiler* comp);
    // Reset type descriptor to defaults
        // Mark every slot unused; GetReturnRegCount() relies on this sentinel.
        for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
            m_regType[i] = TYP_UNKNOWN;
    // NOTE: we only use this function when writing out IR dumps. These dumps may take place before the ReturnTypeDesc
    // has been initialized.
    unsigned TryGetReturnRegCount() const
        // NOTE(review): relies on an 'm_inited' member declared in an elided portion of this struct.
        return m_inited ? GetReturnRegCount() : 0;
    //--------------------------------------------------------------------------------------------
    // GetReturnRegCount: Get the count of return registers in which the return value is returned.
    //
    // Return Value:
    //    Count of return registers.
    //    Returns 0 if the return type is not returned in registers.
    unsigned GetReturnRegCount() const
        // Count leading non-TYP_UNKNOWN slots; the first sentinel terminates the scan.
        for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
            if (m_regType[i] == TYP_UNKNOWN)
        // Any remaining elements in m_regTypes[] should also be TYP_UNKNOWN
        // (i.e. used slots must be contiguous from index 0).
        for (unsigned i = regCount + 1; i < MAX_RET_REG_COUNT; ++i)
            assert(m_regType[i] == TYP_UNKNOWN);
    //-----------------------------------------------------------------------
    // IsMultiRegRetType: check whether the type is returned in multiple
    // return registers.
    //
    // Return Value:
    //    Returns true if the type is returned in multiple return registers.
    //
    // Note that we only have to examine the first two values to determine this
    // (slots are filled contiguously from index 0 -- see GetReturnRegCount).
    bool IsMultiRegRetType() const
        if (MAX_RET_REG_COUNT < 2)
        return ((m_regType[0] != TYP_UNKNOWN) && (m_regType[1] != TYP_UNKNOWN));
    //--------------------------------------------------------------------------
    // GetReturnRegType: Get var_type of the return register specified by index.
    //
    // Arguments:
    //    index - Index of the return register.
    //            First return register will have an index 0 and so on.
    //
    // Return Value:
    //    var_type of the return register specified by its index.
    //    asserts if the index does not have a valid register return type.
    var_types GetReturnRegType(unsigned index)
        var_types result = m_regType[index];
        assert(result != TYP_UNKNOWN);
    // Get ith ABI return register
    regNumber GetABIReturnReg(unsigned idx);
    // Get reg mask of ABI return registers
    regMaskTP GetABIReturnRegs();
// GenTreeCall -- a method call (GT_CALL). 'final' so no further subclassing.
struct GenTreeCall final : public GenTree
    GenTreePtr      gtCallObjp;     // The instance argument ('this' pointer)
    GenTreeArgList* gtCallArgs;     // The list of arguments in original evaluation order
    GenTreeArgList* gtCallLateArgs; // On x86:     The register arguments in an optimal order
                                    // On ARM/x64: - also includes any outgoing arg space arguments
                                    //             - that were evaluated into a temp LclVar
    fgArgInfo* fgArgInfo;           // Per-call argument placement info (built during morph)
#if !FEATURE_FIXED_OUT_ARGS
    int regArgListCount;            // Count of register arguments (stack-arg targets only)
    // TODO-Throughput: Revisit this (this used to be only defined if
    // FEATURE_FIXED_OUT_ARGS was enabled, so this makes GenTreeCall 4 bytes bigger on x86).
    CORINFO_SIG_INFO* callSig; // Used by tail calls and to register callsites with the EE
#ifdef LEGACY_BACKEND
    regMaskTP gtCallRegUsedMask; // mask of registers used to pass parameters
#endif // LEGACY_BACKEND
#if FEATURE_MULTIREG_RET
    // State required to support multi-reg returning call nodes.
    // For now it is enabled only for x64 unix.
    //
    // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns.
    ReturnTypeDesc gtReturnTypeDesc;
    // gtRegNum would always be the first return reg.
    // The following array holds the other reg numbers of multi-reg return.
    regNumberSmall gtOtherRegs[MAX_RET_REG_COUNT - 1];
    // GTF_SPILL or GTF_SPILLED flag on a multi-reg call node indicates that one or
    // more of its result regs are in that state. The spill flag of each of the
    // return register is stored here. We only need 2 bits per returned register,
    // so this is treated as a 2-bit array. No architecture needs more than 8 bits.
    static const unsigned PACKED_GTF_SPILL   = 1;
    static const unsigned PACKED_GTF_SPILLED = 2;
    unsigned char gtSpillFlags; // 2-bit-per-register packed spill state (see above)
#endif // FEATURE_MULTIREG_RET
3387 //-----------------------------------------------------------------------
3388 // GetReturnTypeDesc: get the type descriptor of return value of the call
3394 // Type descriptor of the value returned by call
3397 // Right now implemented only for x64 unix and yet to be
3398 // implemented for other multi-reg target arch (Arm64/Arm32/x86).
3400 // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns.
3401 ReturnTypeDesc* GetReturnTypeDesc()
3403 #if FEATURE_MULTIREG_RET
3404 return >ReturnTypeDesc;
    //---------------------------------------------------------------------------
    // GetRegNumByIdx: get ith return register allocated to this call node.
    //
    // Arguments:
    //     idx   -   index of the return register
    //
    // Return Value:
    //     Return regNumber of ith return register of call node.
    //     Returns REG_NA if there is no valid return register for the given index.
    //
    // Note: index 0 is the node's own gtRegNum; indices >= 1 come from gtOtherRegs
    // (see the gtOtherRegs declaration above).
    regNumber GetRegNumByIdx(unsigned idx) const
        assert(idx < MAX_RET_REG_COUNT);
#if FEATURE_MULTIREG_RET
        // idx - 1 because gtOtherRegs[] only stores the registers beyond the first.
        return (regNumber)gtOtherRegs[idx - 1];
    //----------------------------------------------------------------------
    // SetRegNumByIdx: set ith return register of this call node
    //
    // Arguments:
    //    idx  -  index of the return register
    void SetRegNumByIdx(regNumber reg, unsigned idx)
        assert(idx < MAX_RET_REG_COUNT);
#if FEATURE_MULTIREG_RET
        gtOtherRegs[idx - 1] = (regNumberSmall)reg;
        // regNumberSmall is a narrow type; verify the value survived the truncation.
        assert(gtOtherRegs[idx - 1] == reg);
    //----------------------------------------------------------------------------
    // ClearOtherRegs: clear multi-reg state to indicate no regs are allocated
    void ClearOtherRegs()
#if FEATURE_MULTIREG_RET
        for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
            gtOtherRegs[i] = REG_NA;
    //----------------------------------------------------------------------------
    // CopyOtherRegs: copy multi-reg state from the given call node to this node
    //
    // Arguments:
    //    fromCall  -  GenTreeCall node from which to copy multi-reg state
    void CopyOtherRegs(GenTreeCall* fromCall)
#if FEATURE_MULTIREG_RET
        for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
            this->gtOtherRegs[i] = fromCall->gtOtherRegs[i];
    // Get reg mask of all the valid registers of gtOtherRegs array
    regMaskTP GetOtherRegMask() const;
    //----------------------------------------------------------------------
    // GetRegSpillFlagByIdx: get spill flag associated with the return register
    // specified by its index.
    //
    // Arguments:
    //    idx  -  Position or index of the return register
    //
    // Return Value:
    //    Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered.
    unsigned GetRegSpillFlagByIdx(unsigned idx) const
        // gtSpillFlags packs 2 bits per return register, so all registers must fit in one byte.
        static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE);
        assert(idx < MAX_RET_REG_COUNT);
#if FEATURE_MULTIREG_RET
        unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
        unsigned spillFlags = 0;
        // Translate the packed 2-bit encoding back into the public GTF_* flags.
        if (bits & PACKED_GTF_SPILL)
            spillFlags |= GTF_SPILL;
        if (bits & PACKED_GTF_SPILLED)
            spillFlags |= GTF_SPILLED;
        assert(!"unreached");
    //----------------------------------------------------------------------
    // SetRegSpillFlagByIdx: set spill flags for the return register
    // specified by its index.
    //
    // Arguments:
    //    flags  -  GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed.
    //    idx    -  Position or index of the return register
    void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
        static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE);
        assert(idx < MAX_RET_REG_COUNT);
#if FEATURE_MULTIREG_RET
        // Translate GTF_* flags into the packed 2-bit encoding.
        if (flags & GTF_SPILL)
            bits |= PACKED_GTF_SPILL;
        if (flags & GTF_SPILLED)
            bits |= PACKED_GTF_SPILLED;
        const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED;
        // Clear anything that was already there by masking out the bits before 'or'ing in what we want there.
        gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2)));
    //-------------------------------------------------------------------
    // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
    void ClearOtherRegFlags()
#if FEATURE_MULTIREG_RET
    //-------------------------------------------------------------------------
    // CopyOtherRegFlags: copy GTF_* flags associated with gtOtherRegs from
    // the given call node.
    //
    // Arguments:
    //    fromCall  -  GenTreeCall node from which to copy
    void CopyOtherRegFlags(GenTreeCall* fromCall)
#if FEATURE_MULTIREG_RET
        this->gtSpillFlags = fromCall->gtSpillFlags;
// Flags stored in gtCallMoreFlags (in addition to the shared gtFlags bits).
#define GTF_CALL_M_EXPLICIT_TAILCALL     0x00000001 // GT_CALL -- the call is "tail" prefixed and
                                                    // importer has performed tail call checks
#define GTF_CALL_M_TAILCALL              0x00000002 // GT_CALL -- the call is a tailcall
#define GTF_CALL_M_VARARGS               0x00000004 // GT_CALL -- the call uses varargs ABI
#define GTF_CALL_M_RETBUFFARG            0x00000008 // GT_CALL -- first parameter is the return buffer argument
#define GTF_CALL_M_DELEGATE_INV          0x00000010 // GT_CALL -- call to Delegate.Invoke
#define GTF_CALL_M_NOGCCHECK             0x00000020 // GT_CALL -- not a call for computing full interruptability
#define GTF_CALL_M_SPECIAL_INTRINSIC     0x00000040 // GT_CALL -- function that could be optimized as an intrinsic
                                                    // in special cases. Used to optimize fast way out in morphing
// NOTE: the next three flags intentionally share bit 0x00000080 -- each one is
// only meaningful for a different, mutually exclusive kind of call (unmanaged,
// virtual-stub, and non-virtual respectively), as their comments state.
#define GTF_CALL_M_UNMGD_THISCALL        0x00000080 // GT_CALL -- "this" pointer (first argument)
                                                    // should be enregistered (only for GTF_CALL_UNMANAGED)
#define GTF_CALL_M_VIRTSTUB_REL_INDIRECT 0x00000080 // the virtstub is indirected through
                                                    // a relative address (only for GTF_CALL_VIRT_STUB)
#define GTF_CALL_M_NONVIRT_SAME_THIS     0x00000080 // GT_CALL -- callee "this" pointer is
                                                    // equal to caller this pointer (only for GTF_CALL_NONVIRT)
#define GTF_CALL_M_FRAME_VAR_DEATH       0x00000100 // GT_CALL -- the compLvFrameListRoot variable dies here (last use)
#ifndef LEGACY_BACKEND
#define GTF_CALL_M_TAILCALL_VIA_HELPER   0x00000200 // GT_CALL -- call is a tail call dispatched via tail call JIT helper.
#if FEATURE_TAILCALL_OPT
#define GTF_CALL_M_IMPLICIT_TAILCALL     0x00000400 // GT_CALL -- call is an opportunistic
                                                    // tail call and importer has performed tail call checks
#define GTF_CALL_M_TAILCALL_TO_LOOP      0x00000800 // GT_CALL -- call is a fast recursive tail call
                                                    // that can be converted into a loop
#define GTF_CALL_M_PINVOKE               0x00001000 // GT_CALL -- call is a pinvoke.  This mirrors VM flag CORINFO_FLG_PINVOKE.
                                                    // A call marked as Pinvoke is not necessarily a GT_CALL_UNMANAGED. For e.g.
                                                    // an IL Stub dynamically generated for a PInvoke declaration is flagged as
                                                    // a Pinvoke but not as an unmanaged call. See impCheckForPInvokeCall() to
                                                    // know when these flags are set.
#define GTF_CALL_M_R2R_REL_INDIRECT      0x00002000 // GT_CALL -- ready to run call is indirected through a relative address
#define GTF_CALL_M_DOES_NOT_RETURN       0x00004000 // GT_CALL -- call does not return
#define GTF_CALL_M_SECURE_DELEGATE_INV   0x00008000 // GT_CALL -- call is in secure delegate
#define GTF_CALL_M_FAT_POINTER_CHECK     0x00010000 // GT_CALL -- CoreRT managed calli needs transformation, that checks
                                                    // special bit in calli address. If it is set, then it is necessary
                                                    // to restore real function address and load hidden argument
                                                    // as the first argument for calli. It is CoreRT replacement for instantiating
                                                    // stubs, because executable code cannot be generated at runtime.
#define GTF_CALL_M_HELPER_SPECIAL_DCE    0x00020000 // GT_CALL -- this helper call can be removed if it is part of a comma and
                                                    // the comma result is unused.
    // Simple predicates over the shared gtFlags bits.
    bool IsUnmanaged() const
        return (gtFlags & GTF_CALL_UNMANAGED) != 0;
    bool NeedsNullCheck() const
        return (gtFlags & GTF_CALL_NULLCHECK) != 0;
    bool CallerPop() const
        return (gtFlags & GTF_CALL_POP_ARGS) != 0;
    // Virtual-call classification: GTF_CALL_VIRT_KIND_MASK encodes one of
    // non-virtual / stub / vtable; IsVirtual() is true for either virtual kind.
    bool IsVirtual() const
        return (gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_NONVIRT;
    bool IsVirtualStub() const
        return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB;
    bool IsVirtualVtable() const
        return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_VTABLE;
    bool IsInlineCandidate() const
        return (gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0;
#ifndef LEGACY_BACKEND
    // Non-standard args are extra args added by the JIT itself (not in the IL signature).
    bool HasNonStandardAddedArgs(Compiler* compiler) const;
    int GetNonStandardAddedArgCount(Compiler* compiler) const;
#endif // !LEGACY_BACKEND
    // Returns true if this call uses a retBuf argument and its calling convention
    bool HasRetBufArg() const
        return (gtCallMoreFlags & GTF_CALL_M_RETBUFFARG) != 0;
    //-------------------------------------------------------------------------
    // TreatAsHasRetBufArg:
    //
    // Arguments:
    //     compiler, the compiler instance so that we can call eeGetHelperNum
    //
    // Return Value:
    //     Returns true if we treat the call as if it has a retBuf argument
    //     This method may actually have a retBuf argument
    //     or it could be a JIT helper that we are still transforming during
    //     the importer phase.
    //
    // Notes:
    //     On ARM64 marking the method with the GTF_CALL_M_RETBUFFARG flag
    //     will make HasRetBufArg() return true, but will also force the
    //     use of register x8 to pass the RetBuf argument.
    bool TreatAsHasRetBufArg(Compiler* compiler) const;
    //-----------------------------------------------------------------------------------------
    // HasMultiRegRetVal: whether the call node returns its value in multiple return registers.
    //
    // Return Value:
    //     True if the call is returning a multi-reg return value. False otherwise.
    //
    // Notes:
    //     This is implemented only for x64 Unix and yet to be implemented for
    //     other multi-reg return target arch (arm64/arm32/x86).
    bool HasMultiRegRetVal() const
#if defined(_TARGET_X86_) && !defined(LEGACY_BACKEND)
        // LEGACY_BACKEND does not use multi reg returns for calls with long return types
        return varTypeIsLong(gtType);
#elif FEATURE_MULTIREG_RET && (defined(_TARGET_ARM_) && !defined(LEGACY_BACKEND))
        // LEGACY_BACKEND does not use multi reg returns for calls with long return types
        return varTypeIsLong(gtType) || (varTypeIsStruct(gtType) && !HasRetBufArg());
#elif FEATURE_MULTIREG_RET
        // Struct returned in registers (no hidden return buffer) is the multi-reg case.
        return varTypeIsStruct(gtType) && !HasRetBufArg();
    // Returns true if VM has flagged this method as CORINFO_FLG_PINVOKE.
    bool IsPInvoke() const
        return (gtCallMoreFlags & GTF_CALL_M_PINVOKE) != 0;
    // Note that the distinction of whether tail prefixed or an implicit tail call
    // is maintained on a call node till fgMorphCall() after which it will be
    // either a tail call (i.e. IsTailCall() is true) or a non-tail call.
    bool IsTailPrefixedCall() const
        return (gtCallMoreFlags & GTF_CALL_M_EXPLICIT_TAILCALL) != 0;
    // This method returning "true" implies that tail call flowgraph morhphing has
    // performed final checks and committed to making a tail call.
    bool IsTailCall() const
        return (gtCallMoreFlags & GTF_CALL_M_TAILCALL) != 0;
    // This method returning "true" implies that importer has performed tail call checks
    // and providing a hint that this can be converted to a tail call.
    bool CanTailCall() const
        return IsTailPrefixedCall() || IsImplicitTailCall();
#ifndef LEGACY_BACKEND
    // A committed tail call dispatched through the tail-call JIT helper
    // (as opposed to a fast tail call -- see IsFastTailCall below).
    bool IsTailCallViaHelper() const
        return IsTailCall() && (gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER);
#else // LEGACY_BACKEND
    bool IsTailCallViaHelper() const
#endif // LEGACY_BACKEND
#if FEATURE_FASTTAILCALL
    // A committed tail call that does NOT go through the helper.
    bool IsFastTailCall() const
        return IsTailCall() && !(gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER);
#else // !FEATURE_FASTTAILCALL
    bool IsFastTailCall() const
#endif // !FEATURE_FASTTAILCALL
#if FEATURE_TAILCALL_OPT
    // Returns true if this is marked for opportunistic tail calling.
    // That is, can be tail called though not explicitly prefixed with "tail" prefix.
    bool IsImplicitTailCall() const
        return (gtCallMoreFlags & GTF_CALL_M_IMPLICIT_TAILCALL) != 0;
    bool IsTailCallConvertibleToLoop() const
        return (gtCallMoreFlags & GTF_CALL_M_TAILCALL_TO_LOOP) != 0;
#else // !FEATURE_TAILCALL_OPT
    bool IsImplicitTailCall() const
    bool IsTailCallConvertibleToLoop() const
#endif // !FEATURE_TAILCALL_OPT
    bool IsSameThis() const
        return (gtCallMoreFlags & GTF_CALL_M_NONVIRT_SAME_THIS) != 0;
    bool IsDelegateInvoke() const
        return (gtCallMoreFlags & GTF_CALL_M_DELEGATE_INV) != 0;
    bool IsVirtualStubRelativeIndir() const
        return (gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT) != 0;
#ifdef FEATURE_READYTORUN_COMPILER
    bool IsR2RRelativeIndir() const
        return (gtCallMoreFlags & GTF_CALL_M_R2R_REL_INDIRECT) != 0;
    // Record the R2R entry point; an IAT_PVALUE lookup implies a relative indirection.
    void setEntryPoint(CORINFO_CONST_LOOKUP entryPoint)
        gtEntryPoint = entryPoint;
        if (gtEntryPoint.accessType == IAT_PVALUE)
            gtCallMoreFlags |= GTF_CALL_M_R2R_REL_INDIRECT;
#endif // FEATURE_READYTORUN_COMPILER
    bool IsVarargs() const
        return (gtCallMoreFlags & GTF_CALL_M_VARARGS) != 0;
    bool IsNoReturn() const
        return (gtCallMoreFlags & GTF_CALL_M_DOES_NOT_RETURN) != 0;
    // CoreRT fat-pointer calli transformation candidate (see GTF_CALL_M_FAT_POINTER_CHECK).
    bool IsFatPointerCandidate() const
        return (gtCallMoreFlags & GTF_CALL_M_FAT_POINTER_CHECK) != 0;
    bool IsPure(Compiler* compiler) const;
    bool HasSideEffects(Compiler* compiler, bool ignoreExceptions = false, bool ignoreCctors = false) const;
    void ClearFatPointerCandidate()
        gtCallMoreFlags &= ~GTF_CALL_M_FAT_POINTER_CHECK;
    void SetFatPointerCandidate()
        gtCallMoreFlags |= GTF_CALL_M_FAT_POINTER_CHECK;
    unsigned gtCallMoreFlags; // in addition to gtFlags
    unsigned char gtCallType : 3;   // value from the gtCallTypes enumeration
    unsigned char gtReturnType : 5; // exact return type
    CORINFO_CLASS_HANDLE gtRetClsHnd; // The return type handle of the call if it is a struct; always available
    // only used for CALLI unmanaged calls (CT_INDIRECT)
    GenTreePtr gtCallCookie;
    // gtInlineCandidateInfo is only used when inlining methods
    InlineCandidateInfo* gtInlineCandidateInfo;
    void* gtStubCallStubAddr;                              // GTF_CALL_VIRT_STUB - these are never inlined
    CORINFO_GENERIC_HANDLE compileTimeHelperArgumentHandle; // Used to track type handle argument of dynamic helpers
    void* gtDirectCallAddress; // Used to pass direct call address between lower and codegen
    // expression evaluated after args are placed which determines the control target
    GenTree* gtControlExpr;
    CORINFO_METHOD_HANDLE gtCallMethHnd; // CT_USER_FUNC
    GenTreePtr            gtCallAddr;    // CT_INDIRECT
#ifdef FEATURE_READYTORUN_COMPILER
    // Call target lookup info for method call from a Ready To Run module
    CORINFO_CONST_LOOKUP gtEntryPoint;
#if defined(DEBUG) || defined(INLINE_DATA)
    // For non-inline candidates, track the first observation
    // that blocks candidacy.
    InlineObservation gtInlineObservation;
    // IL offset of the call wrt its parent method.
    IL_OFFSET gtRawILOffset;
#endif // defined(DEBUG) || defined(INLINE_DATA)
    bool IsHelperCall() const
        return gtCallType == CT_HELPER;
    // Overload: helper call to the specific method handle.
    bool IsHelperCall(CORINFO_METHOD_HANDLE callMethHnd) const
        return IsHelperCall() && (callMethHnd == gtCallMethHnd);
    bool IsHelperCall(Compiler* compiler, unsigned helper) const;
    void ReplaceCallOperand(GenTree** operandUseEdge, GenTree* replacement);
    bool AreArgsComplete() const;
    GenTreeCall(var_types type) : GenTree(GT_CALL, type)
        fgArgInfo = nullptr;
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeCall() : GenTree()
// GenTreeCmpXchg -- compare-and-exchange (GT_CMPXCHG): atomically stores
// gtOpValue into gtOpLocation if the current value equals gtOpComparand.
struct GenTreeCmpXchg : public GenTree
    GenTreePtr gtOpLocation;  // address of the memory to update
    GenTreePtr gtOpValue;     // new value to store on success
    GenTreePtr gtOpComparand; // expected current value
    GenTreeCmpXchg(var_types type, GenTreePtr loc, GenTreePtr val, GenTreePtr comparand)
        : GenTree(GT_CMPXCHG, type), gtOpLocation(loc), gtOpValue(val), gtOpComparand(comparand)
        // There's no reason to do a compare-exchange on a local location, so we'll assume that all of these
        // have global effects.
        gtFlags |= (GTF_GLOB_REF | GTF_ASG);
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeCmpXchg() : GenTree()
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
// GenTreeMultiRegOp -- an operation whose result may occupy up to two
// registers on ARM (gtRegNum plus gtOtherReg), e.g. a TYP_LONG value.
struct GenTreeMultiRegOp : public GenTreeOp
    regNumber gtOtherReg; // second result register, or REG_NA if single-reg
    // GTF_SPILL or GTF_SPILLED flag on a multi-reg node indicates that one or
    // more of its result regs are in that state. The spill flag of each of the
    // return register is stored here. We only need 2 bits per returned register,
    // so this is treated as a 2-bit array. No architecture needs more than 8 bits.
    static const unsigned PACKED_GTF_SPILL   = 1;
    static const unsigned PACKED_GTF_SPILLED = 2;
    unsigned char gtSpillFlags;
    GenTreeMultiRegOp(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2)
        : GenTreeOp(oper, type, op1, op2), gtOtherReg(REG_NA)
        ClearOtherRegFlags();
    unsigned GetRegCount() const
        // No primary register assigned (or on stack) => no registers in use yet.
        if (gtRegNum == REG_NA || gtRegNum == REG_STK)
        return (gtOtherReg == REG_NA || gtOtherReg == REG_STK) ? 1 : 2;
    //---------------------------------------------------------------------------
    // GetRegNumByIdx: get ith register allocated to this struct argument.
    //
    // Arguments:
    //     idx   -   index of the register
    //
    // Return Value:
    //     Return regNumber of ith register of this register argument
    regNumber GetRegNumByIdx(unsigned idx) const
    //----------------------------------------------------------------------
    // GetRegSpillFlagByIdx: get spill flag associated with the register
    // specified by its index.
    //
    // Arguments:
    //    idx  -  Position or index of the register
    //
    // Return Value:
    //    Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered.
    unsigned GetRegSpillFlagByIdx(unsigned idx) const
        assert(idx < MAX_REG_ARG);
        unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
        unsigned spillFlags = 0;
        // Unpack the 2-bit encoding into the public GTF_* flags.
        if (bits & PACKED_GTF_SPILL)
            spillFlags |= GTF_SPILL;
        if (bits & PACKED_GTF_SPILLED)
            spillFlags |= GTF_SPILLED;
    //----------------------------------------------------------------------
    // SetRegSpillFlagByIdx: set spill flags for the register
    // specified by its index.
    //
    // Arguments:
    //    flags  -  GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed.
    //    idx    -  Position or index of the register
    void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
        assert(idx < MAX_REG_ARG);
        if (flags & GTF_SPILL)
            bits |= PACKED_GTF_SPILL;
        if (flags & GTF_SPILLED)
            bits |= PACKED_GTF_SPILLED;
        const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED;
        // Clear anything that was already there by masking out the bits before 'or'ing in what we want there.
        gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2)));
    //--------------------------------------------------------------------------
    // GetRegType: Get var_type of the register specified by index.
    //
    // Arguments:
    //    index - Index of the register.
    //            First register will have an index 0 and so on.
    //
    // Return Value:
    //    var_type of the register specified by its index.
    var_types GetRegType(unsigned index)
        // The type of register is usually the same as GenTree type
        // since most of time GenTreeMultiRegOp uses only a single reg (when gtOtherReg is REG_NA).
        // The special case is when we have TYP_LONG here, which was `TYP_DOUBLE` originally
        // (copied to int regs for argument push on armel). Then we need to separate them into int for each index.
        var_types result = TypeGet();
        if (result == TYP_LONG)
            assert(gtOtherReg != REG_NA);
    //-------------------------------------------------------------------
    // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
    void ClearOtherRegFlags()
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeMultiRegOp() : GenTreeOp()
// GenTreeFptrVal -- the address of a method (GT_FTN_ADDR), e.g. for "ldftn".
struct GenTreeFptrVal : public GenTree
    CORINFO_METHOD_HANDLE gtFptrMethod; // the method whose address this node produces
#ifdef FEATURE_READYTORUN_COMPILER
    // Call target lookup info for the method when compiling a Ready To Run module.
    CORINFO_CONST_LOOKUP gtEntryPoint;
    GenTreeFptrVal(var_types type, CORINFO_METHOD_HANDLE meth) : GenTree(GT_FTN_ADDR, type), gtFptrMethod(meth)
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeFptrVal() : GenTree()
// GenTreeQmark -- conditional expression (GT_QMARK); its second operand is a GT_COLON.
struct GenTreeQmark : public GenTreeOp
#ifdef LEGACY_BACKEND
    // Livesets on entry to then and else subtrees
    VARSET_TP gtThenLiveSet;
    VARSET_TP gtElseLiveSet;
    // The "Compiler*" argument is not a DEBUGARG here because we use it to keep track of the set of
    // (possible) QMark nodes.
    GenTreeQmark(var_types type, GenTreePtr cond, GenTreePtr colonOp, class Compiler* comp);
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeQmark() : GenTreeOp(GT_QMARK, TYP_INT, nullptr, nullptr)
/* gtIntrinsic -- intrinsic   (possibly-binary op [NULL op2 is allowed] with an additional field) */
struct GenTreeIntrinsic : public GenTreeOp
    CorInfoIntrinsics     gtIntrinsicId;
    CORINFO_METHOD_HANDLE gtMethodHandle; // Method handle of the method which is treated as an intrinsic.
#ifdef FEATURE_READYTORUN_COMPILER
    // Call target lookup info for method call from a Ready To Run module
    CORINFO_CONST_LOOKUP gtEntryPoint;
    // Unary form: op2 is null.
    GenTreeIntrinsic(var_types type, GenTreePtr op1, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle)
        : GenTreeOp(GT_INTRINSIC, type, op1, nullptr), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle)
    // Binary form.
    GenTreeIntrinsic(var_types type,
                     CorInfoIntrinsics     intrinsicId,
                     CORINFO_METHOD_HANDLE methodHandle)
        : GenTreeOp(GT_INTRINSIC, type, op1, op2), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle)
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeIntrinsic() : GenTreeOp()
// GenTreeJitIntrinsic -- common base for JIT-recognized intrinsics
// (GT_SIMD / GT_HWIntrinsic); carries the SIMD base type and vector size.
struct GenTreeJitIntrinsic : public GenTreeOp
    var_types gtSIMDBaseType; // SIMD vector base type
    unsigned  gtSIMDSize;     // SIMD vector size in bytes, use 0 for scalar intrinsics
    GenTreeJitIntrinsic(
        genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2, var_types baseType, unsigned size)
        : GenTreeOp(oper, type, op1, op2), gtSIMDBaseType(baseType), gtSIMDSize(size)
        // A nonzero size distinguishes SIMD intrinsics from scalar ones.
        return gtSIMDSize != 0;
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeJitIntrinsic() : GenTreeOp()
/* gtSIMD   -- SIMD intrinsic   (possibly-binary op [NULL op2 is allowed] with additional fields) */
struct GenTreeSIMD : public GenTreeJitIntrinsic
    SIMDIntrinsicID gtSIMDIntrinsicID; // operation Id
    // Unary form: op2 is null.
    GenTreeSIMD(var_types type, GenTreePtr op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
        : GenTreeJitIntrinsic(GT_SIMD, type, op1, nullptr, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID)
    // Binary form.
    GenTreeSIMD(var_types type,
                SIMDIntrinsicID simdIntrinsicID,
        : GenTreeJitIntrinsic(GT_SIMD, type, op1, op2, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID)
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeSIMD() : GenTreeJitIntrinsic()
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
// GenTreeHWIntrinsic -- a hardware intrinsic (GT_HWIntrinsic) identified by a NamedIntrinsic id.
struct GenTreeHWIntrinsic : public GenTreeJitIntrinsic
    NamedIntrinsic gtHWIntrinsicId;
    // Nullary form: no operands.
    GenTreeHWIntrinsic(var_types type, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size)
        : GenTreeJitIntrinsic(GT_HWIntrinsic, type, nullptr, nullptr, baseType, size), gtHWIntrinsicId(hwIntrinsicID)
    // Unary form.
    GenTreeHWIntrinsic(var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size)
        : GenTreeJitIntrinsic(GT_HWIntrinsic, type, op1, nullptr, baseType, size), gtHWIntrinsicId(hwIntrinsicID)
    // Binary form.
        var_types type, GenTree* op1, GenTree* op2, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size)
        : GenTreeJitIntrinsic(GT_HWIntrinsic, type, op1, op2, baseType, size), gtHWIntrinsicId(hwIntrinsicID)
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeHWIntrinsic() : GenTreeJitIntrinsic()
// Returns true if this node is a GT_HWIntrinsic whose result is a SIMD value
// (i.e. its gtSIMDSize is nonzero -- see GenTreeJitIntrinsic).
inline bool GenTree::OperIsSimdHWIntrinsic() const
    if (gtOper == GT_HWIntrinsic)
        // We cannot use AsHWIntrinsic() as it is not declared const
        const GenTreeHWIntrinsic* hwIntrinsic = reinterpret_cast<const GenTreeHWIntrinsic*>(this);
        return hwIntrinsic->isSIMD();
#endif // FEATURE_HW_INTRINSICS
/* gtIndex -- array access */
struct GenTreeIndex : public GenTreeOp
    unsigned             gtIndElemSize;     // size of elements in the array
    CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type.
    GenTreeIndex(var_types type, GenTreePtr arr, GenTreePtr ind, unsigned indElemSize)
        : GenTreeOp(GT_INDEX, type, arr, ind)
        , gtIndElemSize(indElemSize)
        , gtStructElemClass(nullptr) // We always initialize this after construction.
        // JitSkipArrayBoundCheck is a debugging config knob; by default the
        // index is range-checked (GTF_INX_RNGCHK).
        if (JitConfig.JitSkipArrayBoundCheck() == 1)
            // Skip bounds check
        gtFlags |= GTF_INX_RNGCHK;
        if (type == TYP_REF)
            gtFlags |= GTF_INX_REFARR_LAYOUT;
        // An array access can throw (null ref / index out of range) and reads global state.
        gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeIndex() : GenTreeOp()
// gtIndexAddr: given an array object and an index, checks that the index is within the bounds of the array if
//              necessary and produces the address of the value at that index of the array.
struct GenTreeIndexAddr : public GenTreeOp
    CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type.
    GenTree* gtIndRngFailBB; // Label to jump to for array-index-out-of-range
    unsigned gtStkDepth;     // Stack depth at which the jump occurs (required for fgSetRngChkTarget)
    var_types gtElemType;   // The element type of the array.
    unsigned  gtElemSize;   // size of elements in the array
    unsigned  gtLenOffset;  // The offset from the array's base address to its length.
    unsigned  gtElemOffset; // The offset from the array's base address to its first element.
    GenTreeIndexAddr(GenTree* arr,
                     CORINFO_CLASS_HANDLE structElemClass,
                     unsigned             elemOffset)
        : GenTreeOp(GT_INDEX_ADDR, TYP_BYREF, arr, ind)
        , gtStructElemClass(structElemClass)
        , gtIndRngFailBB(nullptr)
        , gtElemType(elemType)
        , gtElemSize(elemSize)
        , gtLenOffset(lenOffset)
        , gtElemOffset(elemOffset)
        // Same debug knob as GenTreeIndex: range check by default.
        if (JitConfig.JitSkipArrayBoundCheck() == 1)
            // Skip bounds check
        gtFlags |= GTF_INX_RNGCHK;
        // REVERSE_OPS is set because we must evaluate the index before the array address.
        gtFlags |= GTF_EXCEPT | GTF_GLOB_REF | GTF_REVERSE_OPS;
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeIndexAddr() : GenTreeOp()
/* gtArrLen -- array length (GT_ARR_LENGTH)
   GT_ARR_LENGTH is used for "arr.length" */
struct GenTreeArrLen : public GenTreeUnOp
    // Accessor for the array address operand (stored as the unary op's operand).
    GenTreePtr& ArrRef()
    } // the array address node
    int gtArrLenOffset; // constant to add to "gtArrRef" to get the address of the array length.
    inline int ArrLenOffset()
        return gtArrLenOffset;
    GenTreeArrLen(var_types type, GenTreePtr arrRef, int lenOffset)
        : GenTreeUnOp(GT_ARR_LENGTH, type, arrRef), gtArrLenOffset(lenOffset)
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeArrLen() : GenTreeUnOp()
// GenTreeBoundsChk -- a bounds check (e.g. GT_ARR_BOUNDS_CHECK). It holds:
// - a comparison value (generally an array length),
// - an index value, and
// - the label to jump to if the index is out of range.
// - the "kind" of the throw block to branch to on failure
// It generates no result.
struct GenTreeBoundsChk : public GenTree
    GenTreePtr gtIndex;  // The index expression.
    GenTreePtr gtArrLen; // An expression for the length of the array being indexed.
    GenTreePtr      gtIndRngFailBB; // Label to jump to for array-index-out-of-range
    SpecialCodeKind gtThrowKind;    // Kind of throw block to branch to on failure
    /* Only out-of-ranges at same stack depth can jump to the same label (finding return address is easier)
       For delayed calling of fgSetRngChkTarget() so that the
       optimizer has a chance of eliminating some of the rng checks */
    unsigned gtStkDepth;
    GenTreeBoundsChk(genTreeOps oper, var_types type, GenTreePtr index, GenTreePtr arrLen, SpecialCodeKind kind)
        : GenTree(oper, type)
        , gtIndRngFailBB(nullptr)
        // Effects flags propagate upwards.
        gtFlags |= (arrLen->gtFlags & GTF_ALL_EFFECT);
        gtFlags |= GTF_EXCEPT;
#if DEBUGGABLE_GENTREE
    // Debug-only default constructor (see DEBUGGABLE_GENTREE).
    GenTreeBoundsChk() : GenTree()
    // If the gtArrLen is really an array length, returns array reference, else "NULL".
    GenTreePtr GetArray()
        if (gtArrLen->OperGet() == GT_ARR_LENGTH)
            return gtArrLen->gtArrLen.ArrRef();
4472 // gtArrElem -- general array element (GT_ARR_ELEM), for non "SZ_ARRAYS"
4473 // -- multidimensional arrays, or 1-d arrays with non-zero lower bounds.
4475 struct GenTreeArrElem : public GenTree
4477 GenTreePtr gtArrObj;
// Maximum rank supported by GT_ARR_ELEM nodes.
4479 #define GT_ARR_MAX_RANK 3
4480 GenTreePtr gtArrInds[GT_ARR_MAX_RANK]; // Indices
4481 unsigned char gtArrRank; // Rank of the array
4483 unsigned char gtArrElemSize; // !!! Caution, this is an "unsigned char", it is used only
4484 // on the optimization path of array intrinsics.
4485 // It stores the size of array elements WHEN it can fit
4486 // into an "unsigned char".
4487 // This has caused VSW 571394.
4488 var_types gtArrElemType; // The array element type
4490 // Requires that "inds" is a pointer to an array of "rank" GenTreePtrs for the indices.
4491 GenTreeArrElem(var_types type,
4494 unsigned char elemSize,
4497 : GenTree(GT_ARR_ELEM, type), gtArrObj(arr), gtArrRank(rank), gtArrElemSize(elemSize), gtArrElemType(elemType)
// Copy each of the "rank" index expressions into the node.
4499 for (unsigned char i = 0; i < rank; i++)
4501 gtArrInds[i] = inds[i];
// Accessing an array element may throw (null reference / index out of range).
4503 gtFlags |= GTF_EXCEPT;
4505 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
4506 GenTreeArrElem() : GenTree()
4512 //--------------------------------------------
4514 // GenTreeArrIndex (gtArrIndex): Expression to bounds-check the index for one dimension of a
4515 // multi-dimensional or non-zero-based array, and compute the effective index
4516 // (i.e. subtracting the lower bound).
4519 // This node is similar in some ways to GenTreeBoundsChk, which ONLY performs the check.
4520 // The reason that this node incorporates the check into the effective index computation is
4521 // to avoid duplicating the codegen, as the effective index is required to compute the
4523 // TODO-CQ: Enable optimization of the lower bound and length by replacing this:
4526 // +--* ArrIndex[i, ]
4527 // with something like:
4529 // /--* ArrLowerBound[i, ]
4531 // +--* ArrLen[i, ] (either generalize GT_ARR_LENGTH or add a new node)
4533 // +--* ArrIndex[i, ]
4534 // Which could, for example, be optimized to the following when known to be within bounds:
4535 // /--* TempForLowerBoundDim0
4539 struct GenTreeArrIndex : public GenTreeOp
4541 // The array object - may be any expression producing an Array reference, but is likely to be a lclVar.
4542 GenTreePtr& ArrObj()
4546 // The index expression - may be any integral expression.
4547 GenTreePtr& IndexExpr()
4551 unsigned char gtCurrDim; // The current dimension
4552 unsigned char gtArrRank; // Rank of the array
4553 var_types gtArrElemType; // The array element type
// Constructor: bounds-checks 'indexExpr' for dimension 'currDim' of 'arrObj' and
// produces the effective (lower-bound-adjusted) index -- see the comment above
// the struct.
4555 GenTreeArrIndex(var_types type,
4557 GenTreePtr indexExpr,
4558 unsigned char currDim,
4559 unsigned char arrRank,
4561 : GenTreeOp(GT_ARR_INDEX, type, arrObj, indexExpr)
4562 , gtCurrDim(currDim)
4563 , gtArrRank(arrRank)
4564 , gtArrElemType(elemType)
// The embedded bounds check can throw.
4566 gtFlags |= GTF_EXCEPT;
4568 #if DEBUGGABLE_GENTREE
4571 // Used only for GenTree::GetVtableForOper()
4572 GenTreeArrIndex() : GenTreeOp()
4578 //--------------------------------------------
4580 // GenTreeArrOffset (gtArrOffset): Expression to compute the accumulated offset for the address
4581 // of an element of a multi-dimensional or non-zero-based array.
4584 // The result of this expression is (gtOffset * dimSize) + gtIndex
4585 // where dimSize is the length/stride/size of the dimension, and is obtained from gtArrObj.
4586 // This node is generated in conjunction with the GenTreeArrIndex node, which computes the
4587 // effective index for a single dimension. The sub-trees can be separately optimized, e.g.
4588 // within a loop body where the expression for the 0th dimension may be invariant.
4590 // Here is an example of how the tree might look for a two-dimension array reference:
4594 // +--* ArrIndex[i, ]
4596 // /--| arrOffs[i, ]
4599 // +--* ArrIndex[*,j]
4601 // /--| arrOffs[*,j]
4602 // TODO-CQ: see comment on GenTreeArrIndex for how its representation may change. When that
4603 // is done, we will also want to replace the <arrObj> argument to arrOffs with the
4604 // ArrLen as for GenTreeArrIndex.
4606 struct GenTreeArrOffs : public GenTree
4608 GenTreePtr gtOffset; // The accumulated offset for lower dimensions - must be TYP_I_IMPL, and
4609 // will either be a CSE temp, the constant 0, or another GenTreeArrOffs node.
4610 GenTreePtr gtIndex; // The effective index for the current dimension - must be non-negative
4611 // and can be any expression (though it is likely to be either a GenTreeArrIndex,
4612 // node, a lclVar, or a constant).
4613 GenTreePtr gtArrObj; // The array object - may be any expression producing an Array reference,
4614 // but is likely to be a lclVar.
4615 unsigned char gtCurrDim; // The current dimension
4616 unsigned char gtArrRank; // Rank of the array
4617 var_types gtArrElemType; // The array element type
// Constructor: accumulates the offset for dimension 'currDim'; the result is
// (gtOffset * dimSize) + gtIndex, per the comment above the struct.
4619 GenTreeArrOffs(var_types type,
4623 unsigned char currDim,
4626 : GenTree(GT_ARR_OFFSET, type)
4630 , gtCurrDim(currDim)
4632 , gtArrElemType(elemType)
// The index operand must carry GTF_EXCEPT (its bounds check can throw), and so
// must this node.
4634 assert(index->gtFlags & GTF_EXCEPT);
4635 gtFlags |= GTF_EXCEPT;
4637 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
4638 GenTreeArrOffs() : GenTree()
4644 /* gtAddrMode -- Target-specific canonicalized addressing expression (GT_LEA) */
4646 struct GenTreeAddrMode : public GenTreeOp
4648 // Address is Base + Index*Scale + Offset.
4649 // These are the legal patterns:
4651 // Base // Base != nullptr && Index == nullptr && Scale == 0 && Offset == 0
4652 // Base + Index*Scale // Base != nullptr && Index != nullptr && Scale != 0 && Offset == 0
4653 // Base + Offset // Base != nullptr && Index == nullptr && Scale == 0 && Offset != 0
4654 // Base + Index*Scale + Offset // Base != nullptr && Index != nullptr && Scale != 0 && Offset != 0
4655 // Index*Scale // Base == nullptr && Index != nullptr && Scale > 1 && Offset == 0
4656 // Index*Scale + Offset // Base == nullptr && Index != nullptr && Scale > 1 && Offset != 0
4657 // Offset // Base == nullptr && Index == nullptr && Scale == 0 && Offset != 0
4660 // 1. Base + Index is legal with Scale==1
4661 // 2. If Index is null, Scale should be zero (or uninitialized / unused)
4662 // 3. If Scale==1, then we should have "Base" instead of "Index*Scale", and "Base + Offset" instead of
4663 // "Index*Scale + Offset".
4665 // First operand is base address/pointer
4666 bool HasBase() const
4668 return gtOp1 != nullptr;
4675 // Second operand is scaled index value
4676 bool HasIndex() const
4678 return gtOp2 != nullptr;
// Offset accessor: narrows the stored unsigned offset to 'int' (see TODO-Cleanup below).
4687 return static_cast<int>(gtOffset);
4690 unsigned gtScale; // The scale factor
4692 #ifndef LEGACY_BACKEND
4695 // TODO-Cleanup: gtOffset should be changed to 'int' to match the getter function and avoid accidental
4696 // zero extension to 64 bit. However, this is used by legacy code and initialized, via the offset
4697 // parameter of the constructor, by Lowering::TryCreateAddrMode & CodeGenInterface::genCreateAddrMode.
4698 // The latter computes the offset as 'ssize_t' but returns it as 'unsigned'. We should change
4699 // genCreateAddrMode to return 'int' or 'ssize_t' and then update this as well.
4700 unsigned gtOffset; // The offset to add
// Constructor: at least one of base/index must be supplied (enforced by the assert).
4703 GenTreeAddrMode(var_types type, GenTreePtr base, GenTreePtr index, unsigned scale, unsigned offset)
4704 : GenTreeOp(GT_LEA, type, base, index)
4706 assert(base != nullptr || index != nullptr);
4710 #if DEBUGGABLE_GENTREE
4713 // Used only for GenTree::GetVtableForOper()
4714 GenTreeAddrMode() : GenTreeOp()
4720 // Indir is just an op, no additional data, but some additional abstractions
4721 struct GenTreeIndir : public GenTreeOp
4723 // The address for the indirection.
4724 // Since GenTreeDynBlk derives from this, but is an "EXOP" (i.e. it has extra fields),
4725 // we can't access Op1 and Op2 in the normal manner if we may have a DynBlk.
4731 // these methods provide an interface to the indirection node which
// Constructor: 'addr' is the address operand; 'data' is the value being stored
// (nullptr for a pure load -- see the GenTreeBlk load constructor below).
4739 GenTreeIndir(genTreeOps oper, var_types type, GenTree* addr, GenTree* data) : GenTreeOp(oper, type, addr, data)
4743 #if DEBUGGABLE_GENTREE
4746 // Used only for GenTree::GetVtableForOper()
4747 GenTreeIndir() : GenTreeOp()
4753 // gtBlk -- 'block' (GT_BLK, GT_STORE_BLK).
4755 // This is the base type for all of the nodes that represent block or struct
4757 // Since it can be a store, it includes gtBlkOpKind to specify the type of
4758 // code generation that will be used for the block operation.
4760 struct GenTreeBlk : public GenTreeIndir
4763 // The data to be stored (null for GT_BLK)
4768 void SetData(GenTree* dataNode)
4773 // The size of the buffer to be copied.
4774 unsigned Size() const
4781 // Return true iff the object being copied contains one or more GC pointers.
4784 // True if this BlkOpNode is a volatile memory operation.
4785 bool IsVolatile() const
4787 return (gtFlags & GTF_BLK_VOLATILE) != 0;
4790 // True if this BlkOpNode is an unaligned memory operation.
4791 bool IsUnaligned() const
4793 return (gtFlags & GTF_BLK_UNALIGNED) != 0;
4796 // Instruction selection: during codegen time, what code sequence we will be using
4797 // to encode this operation.
// NOTE(review): set to false by both constructors; presumably marks block ops that
// must not be interrupted by GC -- confirm against codegen.
4806 bool gtBlkOpGcUnsafe;
// Load form: no data operand (data is nullptr).
4808 GenTreeBlk(genTreeOps oper, var_types type, GenTreePtr addr, unsigned size)
4809 : GenTreeIndir(oper, type, addr, nullptr)
4811 , gtBlkOpKind(BlkOpKindInvalid)
4812 , gtBlkOpGcUnsafe(false)
4814 assert(OperIsBlk(oper));
4815 gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT);
// Store form: carries the data operand; effect flags of both operands propagate up.
4818 GenTreeBlk(genTreeOps oper, var_types type, GenTreePtr addr, GenTreePtr data, unsigned size)
4819 : GenTreeIndir(oper, type, addr, data), gtBlkSize(size), gtBlkOpKind(BlkOpKindInvalid), gtBlkOpGcUnsafe(false)
4821 assert(OperIsBlk(oper));
4822 gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT);
4823 gtFlags |= (data->gtFlags & GTF_ALL_EFFECT);
4826 #if DEBUGGABLE_GENTREE
4829 GenTreeBlk() : GenTreeIndir()
4832 #endif // DEBUGGABLE_GENTREE
4835 // gtObj -- 'object' (GT_OBJ).
4837 // This node is used for block values that may have GC pointers.
4839 struct GenTreeObj : public GenTreeBlk
4841 CORINFO_CLASS_HANDLE gtClass; // the class of the object
4843 // If non-null, this array represents the gc-layout of the class.
4844 // This may be simply copied when cloning this node, because it is not changed once computed.
4847 // If non-zero, this is the number of slots in the class layout that
4848 // contain gc-pointers.
4849 __declspec(property(get = GetGcPtrCount)) unsigned gtGcPtrCount;
4850 unsigned GetGcPtrCount() const
// UINT32_MAX is the sentinel for "GC info not yet initialized" (see IsGCInfoInitialized).
4852 assert(_gtGcPtrCount != UINT32_MAX);
4853 return _gtGcPtrCount;
4855 unsigned _gtGcPtrCount;
4857 // If non-zero, the number of pointer-sized slots that constitutes the class token.
// True once SetGCInfo() has replaced the UINT32_MAX sentinel with a real count.
4860 bool IsGCInfoInitialized()
4862 return (_gtGcPtrCount != UINT32_MAX);
// Records the GC layout returned by the VM. When the struct has no GC pointers,
// this apparently retypes the node to plain GT_BLK/GT_STORE_BLK (the oper-change
// call itself is elided here -- confirm).
4865 void SetGCInfo(BYTE* gcPtrs, unsigned gcPtrCount, unsigned slots)
4868 _gtGcPtrCount = gcPtrCount;
4870 if (gtGcPtrCount != 0)
4872 // We assume that we cannot have a struct with GC pointers that is not a multiple
4873 // of the register size.
4874 // The EE currently does not allow this, but it could change.
4875 // Let's assert it just to be safe.
4876 noway_assert(roundUp(gtBlkSize, REGSIZE_BYTES) == gtBlkSize);
4880 genTreeOps newOper = GT_BLK;
4881 if (gtOper == GT_STORE_OBJ)
4883 newOper = GT_STORE_BLK;
4887 assert(gtOper == GT_OBJ);
// Copies the (immutable once computed) GC layout from another OBJ node, if initialized.
4893 void CopyGCInfo(GenTreeObj* srcObj)
4895 if (srcObj->IsGCInfoInitialized())
4897 gtGcPtrs = srcObj->gtGcPtrs;
4898 _gtGcPtrCount = srcObj->gtGcPtrCount;
4899 gtSlots = srcObj->gtSlots;
// Load form (GT_OBJ).
4903 GenTreeObj(var_types type, GenTreePtr addr, CORINFO_CLASS_HANDLE cls, unsigned size)
4904 : GenTreeBlk(GT_OBJ, type, addr, size), gtClass(cls)
4906 // By default, an OBJ is assumed to be a global reference.
4907 gtFlags |= GTF_GLOB_REF;
4908 noway_assert(cls != NO_CLASS_HANDLE);
4909 _gtGcPtrCount = UINT32_MAX;
// Store form (GT_STORE_OBJ).
4912 GenTreeObj(var_types type, GenTreePtr addr, GenTreePtr data, CORINFO_CLASS_HANDLE cls, unsigned size)
4913 : GenTreeBlk(GT_STORE_OBJ, type, addr, data, size), gtClass(cls)
4915 // By default, an OBJ is assumed to be a global reference.
4916 gtFlags |= GTF_GLOB_REF;
4917 noway_assert(cls != NO_CLASS_HANDLE);
4918 _gtGcPtrCount = UINT32_MAX;
4921 #if DEBUGGABLE_GENTREE
4922 GenTreeObj() : GenTreeBlk()
4928 // gtDynBlk -- 'dynamic block' (GT_DYN_BLK).
4930 // This node is used for block values that have a dynamic size.
4931 // Note that such a value can never have GC pointers.
4933 struct GenTreeDynBlk : public GenTreeBlk
// Runtime-evaluated size expression; the base GenTreeBlk's static size is set to 0.
4936 GenTreePtr gtDynamicSize;
4937 bool gtEvalSizeFirst;
4939 GenTreeDynBlk(GenTreePtr addr, GenTreePtr dynamicSize)
4940 : GenTreeBlk(GT_DYN_BLK, TYP_STRUCT, addr, 0), gtDynamicSize(dynamicSize), gtEvalSizeFirst(false)
4942 // Conservatively the 'addr' could be null or point into the global heap.
4943 gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;
4944 gtFlags |= (dynamicSize->gtFlags & GTF_ALL_EFFECT);
4947 #if DEBUGGABLE_GENTREE
4950 GenTreeDynBlk() : GenTreeBlk()
4953 #endif // DEBUGGABLE_GENTREE
4956 // Read-modify-write status of a RMW memory op rooted at a storeInd
// NOTE(review): these are the enumerators of the RMWStatus enum; the enum header
// line falls outside this excerpt.
4959 STOREIND_RMW_STATUS_UNKNOWN, // RMW status of storeInd unknown
4960 // Default status unless modified by IsRMWMemOpRootedAtStoreInd()
4962 // One of these denote storeind is a RMW memory operation.
4963 STOREIND_RMW_DST_IS_OP1, // StoreInd is known to be a RMW memory op and dst candidate is op1
4964 STOREIND_RMW_DST_IS_OP2, // StoreInd is known to be a RMW memory op and dst candidate is op2
4966 // One of these denote the reason for storeind is marked as non-RMW operation
4967 STOREIND_RMW_UNSUPPORTED_ADDR, // Addr mode is not yet supported for RMW memory
4968 STOREIND_RMW_UNSUPPORTED_OPER, // Operation is not supported for RMW memory
4969 STOREIND_RMW_UNSUPPORTED_TYPE, // Type is not supported for RMW memory
4970 STOREIND_RMW_INDIR_UNEQUAL // Indir to read value is not equivalent to indir that writes the value
4973 // StoreInd is just a BinOp, with additional RMW status
4974 struct GenTreeStoreInd : public GenTreeIndir
4976 #if !CPU_LOAD_STORE_ARCH
4977 // The below flag is set and used during lowering
4978 RMWStatus gtRMWStatus;
// True if RMW analysis has not yet run (or reached no conclusion) for this store.
4980 bool IsRMWStatusUnknown()
4982 return gtRMWStatus == STOREIND_RMW_STATUS_UNKNOWN;
// True if the store was analyzed and found NOT to be an RMW memory op
// (any of the "unsupported"/"unequal" reasons).
4984 bool IsNonRMWMemoryOp()
4986 return gtRMWStatus == STOREIND_RMW_UNSUPPORTED_ADDR || gtRMWStatus == STOREIND_RMW_UNSUPPORTED_OPER ||
4987 gtRMWStatus == STOREIND_RMW_UNSUPPORTED_TYPE || gtRMWStatus == STOREIND_RMW_INDIR_UNEQUAL;
// True if the store is a confirmed RMW memory op (destination is op1 or op2).
4989 bool IsRMWMemoryOp()
4991 return gtRMWStatus == STOREIND_RMW_DST_IS_OP1 || gtRMWStatus == STOREIND_RMW_DST_IS_OP2;
4995 return gtRMWStatus == STOREIND_RMW_DST_IS_OP1;
4999 return gtRMWStatus == STOREIND_RMW_DST_IS_OP2;
5001 #endif //! CPU_LOAD_STORE_ARCH
// On load/store architectures there is no RMW tracking; status reads as "unknown"
// and the setters are no-ops.
5003 RMWStatus GetRMWStatus()
5005 #if !CPU_LOAD_STORE_ARCH
5008 return STOREIND_RMW_STATUS_UNKNOWN;
5012 void SetRMWStatusDefault()
5014 #if !CPU_LOAD_STORE_ARCH
5015 gtRMWStatus = STOREIND_RMW_STATUS_UNKNOWN;
5019 void SetRMWStatus(RMWStatus status)
5021 #if !CPU_LOAD_STORE_ARCH
5022 gtRMWStatus = status;
5031 GenTreeStoreInd(var_types type, GenTree* destPtr, GenTree* data) : GenTreeIndir(GT_STOREIND, type, destPtr, data)
5033 SetRMWStatusDefault();
5036 #if DEBUGGABLE_GENTREE
5039 // Used only for GenTree::GetVtableForOper()
5040 GenTreeStoreInd() : GenTreeIndir()
5042 SetRMWStatusDefault();
5047 /* gtRetExp -- Place holder for the return expression from an inline candidate (GT_RET_EXPR) */
5049 struct GenTreeRetExpr : public GenTree
// The inline candidate call whose return value this placeholder stands in for.
5051 GenTree* gtInlineCandidate;
5053 CORINFO_CLASS_HANDLE gtRetClsHnd;
5055 GenTreeRetExpr(var_types type) : GenTree(GT_RET_EXPR, type)
5058 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5059 GenTreeRetExpr() : GenTree()
5065 /* gtStmt -- 'statement expr' (GT_STMT) */
5067 class InlineContext;
5069 struct GenTreeStmt : public GenTree
5071 GenTreePtr gtStmtExpr; // root of the expression tree
5072 GenTreePtr gtStmtList; // first node (for forward walks)
5073 InlineContext* gtInlineContext; // The inline context for this statement.
5074 IL_OFFSETX gtStmtILoffsx; // instr offset (if available)
5077 IL_OFFSET gtStmtLastILoffs; // instr offset at end of stmt
// Property-style accessors over the generic gtNext/gtPrev links, typed as statements.
5080 __declspec(property(get = getNextStmt)) GenTreeStmt* gtNextStmt;
5082 __declspec(property(get = getPrevStmt)) GenTreeStmt* gtPrevStmt;
5084 GenTreeStmt* getNextStmt()
5086 if (gtNext == nullptr)
5092 return gtNext->AsStmt();
5096 GenTreeStmt* getPrevStmt()
5098 if (gtPrev == nullptr)
5104 return gtPrev->AsStmt();
// Constructor: a statement node is always TYP_VOID and wraps the expression 'expr'.
5108 GenTreeStmt(GenTreePtr expr, IL_OFFSETX offset)
5109 : GenTree(GT_STMT, TYP_VOID)
5111 , gtStmtList(nullptr)
5112 , gtInlineContext(nullptr)
5113 , gtStmtILoffsx(offset)
5115 , gtStmtLastILoffs(BAD_IL_OFFSET)
5118 // Statements can't have statements as part of their expression tree.
5119 assert(expr->gtOper != GT_STMT);
5121 // Set the statement to have the same costs as the top node of the tree.
5122 // This is used long before costs have been assigned, so we need to copy
5127 #if DEBUGGABLE_GENTREE
5128 GenTreeStmt() : GenTree(GT_STMT, TYP_VOID)
5134 /* NOTE: Any tree nodes that are larger than 8 bytes (two ints or
5135 pointers) must be flagged as 'large' in GenTree::InitNodeSize().
5138 /* gtClsVar -- 'static data member' (GT_CLS_VAR) */
5140 struct GenTreeClsVar : public GenTree
5142 CORINFO_FIELD_HANDLE gtClsVarHnd;
5143 FieldSeqNode* gtFieldSeq;
5145 GenTreeClsVar(var_types type, CORINFO_FIELD_HANDLE clsVarHnd, FieldSeqNode* fldSeq)
5146 : GenTree(GT_CLS_VAR, type), gtClsVarHnd(clsVarHnd), gtFieldSeq(fldSeq)
// A static data member access is always a global reference.
5148 gtFlags |= GTF_GLOB_REF;
5150 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5151 GenTreeClsVar() : GenTree()
5157 /* gtArgPlace -- 'register argument placeholder' (GT_ARGPLACE) */
// Placeholder node standing in for a register argument in the argument list.
5159 struct GenTreeArgPlace : public GenTree
5161 CORINFO_CLASS_HANDLE gtArgPlaceClsHnd; // Needed when we have a TYP_STRUCT argument
5163 GenTreeArgPlace(var_types type, CORINFO_CLASS_HANDLE clsHnd) : GenTree(GT_ARGPLACE, type), gtArgPlaceClsHnd(clsHnd)
5166 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5167 GenTreeArgPlace() : GenTree()
5173 /* gtLabel -- code label target (GT_LABEL) */
5175 struct GenTreeLabel : public GenTree
5177 BasicBlock* gtLabBB; // The basic block this label targets.
5179 GenTreeLabel(BasicBlock* bb) : GenTree(GT_LABEL, TYP_VOID), gtLabBB(bb)
5182 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5183 GenTreeLabel() : GenTree()
5189 /* gtPhiArg -- phi node rhs argument, var = phi(phiarg, phiarg, phiarg...); GT_PHI_ARG */
5190 struct GenTreePhiArg : public GenTreeLclVarCommon
5192 BasicBlock* gtPredBB; // Predecessor block this phi argument flows in from.
// NOTE(review): 'snum' is presumably the SSA number of the local; its use in the
// constructor body is elided from this excerpt -- confirm.
5194 GenTreePhiArg(var_types type, unsigned lclNum, unsigned snum, BasicBlock* block)
5195 : GenTreeLclVarCommon(GT_PHI_ARG, type, lclNum), gtPredBB(block)
5200 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5201 GenTreePhiArg() : GenTreeLclVarCommon()
5207 /* gtPutArgStk -- Argument passed on stack (GT_PUTARG_STK) */
5209 struct GenTreePutArgStk : public GenTreeUnOp
5211 unsigned gtSlotNum; // Slot number of the argument to be passed on stack
5212 #if defined(UNIX_X86_ABI)
5213 unsigned gtPadAlign; // Number of padding slots for stack alignment
5216 // Don't let clang-format mess with the GenTreePutArgStk constructor.
5219 GenTreePutArgStk(genTreeOps oper,
5223 PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots),
5224 bool putInIncomingArgArea = false,
5225 GenTreeCall* callNode = nullptr)
5226 : GenTreeUnOp(oper, type, op1 DEBUGARG(/*largeNode*/ false))
5227 , gtSlotNum(slotNum)
5228 #if defined(UNIX_X86_ABI)
5231 #if FEATURE_FASTTAILCALL
5232 , gtPutInIncomingArgArea(putInIncomingArgArea)
5233 #endif // FEATURE_FASTTAILCALL
5234 #ifdef FEATURE_PUT_STRUCT_ARG_STK
5235 , gtPutArgStkKind(Kind::Invalid)
5236 , gtNumSlots(numSlots)
5237 , gtNumberReferenceSlots(0)
5239 #endif // FEATURE_PUT_STRUCT_ARG_STK
5240 #if defined(DEBUG) || defined(UNIX_X86_ABI)
5248 #if FEATURE_FASTTAILCALL
5250 bool gtPutInIncomingArgArea; // Whether this arg needs to be placed in incoming arg area.
5251 // By default this is false and will be placed in out-going arg area.
5252 // Fast tail calls set this to true.
5253 // In future if we need to add more such bool fields consider bit fields.
5255 bool putInIncomingArgArea() const
5257 return gtPutInIncomingArgArea;
5260 #else // !FEATURE_FASTTAILCALL
// Without fast tail call support the argument always goes to the out-going area.
5262 bool putInIncomingArgArea() const
5267 #endif // !FEATURE_FASTTAILCALL
// Byte offset of this argument within the argument area.
5269 unsigned getArgOffset()
5271 return gtSlotNum * TARGET_POINTER_SIZE;
5274 #if defined(UNIX_X86_ABI)
5275 unsigned getArgPadding()
5280 void setArgPadding(unsigned padAlign)
5282 gtPadAlign = padAlign;
5286 #ifdef FEATURE_PUT_STRUCT_ARG_STK
// Total size (in bytes) of the stack slots occupied by this argument.
5288 unsigned getArgSize()
5290 return gtNumSlots * TARGET_POINTER_SIZE;
5293 // Return true if this is a PutArgStk of a SIMD12 struct.
5294 // This is needed because such values are re-typed to SIMD16, and the type of PutArgStk is VOID.
5297 return (varTypeIsSIMD(gtOp1) && (gtNumSlots == 3));
5300 //------------------------------------------------------------------------
5301 // setGcPointers: Sets the number of references and the layout of the struct object returned by the VM.
5304 // numPointers - Number of pointer references.
5305 // pointers - layout of the struct (with pointers marked.)
5311 // This data is used in the codegen for GT_PUTARG_STK to decide how to copy the struct to the stack by value.
5312 // If no pointer references are used, block copying instructions are used.
5313 // Otherwise the pointer reference slots are copied atomically in a way that gcinfo is emitted.
5314 // Any non pointer references between the pointer reference slots are copied in block fashion.
5316 void setGcPointers(unsigned numPointers, BYTE* pointers)
5318 gtNumberReferenceSlots = numPointers;
5319 gtGcPtrs = pointers;
5322 // Instruction selection: during codegen time, what code sequence we will be using
5323 // to encode this operation.
5324 // TODO-Throughput: The following information should be obtained from the child
5327 enum class Kind : __int8{
5328 Invalid, RepInstr, Unroll, Push, PushAllSlots,
5331 Kind gtPutArgStkKind;
// True if codegen will materialize this argument with push instructions.
5334 return (gtPutArgStkKind == Kind::Push) || (gtPutArgStkKind == Kind::PushAllSlots);
5337 unsigned gtNumSlots; // Number of slots for the argument to be passed on stack
5338 unsigned gtNumberReferenceSlots; // Number of reference slots.
5339 BYTE* gtGcPtrs; // gcPointers
5341 #elif !defined(LEGACY_BACKEND)
5342 unsigned getArgSize();
5343 #endif // !LEGACY_BACKEND
5345 #if defined(DEBUG) || defined(UNIX_X86_ABI)
5346 GenTreeCall* gtCall; // the call node to which this argument belongs
5349 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5350 GenTreePutArgStk() : GenTreeUnOp()
5356 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
5357 // Represent the struct argument: split value in register(s) and stack
5358 struct GenTreePutArgSplit : public GenTreePutArgStk
5362 GenTreePutArgSplit(GenTreePtr op1,
5363 unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots),
5365 bool putIncomingArgArea = false,
5366 GenTreeCall* callNode = nullptr)
5367 : GenTreePutArgStk(GT_PUTARG_SPLIT,
5370 slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(numSlots),
5373 , gtNumRegs(numRegs)
5376 ClearOtherRegFlags();
5379 // Type required to support multi-reg struct arg.
5380 var_types m_regType[MAX_REG_ARG];
5382 // First reg of struct is always given by gtRegNum.
5383 // gtOtherRegs holds the other reg numbers of struct.
5384 regNumberSmall gtOtherRegs[MAX_REG_ARG - 1];
5386 // GTF_SPILL or GTF_SPILLED flag on a multi-reg struct node indicates that one or
5387 // more of its result regs are in that state. The spill flag of each of the
5388 // return register is stored here. We only need 2 bits per register,
5389 // so this is treated as a 2-bit array.
5390 static const unsigned PACKED_GTF_SPILL = 1;
5391 static const unsigned PACKED_GTF_SPILLED = 2;
5392 unsigned char gtSpillFlags;
5394 //---------------------------------------------------------------------------
5395 // GetRegNumByIdx: get ith register allocated to this struct argument.
5398 // idx - index of the struct
5401 // Return regNumber of ith register of this struct argument
5403 regNumber GetRegNumByIdx(unsigned idx) const
5405 assert(idx < MAX_REG_ARG);
// Index 0 is gtRegNum (that branch is elided here); indices >= 1 come from gtOtherRegs.
5412 return (regNumber)gtOtherRegs[idx - 1];
5415 //----------------------------------------------------------------------
5416 // SetRegNumByIdx: set ith register of this struct argument
5420 // idx - index of the struct
5425 void SetRegNumByIdx(regNumber reg, unsigned idx)
5427 assert(idx < MAX_REG_ARG);
5434 gtOtherRegs[idx - 1] = (regNumberSmall)reg;
// The narrowing to regNumberSmall must be lossless.
5435 assert(gtOtherRegs[idx - 1] == reg);
5439 //----------------------------------------------------------------------------
5440 // ClearOtherRegs: clear multi-reg state to indicate no regs are allocated
5448 void ClearOtherRegs()
5450 for (unsigned i = 0; i < MAX_REG_ARG - 1; ++i)
5452 gtOtherRegs[i] = REG_NA;
5456 //----------------------------------------------------------------------
5457 // GetRegSpillFlagByIdx: get spill flag associated with the register
5458 // specified by its index.
5461 // idx - Position or index of the register
5464 // Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered.
5466 unsigned GetRegSpillFlagByIdx(unsigned idx) const
5468 assert(idx < MAX_REG_ARG);
// Unpack the 2-bit field for register 'idx' and translate it to GTF_* flags.
5470 unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
5471 unsigned spillFlags = 0;
5472 if (bits & PACKED_GTF_SPILL)
5474 spillFlags |= GTF_SPILL;
5476 if (bits & PACKED_GTF_SPILLED)
5478 spillFlags |= GTF_SPILLED;
5484 //----------------------------------------------------------------------
5485 // SetRegSpillFlagByIdx: set spill flags for the register
5486 // specified by its index.
5489 // flags - GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed.
5490 // idx - Position or index of the register
5495 void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
5497 assert(idx < MAX_REG_ARG);
// Translate the GTF_* flags into the packed 2-bit representation.
5500 if (flags & GTF_SPILL)
5502 bits |= PACKED_GTF_SPILL;
5504 if (flags & GTF_SPILLED)
5506 bits |= PACKED_GTF_SPILLED;
5509 const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED;
5511 // Clear anything that was already there by masking out the bits before 'or'ing in what we want there.
5512 gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2)));
5515 //--------------------------------------------------------------------------
5516 // GetRegType: Get var_type of the register specified by index.
5519 // index - Index of the register.
5520 // First register will have an index 0 and so on.
5523 // var_type of the register specified by its index.
5525 var_types GetRegType(unsigned index)
5527 assert(index < gtNumRegs)
5528 var_types result = m_regType[index];
5532 //-------------------------------------------------------------------
5533 // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
5541 void ClearOtherRegFlags()
5546 #ifdef FEATURE_PUT_STRUCT_ARG_STK
// Total argument size: the stack-slot portion plus the register portion.
5547 unsigned getArgSize()
5549 return (gtNumSlots + gtNumRegs) * TARGET_POINTER_SIZE;
5551 #endif // FEATURE_PUT_STRUCT_ARG_STK
5553 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5554 GenTreePutArgSplit() : GenTreePutArgStk()
5559 #endif // !LEGACY_BACKEND && _TARGET_ARM_
5561 // Represents GT_COPY or GT_RELOAD node
5562 struct GenTreeCopyOrReload : public GenTreeUnOp
5564 #if FEATURE_MULTIREG_RET
5565 // State required to support copy/reload of a multi-reg call node.
5566 // First register is always given by gtRegNum.
5568 regNumberSmall gtOtherRegs[MAX_RET_REG_COUNT - 1];
5571 //----------------------------------------------------------
5572 // ClearOtherRegs: set gtOtherRegs to REG_NA.
5580 void ClearOtherRegs()
5582 #if FEATURE_MULTIREG_RET
5583 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
5585 gtOtherRegs[i] = REG_NA;
5590 //-----------------------------------------------------------
5591 // GetRegNumByIdx: Get regNumber of ith position.
5594 // idx - register position.
5597 // Returns regNumber assigned to ith position.
5599 regNumber GetRegNumByIdx(unsigned idx) const
5601 assert(idx < MAX_RET_REG_COUNT);
// Index 0 is gtRegNum (that branch is elided here); indices >= 1 come from gtOtherRegs.
5608 #if FEATURE_MULTIREG_RET
5609 return (regNumber)gtOtherRegs[idx - 1];
5615 //-----------------------------------------------------------
5616 // SetRegNumByIdx: Set the regNumber for ith position.
5620 // idx - register position.
5625 void SetRegNumByIdx(regNumber reg, unsigned idx)
5627 assert(idx < MAX_RET_REG_COUNT);
5633 #if FEATURE_MULTIREG_RET
5636 gtOtherRegs[idx - 1] = (regNumberSmall)reg;
// The narrowing to regNumberSmall must be lossless.
5637 assert(gtOtherRegs[idx - 1] == reg);
5647 //----------------------------------------------------------------------------
5648 // CopyOtherRegs: copy multi-reg state from the given copy/reload node to this
5652 // from - GenTree node from which to copy multi-reg state
5657 // TODO-ARM: Implement this routine for Arm64 and Arm32
5658 // TODO-X86: Implement this routine for x86
5659 void CopyOtherRegs(GenTreeCopyOrReload* from)
5661 assert(OperGet() == from->OperGet());
5663 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
5664 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
5666 gtOtherRegs[i] = from->gtOtherRegs[i];
5671 GenTreeCopyOrReload(genTreeOps oper, var_types type, GenTree* op1) : GenTreeUnOp(oper, type, op1)
5677 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5678 GenTreeCopyOrReload() : GenTreeUnOp()
5684 // Represents GT_ALLOCOBJ node
5686 struct GenTreeAllocObj final : public GenTreeUnOp
5688 unsigned int gtNewHelper; // Value returned by ICorJitInfo::getNewHelper
5689 CORINFO_CLASS_HANDLE gtAllocObjClsHnd;
5691 GenTreeAllocObj(var_types type, unsigned int helper, CORINFO_CLASS_HANDLE clsHnd, GenTreePtr op)
5692 : GenTreeUnOp(GT_ALLOCOBJ, type, op DEBUGARG(/*largeNode*/ TRUE))
5693 , // This node in most cases will be changed to a call node
5695 , gtAllocObjClsHnd(clsHnd)
5698 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5699 GenTreeAllocObj() : GenTreeUnOp()
5705 // Represents GT_RUNTIMELOOKUP node
5707 struct GenTreeRuntimeLookup final : public GenTreeUnOp
5709 CORINFO_GENERIC_HANDLE gtHnd;
5710 CorInfoGenericHandleType gtHndType;
5712 GenTreeRuntimeLookup(CORINFO_GENERIC_HANDLE hnd, CorInfoGenericHandleType hndTyp, GenTree* tree)
5713 : GenTreeUnOp(GT_RUNTIMELOOKUP, tree->gtType, tree DEBUGARG(/*largeNode*/ FALSE)), gtHnd(hnd), gtHndType(hndTyp)
5715 assert(hnd != nullptr);
5717 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5718 GenTreeRuntimeLookup() : GenTreeUnOp()
5723 // Return reference to the actual tree that does the lookup
// Predicates classifying the kind of generic handle being looked up.
5729 bool IsClassHandle() const
5731 return gtHndType == CORINFO_HANDLETYPE_CLASS;
5733 bool IsMethodHandle() const
5735 return gtHndType == CORINFO_HANDLETYPE_METHOD;
5737 bool IsFieldHandle() const
5739 return gtHndType == CORINFO_HANDLETYPE_FIELD;
5742 // Note these operations describe the handle that is input to the
5743 // lookup, not the handle produced by the lookup.
5744 CORINFO_CLASS_HANDLE GetClassHandle() const
5746 assert(IsClassHandle());
5747 return (CORINFO_CLASS_HANDLE)gtHnd;
5749 CORINFO_METHOD_HANDLE GetMethodHandle() const
5751 assert(IsMethodHandle());
5752 return (CORINFO_METHOD_HANDLE)gtHnd;
5754 CORINFO_FIELD_HANDLE GetFieldHandle() const
5756 assert(IsMethodHandle());
5757 return (CORINFO_FIELD_HANDLE)gtHnd;
5761 // Represents a GT_JCC or GT_SETCC node.
5763 struct GenTreeCC final : public GenTree
5765 genTreeOps gtCondition; // any relop
// Constructor: 'oper' must be GT_JCC or GT_SETCC and 'condition' must be a
// comparison operator (both enforced by the asserts below).
5767 GenTreeCC(genTreeOps oper, genTreeOps condition, var_types type = TYP_VOID)
5768 : GenTree(oper, type DEBUGARG(/*largeNode*/ FALSE)), gtCondition(condition)
5770 assert(OperIs(GT_JCC, GT_SETCC));
5771 assert(OperIsCompare(condition));
5774 #if DEBUGGABLE_GENTREE
// Used only for GenTree::GetVtableForOper() in DEBUGGABLE builds.
5775 GenTreeCC() : GenTree()
5778 #endif // DEBUGGABLE_GENTREE
5778 #endif // DEBUGGABLE_GENTREE
5781 //------------------------------------------------------------------------
5782 // Deferred inline functions of GenTree -- these need the subtypes above to
5783 // be defined already.
5784 //------------------------------------------------------------------------
// True iff this node is a block assignment: a struct-typed GT_ASG, or (RyuJIT
// backend only) a block store node (GT_STORE_BLK et al.) carrying a Data() operand.
5786 inline bool GenTree::OperIsBlkOp()
5788 return (((gtOper == GT_ASG) && varTypeIsStruct(gtOp.gtOp1))
5789 #ifndef LEGACY_BACKEND
5790 || (OperIsBlk() && (AsBlk()->Data() != nullptr))
// True iff this is a dynamically-sized block op: an assignment whose LHS is
// GT_DYN_BLK, or (RyuJIT backend) a GT_STORE_DYN_BLK node.
5795 inline bool GenTree::OperIsDynBlkOp()
5797 if (gtOper == GT_ASG)
5799 return gtGetOp1()->OperGet() == GT_DYN_BLK;
5801 #ifndef LEGACY_BACKEND
5802 else if (gtOper == GT_STORE_DYN_BLK)
// True iff this block op initializes (rather than copies): the source side,
// after skipping any GT_RELOAD/GT_COPY, is an init-val or constant node.
5810 inline bool GenTree::OperIsInitBlkOp()
5816 #ifndef LEGACY_BACKEND
5818 if (gtOper == GT_ASG)
5824 src = AsBlk()->Data()->gtSkipReloadOrCopy();
5826 #else // LEGACY_BACKEND
5827 GenTree* src = gtGetOp2();
5828 #endif // LEGACY_BACKEND
5829 return src->OperIsInitVal() || src->OperIsConst();
// A copy block op is any block op that is not an init block op.
5832 inline bool GenTree::OperIsCopyBlkOp()
5834 return OperIsBlkOp() && !OperIsInitBlkOp();
5837 //------------------------------------------------------------------------
5838 // IsFPZero: Checks whether this is a floating point constant with value 0.0
5841 // Returns true iff the tree is a GT_CNS_DBL, with value of 0.0.
// NOTE: compares with == 0.0, so -0.0 also satisfies this check (IEEE
// equality treats +0.0 and -0.0 as equal).
5843 inline bool GenTree::IsFPZero()
5845 if ((gtOper == GT_CNS_DBL) && (gtDblCon.gtDconVal == 0.0))
5852 //------------------------------------------------------------------------
5853 // IsIntegralConst: Checks whether this is a constant node with the given value
5856 // constVal - the value of interest
5859 // Returns true iff the tree is an integral constant opcode, with
5863 // Like gtIconVal, the argument is of ssize_t, so cannot check for
5864 // long constants in a target-independent way.
5866 inline bool GenTree::IsIntegralConst(ssize_t constVal)
// Handles both the native-int constant (GT_CNS_INT) and, on 32-bit targets,
// the 64-bit constant (GT_CNS_LNG) forms.
5869 if ((gtOper == GT_CNS_INT) && (gtIntConCommon.IconValue() == constVal))
5874 if ((gtOper == GT_CNS_LNG) && (gtIntConCommon.LngValue() == constVal))
5882 //-------------------------------------------------------------------
5883 // IsIntegralConstVector: returns true if this is a SIMD vector
5884 // with all its elements equal to an integral constant.
5887 // constVal - const value of vector element
5890 // True if this represents an integral const SIMD vector.
5892 inline bool GenTree::IsIntegralConstVector(ssize_t constVal)
5895 // SIMDIntrinsicInit intrinsic with a const value as initializer
5896 // represents a const vector.
5897 if ((gtOper == GT_SIMD) && (gtSIMD.gtSIMDIntrinsicID == SIMDIntrinsicInit) && gtGetOp1()->IsIntegralConst(constVal))
// A matching Init must have an integral base type and a single operand.
5899 assert(varTypeIsIntegral(gtSIMD.gtSIMDBaseType));
5900 assert(gtGetOp2IfPresent() == nullptr);
// True iff this is a GT_BOX whose GTF_BOX_VALUE flag is set. A GT_BOX node
// is expected to always carry a non-null boxed operand (asserted below).
5908 inline bool GenTree::IsBoxedValue()
5910 assert(gtOper != GT_BOX || gtBox.BoxOp() != nullptr);
5911 return (gtOper == GT_BOX) && (gtFlags & GTF_BOX_VALUE);
// True iff this is a GT_SIMD node performing a whole-vector equality or
// inequality comparison (SIMDIntrinsicOpEquality / SIMDIntrinsicOpInEquality).
5914 inline bool GenTree::IsSIMDEqualityOrInequality() const
5917 if (gtOper == GT_SIMD)
5919 SIMDIntrinsicID id = AsSIMD()->gtSIMDIntrinsicID;
5920 return (id == SIMDIntrinsicOpEquality) || (id == SIMDIntrinsicOpInEquality);
// Advance to the next entry of a GT_LIST/GT_FIELD_LIST chain. Only valid on
// list nodes (asserted).
5927 inline GenTreePtr GenTree::MoveNext()
5929 assert(OperIsAnyList());
5934 //------------------------------------------------------------------------
5935 // IsValidCallArgument: Given a GenTree node that represents an argument
5936 // enforce (or don't enforce) the following invariant.
5939 // instance method for a GenTree node
5942 // true: the GenTree node is accepted as a valid argument
5943 // false: the GenTree node is not accepted as a valid argument
5946 // For targets that don't support arguments as a list of fields, we do not support GT_FIELD_LIST.
5948 // Currently for AMD64 UNIX we allow a limited case where a GT_FIELD_LIST is
5949 // allowed but every element must be a GT_LCL_FLD.
5951 // For the future targets that allow for Multireg args (and this includes the current ARM64 target),
5952 // or that allow for passing promoted structs, we allow a GT_FIELD_LIST of arbitrary nodes.
5953 // These would typically start out as GT_LCL_VARs or GT_LCL_FLDS or GT_INDs,
5954 // but could be changed into constants or GT_COMMA trees by the later
5955 // optimization phases.
5957 inline bool GenTree::IsValidCallArgument()
5961 // GT_FIELD_LIST is the only list allowed.
5964 if (OperIsFieldList())
5966 #if defined(LEGACY_BACKEND) || (!FEATURE_MULTIREG_ARGS && !FEATURE_PUT_STRUCT_ARG_STK)
5967 // Not allowed to have a GT_FIELD_LIST for an argument
5968 // unless we have a RyuJIT backend and FEATURE_MULTIREG_ARGS or FEATURE_PUT_STRUCT_ARG_STK
5972 #else // we have RyuJIT backend and FEATURE_MULTIREG_ARGS or FEATURE_PUT_STRUCT_ARG_STK
5974 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
5975 // For UNIX ABI we currently only allow a GT_FIELD_LIST of GT_LCL_FLDs nodes
5976 GenTree* gtListPtr = this;
5977 while (gtListPtr != nullptr)
5979 // ToDo: fix UNIX_AMD64 so that we do not generate this kind of a List
5980 // Note the list as currently created is malformed, as the last entry is a nullptr
5981 if (gtListPtr->Current() == nullptr)
5986 // Only a list of GT_LCL_FLDs is allowed
5987 if (gtListPtr->Current()->OperGet() != GT_LCL_FLD)
5991 gtListPtr = gtListPtr->MoveNext();
5993 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
5995 // Note that for non-UNIX ABI the GT_FIELD_LIST may contain any node
5997 // We allow this GT_FIELD_LIST as an argument
6000 #endif // FEATURE_MULTIREG_ARGS
6002 // We don't have either kind of list, so it satisfies the invariant.
// Return the payload of the current GT_LIST/GT_FIELD_LIST entry (gtOp1).
// Only valid on list nodes (asserted).
6007 inline GenTreePtr GenTree::Current()
6009 assert(OperIsAnyList());
// Same as Current(), but returns the address of the gtOp1 slot so callers
// can rewrite the list entry in place.
6013 inline GenTreePtr* GenTree::pCurrent()
6015 assert(OperIsAnyList());
6016 return &(gtOp.gtOp1);
// Unconditionally returns the first operand; caller must know the node has one.
6019 inline GenTree* GenTree::gtGetOp1() const
6021 return AsOp()->gtOp1;
// Classifies, per oper, whether a binary node's gtOp2 may legitimately be
// null (e.g. GT_LIST) or must always be non-null. Used only by the asserts
// in gtGetOp2/gtGetOp2IfPresent below.
6026 inline bool GenTree::RequiresNonNullOp2(genTreeOps oper)
6047 #ifdef LEGACY_BACKEND
// Returns the second operand of a binary node; asserts the node is binary
// and that a null op2 is only seen on opers that permit it.
6079 inline GenTree* GenTree::gtGetOp2() const
6081 assert(OperIsBinary());
6083 GenTree* op2 = AsOp()->gtOp2;
6085 // Only allow null op2 if the node type allows it, e.g. GT_LIST.
6086 assert((op2 != nullptr) || !RequiresNonNullOp2(gtOper));
// Like gtGetOp2, but safe to call on any node: returns nullptr for non-binary nodes.
6091 inline GenTree* GenTree::gtGetOp2IfPresent() const
6093 /* gtOp.gtOp2 is only valid for GTK_BINOP nodes. */
6095 GenTree* op2 = OperIsBinary() ? AsOp()->gtOp2 : nullptr;
6097 // This documents the genTreeOps for which gtOp.gtOp2 cannot be nullptr.
6098 // This helps prefix in its analysis of code which calls gtGetOp2()
6100 assert((op2 != nullptr) || !RequiresNonNullOp2(gtOper));
// Strips wrapper nodes to find the tree that actually produces the value:
// descends through GT_COMMA (taking op2, the value side) and — unless
// commaOnly is set — through single-operand GT_NOP wrappers.
6105 inline GenTreePtr GenTree::gtEffectiveVal(bool commaOnly)
6107 GenTree* effectiveVal = this;
6110 if (effectiveVal->gtOper == GT_COMMA)
6112 effectiveVal = effectiveVal->gtOp.gtOp2;
6114 else if (!commaOnly && (effectiveVal->gtOper == GT_NOP) && (effectiveVal->gtOp.gtOp1 != nullptr))
6116 effectiveVal = effectiveVal->gtOp.gtOp1;
6120 return effectiveVal;
6125 //-------------------------------------------------------------------------
6126 // gtRetExprVal - walk back through GT_RET_EXPRs
6129 // tree representing return value from a successful inline,
6130 // or original call for failed or yet to be determined inline.
6133 // Multi-level inlines can form chains of GT_RET_EXPRs.
6134 // This method walks back to the root of the chain.
6136 inline GenTree* GenTree::gtRetExprVal()
6138 GenTree* retExprVal = this;
6141 if (retExprVal->gtOper == GT_RET_EXPR)
6143 retExprVal = retExprVal->gtRetExpr.gtInlineCandidate;
// Skips a single GT_RELOAD/GT_COPY wrapper, returning its operand.
6152 inline GenTree* GenTree::gtSkipReloadOrCopy()
6154 // There can be only one reload or copy (we can't have a reload/copy of a reload/copy)
6155 if (gtOper == GT_RELOAD || gtOper == GT_COPY)
6157 assert(gtGetOp1()->OperGet() != GT_RELOAD && gtGetOp1()->OperGet() != GT_COPY);
6163 //-----------------------------------------------------------------------------------
6164 // IsMultiRegCall: whether a call node returning its value in more than one register
6170 // Returns true if this GenTree is a multi register returning call
6171 inline bool GenTree::IsMultiRegCall() const
6175 // We cannot use AsCall() as it is not declared const
// NOTE(review): reinterpret_cast here is a workaround for the non-const
// AsCall(); it is only reached once the node is known to be a call.
6176 const GenTreeCall* call = reinterpret_cast<const GenTreeCall*>(this);
6177 return call->HasMultiRegRetVal();
6183 //-----------------------------------------------------------------------------------
6184 // IsMultiRegNode: whether a node returning its value in more than one register
6190 // Returns true if this GenTree is a multi-reg node.
// Covers multi-reg calls on all targets; on ARM with the RyuJIT backend it
// additionally covers multi-reg ops, split put-arg nodes, and GT_COPY.
6191 inline bool GenTree::IsMultiRegNode() const
6193 if (IsMultiRegCall())
6198 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
6199 if (OperIsMultiRegOp() || OperIsPutArgSplit() || (gtOper == GT_COPY))
6208 //-------------------------------------------------------------------------
6209 // IsCopyOrReload: whether this is a GT_COPY or GT_RELOAD node.
6215 // Returns true if this GenTree is a copy or reload node.
6216 inline bool GenTree::IsCopyOrReload() const
6218 return (gtOper == GT_COPY || gtOper == GT_RELOAD);
6221 //-----------------------------------------------------------------------------------
6222 // IsCopyOrReloadOfMultiRegCall: whether this is a GT_COPY or GT_RELOAD of a multi-reg
6229 // Returns true if this GenTree is a copy or reload of multi-reg call node.
6230 inline bool GenTree::IsCopyOrReloadOfMultiRegCall() const
6232 if (IsCopyOrReload())
6234 return gtGetOp1()->IsMultiRegCall();
// True iff this is a native-int constant node (GT_CNS_INT).
6240 inline bool GenTree::IsCnsIntOrI() const
6242 return (gtOper == GT_CNS_INT);
// True iff this is any integral constant: on 64-bit targets GT_CNS_INT
// covers all cases; 32-bit targets also have GT_CNS_LNG for 64-bit values.
6245 inline bool GenTree::IsIntegralConst() const
6247 #ifdef _TARGET_64BIT_
6248 return IsCnsIntOrI();
6249 #else // !_TARGET_64BIT_
6250 return ((gtOper == GT_CNS_INT) || (gtOper == GT_CNS_LNG));
6251 #endif // !_TARGET_64BIT_
6254 // Is this node an integer constant that fits in a 32-bit signed integer (INT32)
6255 inline bool GenTree::IsIntCnsFitsInI32()
6257 #ifdef _TARGET_64BIT_
6258 return IsCnsIntOrI() && AsIntCon()->FitsInI32();
6259 #else // !_TARGET_64BIT_
// On 32-bit targets a GT_CNS_INT is ssize_t-sized, so it always fits.
6260 return IsCnsIntOrI();
6261 #endif // !_TARGET_64BIT_
// True iff this is a floating point constant (both float and double use GT_CNS_DBL).
6264 inline bool GenTree::IsCnsFltOrDbl() const
6266 return OperGet() == GT_CNS_DBL;
// True iff this is a floating point constant whose bit pattern is non-zero.
// NOTE(review): the bitwise test means -0.0 counts as non-zero here —
// presumably intentional, since -0.0 is not a safe additive identity; confirm.
6269 inline bool GenTree::IsCnsNonZeroFltOrDbl()
6271 if (OperGet() == GT_CNS_DBL)
6273 double constValue = gtDblCon.gtDconVal;
6274 return *(__int64*)&constValue != 0;
// True iff this is a call to a JIT helper (CT_HELPER).
6280 inline bool GenTree::IsHelperCall()
6282 return OperGet() == GT_CALL && gtCall.gtCallType == CT_HELPER;
// For a cast node: the type being cast from (the operand's type).
6285 inline var_types GenTree::CastFromType()
6287 return this->gtCast.CastOp()->TypeGet();
// For a cast node: mutable reference to the target type of the cast.
6289 inline var_types& GenTree::CastToType()
6291 return this->gtCast.gtCastType;
6294 //-----------------------------------------------------------------------------------
6295 // HasGCPtr: determine whether this block op involves GC pointers
6301 // Returns true iff the object being copied contains one or more GC pointers.
6304 // Of the block nodes, only GT_OBJ and GT_STORE_OBJ are allowed to have GC pointers.
6306 inline bool GenTreeBlk::HasGCPtr()
6308 if ((gtOper == GT_OBJ) || (gtOper == GT_STORE_OBJ))
6310 return (AsObj()->gtGcPtrCount != 0);
// True iff this node's value is consumed directly from its spill-temp stack
// location instead of being reloaded into a register (RyuJIT backend only;
// always false under LEGACY_BACKEND).
6315 inline bool GenTree::isUsedFromSpillTemp() const
6317 #if !defined(LEGACY_BACKEND)
6318 // If spilled and no reg at use, then it is used from the spill temp location rather than being reloaded.
6319 if (((gtFlags & GTF_SPILLED) != 0) && ((gtFlags & GTF_NOREG_AT_USE) != 0))
6323 #endif //! LEGACY_BACKEND
6328 /*****************************************************************************/
6330 #ifndef _HOST_64BIT_
6331 #include <poppack.h>
6334 /*****************************************************************************/
// Node allocation sizes: "small" nodes must hold the largest of the compact
// node types; "large" nodes are sized for GenTreeCall, the biggest node.
6336 #if SMALL_TREE_NODES
6338 // In debug, on some platforms (e.g., when LATE_DISASM is defined), GenTreeIntCon is bigger than GenTreeLclFld.
6339 const size_t TREE_NODE_SZ_SMALL = max(sizeof(GenTreeIntCon), sizeof(GenTreeLclFld));
6341 #endif // SMALL_TREE_NODES
6343 const size_t TREE_NODE_SZ_LARGE = sizeof(GenTreeCall);
6345 /*****************************************************************************
6346 * Types returned by GenTree::lvaLclVarRefs()
6351 VR_INVARIANT = 0x00, // an invariant value
6353 VR_IND_REF = 0x01, // an object reference
6354 VR_IND_SCL = 0x02, // a non-object reference
6355 VR_GLB_VAR = 0x04, // a global (clsVar)
6357 // Add a temp define to avoid merge conflict.
6358 #define VR_IND_PTR VR_IND_REF
6360 /*****************************************************************************/
6361 #endif // !GENTREE_H
6362 /*****************************************************************************/