1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
10 XX This is the node in the semantic tree graph. It represents the operation XX
11 XX corresponding to the node, and other information during code-gen. XX
13 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
17 /*****************************************************************************/
20 /*****************************************************************************/
22 #include "vartype.h" // For "var_types"
23 #include "target.h" // For "regNumber"
24 #include "ssaconfig.h" // For "SsaConfig::RESERVED_SSA_NUM"
26 #include "valuenumtype.h"
28 #include "jithashtable.h"
31 #include "namedintrinsiclist.h"
33 // Debugging GenTree is much easier if we add a magic virtual function to make the debugger able to figure out what type
34 // it's got. This is enabled by default in DEBUG. To enable it in RET builds (temporarily!), you need to change the
35 // build to define DEBUGGABLE_GENTREE=1, as well as pass /OPT:NOICF to the linker (or else all the vtables get merged,
36 // making the debugging value supplied by them useless). See protojit.nativeproj for a commented example of setting the
37 // build flags correctly.
38 #ifndef DEBUGGABLE_GENTREE
40 #define DEBUGGABLE_GENTREE 1
42 #define DEBUGGABLE_GENTREE 0
44 #endif // !DEBUGGABLE_GENTREE
46 // The SpecialCodeKind enum is used to indicate the type of special (unique)
47 // target block that will be targeted by an instruction.
49 // GenTreeBoundsChk nodes (SCK_RNGCHK_FAIL, SCK_ARG_EXCPN, SCK_ARG_RNG_EXCPN)
50 // - these nodes have a field (gtThrowKind) to indicate which kind
51 // GenTreeOps nodes, for which codegen will generate the branch
52 // - it will use the appropriate kind based on the opcode, though it's not
53 // clear why SCK_OVERFLOW == SCK_ARITH_EXCPN
54 // SCK_PAUSE_EXEC is not currently used.
59 SCK_RNGCHK_FAIL, // target when range check fails
60 SCK_PAUSE_EXEC, // target to stop (e.g. to allow GC)
61 SCK_DIV_BY_ZERO, // target for divide by zero (Not used on X86/X64)
62 SCK_ARITH_EXCPN, // target on arithmetic exception
63 SCK_OVERFLOW = SCK_ARITH_EXCPN, // target on overflow
64 SCK_ARG_EXCPN, // target on ArgumentException (currently used only for SIMD intrinsics)
65 SCK_ARG_RNG_EXCPN, // target on ArgumentOutOfRangeException (currently used only for SIMD intrinsics)
69 /*****************************************************************************/
71 enum genTreeOps : BYTE
73 #define GTNODE(en, st, cm, ok) GT_##en,
79 // GT_CNS_NATIVELONG is the gtOper symbol for GT_CNS_LNG or GT_CNS_INT, depending on the target.
80 // For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes
81 GT_CNS_NATIVELONG = GT_CNS_INT,
83 // For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
84 // In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG
85 GT_CNS_NATIVELONG = GT_CNS_LNG,
89 /*****************************************************************************
91 * The following enum defines a set of bit flags that can be used
92 * to classify expression tree nodes. Note that some operators will
93 * have more than one bit set, as follows:
95 * GTK_CONST implies GTK_LEAF
96 * GTK_RELOP implies GTK_BINOP
97 * GTK_LOGOP implies GTK_BINOP
102 GTK_SPECIAL = 0x0000, // unclassified operator (special handling reqd)
104 GTK_CONST = 0x0001, // constant operator
105 GTK_LEAF = 0x0002, // leaf operator
106 GTK_UNOP = 0x0004, // unary operator
107 GTK_BINOP = 0x0008, // binary operator
108 GTK_RELOP = 0x0010, // comparison operator
109 GTK_LOGOP = 0x0020, // logical operator
110 #ifdef LEGACY_BACKEND
111 GTK_ASGOP = 0x0040, // assignment operator
114 GTK_KINDMASK = 0x007F, // operator kind mask
116 GTK_COMMUTE = 0x0080, // commutative operator
118 GTK_EXOP = 0x0100, // Indicates that an oper for a node type that extends GenTreeOp (or GenTreeUnOp)
119 // by adding non-node fields to unary or binary operator.
121 GTK_LOCAL = 0x0200, // is a local access (load, store, phi)
123 GTK_NOVALUE = 0x0400, // node does not produce a value
124 GTK_NOTLIR = 0x0800, // node is not allowed in LIR
126 GTK_NOCONTAIN = 0x1000, // this node is a value, but may not be contained
128 /* Define composite value(s) */
130 GTK_SMPOP = (GTK_UNOP | GTK_BINOP | GTK_RELOP | GTK_LOGOP)
133 /*****************************************************************************/
135 #define SMALL_TREE_NODES 1
137 /*****************************************************************************/
139 enum gtCallTypes : BYTE
141 CT_USER_FUNC, // User function
142 CT_HELPER, // Jit-helper
143 CT_INDIRECT, // Indirect call
145 CT_COUNT // fake entry (must be last)
148 /*****************************************************************************/
152 struct InlineCandidateInfo;
// 1-based index into the assertion table; NO_ASSERTION_INDEX (0) means "no assertion".
typedef unsigned short AssertionIndex;

static const AssertionIndex NO_ASSERTION_INDEX = 0;

//------------------------------------------------------------------------
// GetAssertionIndex: return 1-based AssertionIndex from 0-based int index.
//
// Arguments:
//    index - 0-based index
//
// Return Value:
//    1-based AssertionIndex.
//
inline AssertionIndex GetAssertionIndex(unsigned index)
{
    return static_cast<AssertionIndex>(index + 1);
}
172 // true if the assertion holds on the bbNext edge instead of the bbJumpDest edge (for GT_JTRUE nodes)
173 unsigned short m_isNextEdgeAssertion : 1;
174 // 1-based index of the assertion
175 unsigned short m_assertionIndex : 15;
177 AssertionInfo(bool isNextEdgeAssertion, AssertionIndex assertionIndex)
178 : m_isNextEdgeAssertion(isNextEdgeAssertion), m_assertionIndex(assertionIndex)
180 assert(m_assertionIndex == assertionIndex);
184 AssertionInfo() : AssertionInfo(false, 0)
188 AssertionInfo(AssertionIndex assertionIndex) : AssertionInfo(false, assertionIndex)
192 static AssertionInfo ForNextEdge(AssertionIndex assertionIndex)
194 // Ignore the edge information if there's no assertion
195 bool isNextEdge = (assertionIndex != NO_ASSERTION_INDEX);
196 return AssertionInfo(isNextEdge, assertionIndex);
201 m_isNextEdgeAssertion = 0;
202 m_assertionIndex = NO_ASSERTION_INDEX;
205 bool HasAssertion() const
207 return m_assertionIndex != NO_ASSERTION_INDEX;
210 AssertionIndex GetAssertionIndex() const
212 return m_assertionIndex;
215 bool IsNextEdgeAssertion() const
217 return m_isNextEdgeAssertion;
221 /*****************************************************************************/
223 // GT_FIELD nodes will be lowered into more "code-gen-able" representations, like
224 // GT_IND's of addresses, or GT_LCL_FLD nodes. We'd like to preserve the more abstract
225 // information, and will therefore annotate such lowered nodes with FieldSeq's. A FieldSeq
226 // represents a (possibly) empty sequence of fields. The fields are in the order
227 // in which they are dereferenced. The first field may be an object field or a struct field;
228 // all subsequent fields must be struct fields.
231 CORINFO_FIELD_HANDLE m_fieldHnd;
232 FieldSeqNode* m_next;
234 FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next) : m_fieldHnd(fieldHnd), m_next(next)
238 // returns true when this is the pseudo #FirstElem field sequence
239 bool IsFirstElemFieldSeq();
241 // returns true when this is the pseudo #ConstantIndex field sequence
242 bool IsConstantIndexFieldSeq();
244 // returns true when this is the the pseudo #FirstElem field sequence or the pseudo #ConstantIndex field sequence
245 bool IsPseudoField();
247 // Make sure this provides methods that allow it to be used as a KeyFuncs type in SimplerHash.
248 static int GetHashCode(FieldSeqNode fsn)
250 return static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_fieldHnd)) ^
251 static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_next));
254 static bool Equals(FieldSeqNode fsn1, FieldSeqNode fsn2)
256 return fsn1.m_fieldHnd == fsn2.m_fieldHnd && fsn1.m_next == fsn2.m_next;
// This class canonicalizes field sequences.
// Two field sequences built from the same (handle, next) pairs compare equal by pointer,
// which lets value numbering compare sequences cheaply.
typedef JitHashTable<FieldSeqNode, /*KeyFuncs*/ FieldSeqNode, FieldSeqNode*> FieldSeqNodeCanonMap;
// Allocator used to create canonical nodes -- NOTE(review): presumably owned by the Compiler; confirm lifetime.
CompAllocator* m_alloc;
// Map from (handle, next) contents to the unique canonical node with those contents.
FieldSeqNodeCanonMap* m_canonMap;
static FieldSeqNode s_notAField; // No value, just exists to provide an address.
// Dummy variables to provide the addresses for the "pseudo field handle" statics below.
static int FirstElemPseudoFieldStruct;
static int ConstantIndexPseudoFieldStruct;
FieldSeqStore(CompAllocator* alloc);
// Returns the (canonical in the store) singleton field sequence for the given handle.
FieldSeqNode* CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd);
// This is a special distinguished FieldSeqNode indicating that a constant does *not*
// represent a valid field sequence. This is "infectious", in the sense that appending it
// (on either side) to any field sequence yields the "NotAField()" sequence.
static FieldSeqNode* NotAField()
// Returns the (canonical in the store) field sequence representing the concatenation of
// the sequences represented by "a" and "b". Assumes that "a" and "b" are canonical; that is,
// they are the results of CreateSingleton, NotAField, or Append calls. If either of the arguments
// are the "NotAField" value, so is the result.
FieldSeqNode* Append(FieldSeqNode* a, FieldSeqNode* b);
// We have a few "pseudo" field handles:
// This treats the constant offset of the first element of something as if it were a field.
// Works for method table offsets of boxed structs, or first elem offset of arrays/strings.
static CORINFO_FIELD_HANDLE FirstElemPseudoField;
// If there is a constant index, we make a pseudo field to correspond to the constant added to
// offset of the indexed field. This keeps the field sequence structure "normalized", especially in the
// case where the element type is a struct, so we might add a further struct field offset.
static CORINFO_FIELD_HANDLE ConstantIndexPseudoField;
// Returns true iff "hnd" is one of the two pseudo field handles above.
static bool IsPseudoField(CORINFO_FIELD_HANDLE hnd)
return hnd == FirstElemPseudoField || hnd == ConstantIndexPseudoField;
311 class GenTreeUseEdgeIterator;
312 class GenTreeOperandIterator;
314 /*****************************************************************************/
316 // Forward declarations of the subtypes
317 #define GTSTRUCT_0(fn, en) struct GenTree##fn;
318 #define GTSTRUCT_1(fn, en) struct GenTree##fn;
319 #define GTSTRUCT_2(fn, en, en2) struct GenTree##fn;
320 #define GTSTRUCT_3(fn, en, en2, en3) struct GenTree##fn;
321 #define GTSTRUCT_4(fn, en, en2, en3, en4) struct GenTree##fn;
322 #define GTSTRUCT_N(fn, ...) struct GenTree##fn;
323 #define GTSTRUCT_2_SPECIAL(fn, en, en2) GTSTRUCT_2(fn, en, en2)
324 #define GTSTRUCT_3_SPECIAL(fn, en, en2, en3) GTSTRUCT_3(fn, en, en2, en3)
325 #include "gtstructs.h"
327 /*****************************************************************************/
330 #include <pshpack4.h>
335 // We use GT_STRUCT_0 only for the category of simple ops.
336 #define GTSTRUCT_0(fn, en) \
337 GenTree##fn* As##fn() \
339 assert(OperIsSimple()); \
340 return reinterpret_cast<GenTree##fn*>(this); \
342 const GenTree##fn* As##fn() const \
344 assert(OperIsSimple()); \
345 return reinterpret_cast<const GenTree##fn*>(this); \
347 GenTree##fn& As##fn##Ref() \
351 __declspec(property(get = As##fn##Ref)) GenTree##fn& gt##fn;
353 #define GTSTRUCT_N(fn, ...) \
354 GenTree##fn* As##fn() \
356 assert(OperIs(__VA_ARGS__)); \
357 return reinterpret_cast<GenTree##fn*>(this); \
359 const GenTree##fn* As##fn() const \
361 assert(OperIs(__VA_ARGS__)); \
362 return reinterpret_cast<const GenTree##fn*>(this); \
364 GenTree##fn& As##fn##Ref() \
368 __declspec(property(get = As##fn##Ref)) GenTree##fn& gt##fn;
370 #define GTSTRUCT_1(fn, en) GTSTRUCT_N(fn, en)
371 #define GTSTRUCT_2(fn, en, en2) GTSTRUCT_N(fn, en, en2)
372 #define GTSTRUCT_3(fn, en, en2, en3) GTSTRUCT_N(fn, en, en2, en3)
373 #define GTSTRUCT_4(fn, en, en2, en3, en4) GTSTRUCT_N(fn, en, en2, en3, en4)
374 #define GTSTRUCT_2_SPECIAL(fn, en, en2) GTSTRUCT_2(fn, en, en2)
375 #define GTSTRUCT_3_SPECIAL(fn, en, en2, en3) GTSTRUCT_3(fn, en, en2, en3)
377 #include "gtstructs.h"
379 genTreeOps gtOper; // enum subtype BYTE
380 var_types gtType; // enum subtype BYTE
382 genTreeOps OperGet() const
386 var_types TypeGet() const
392 genTreeOps gtOperSave; // Only used to save gtOper when we destroy a node, to aid debugging.
// CSE bookkeeping helpers for gtCSEnum (0 = not a CSE, positive = CSE use, negative = CSE def).
// Arguments are parenthesized so the macros stay correct for compound expressions
// (e.g. GET_CSE_INDEX(a - b) previously expanded to (... ? a - b : -a - b)).
#define IS_CSE_INDEX(x) ((x) != 0)
#define IS_CSE_USE(x) ((x) > 0)
#define IS_CSE_DEF(x) ((x) < 0)
#define GET_CSE_INDEX(x) (((x) > 0) ? (x) : -(x))
#define TO_CSE_DEF(x) (-(x))
405 signed char gtCSEnum; // 0 or the CSE index (negated if def)
406 // valid only for CSE expressions
408 #endif // FEATURE_ANYCSE
410 unsigned char gtLIRFlags; // Used for nodes that are in LIR. See LIR::Flags in lir.h for the various flags.
413 AssertionInfo gtAssertionInfo; // valid only for non-GT_STMT nodes
415 bool GeneratesAssertion() const
417 return gtAssertionInfo.HasAssertion();
420 void ClearAssertion()
422 gtAssertionInfo.Clear();
425 AssertionInfo GetAssertionInfo() const
427 return gtAssertionInfo;
430 void SetAssertionInfo(AssertionInfo info)
432 gtAssertionInfo = info;
436 #if FEATURE_STACK_FP_X87
437 unsigned char gtFPlvl; // x87 stack depth at this node
438 void gtCopyFPlvl(GenTree* other)
440 gtFPlvl = other->gtFPlvl;
442 void gtSetFPlvl(unsigned level)
444 noway_assert(FitsIn<unsigned char>(level));
445 gtFPlvl = (unsigned char)level;
447 #else // FEATURE_STACK_FP_X87
449 void gtCopyFPlvl(GenTree* other)
452 void gtSetFPlvl(unsigned level)
456 #endif // FEATURE_STACK_FP_X87
459 // Cost metrics on the node. Don't allow direct access to the variable for setting.
464 // You are not allowed to read the cost values before they have been set in gtSetEvalOrder().
465 // Keep track of whether the costs have been initialized, and assert if they are read before being initialized.
466 // Obviously, this information does need to be initialized when a node is created.
467 // This is public so the dumpers can see it.
469 bool gtCostsInitialized;
472 #define MAX_COST UCHAR_MAX
473 #define IND_COST_EX 3 // execution cost for an indirection
475 __declspec(property(get = GetCostEx)) unsigned char gtCostEx; // estimate of expression execution cost
477 __declspec(property(get = GetCostSz)) unsigned char gtCostSz; // estimate of expression code size cost
479 unsigned char GetCostEx() const
481 assert(gtCostsInitialized);
484 unsigned char GetCostSz() const
486 assert(gtCostsInitialized);
490 // Set the costs. They are always both set at the same time.
491 // Don't use the "put" property: force calling this function, to make it more obvious in the few places
492 // that set the values.
493 // Note that costs are only set in gtSetEvalOrder() and its callees.
494 void SetCosts(unsigned costEx, unsigned costSz)
496 assert(costEx != (unsigned)-1); // looks bogus
497 assert(costSz != (unsigned)-1); // looks bogus
498 INDEBUG(gtCostsInitialized = true;)
500 _gtCostEx = (costEx > MAX_COST) ? MAX_COST : (unsigned char)costEx;
501 _gtCostSz = (costSz > MAX_COST) ? MAX_COST : (unsigned char)costSz;
504 // Opimized copy function, to avoid the SetCosts() function comparisons, and make it more clear that a node copy is
506 void CopyCosts(const GenTree* const tree)
508 INDEBUG(gtCostsInitialized =
509 tree->gtCostsInitialized;) // If the 'tree' costs aren't initialized, we'll hit an assert below.
510 _gtCostEx = tree->gtCostEx;
511 _gtCostSz = tree->gtCostSz;
514 // Same as CopyCosts, but avoids asserts if the costs we are copying have not been initialized.
515 // This is because the importer, for example, clones nodes, before these costs have been initialized.
516 // Note that we directly access the 'tree' costs, not going through the accessor functions (either
517 // directly or through the properties).
518 void CopyRawCosts(const GenTree* const tree)
520 INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;)
521 _gtCostEx = tree->_gtCostEx;
522 _gtCostSz = tree->_gtCostSz;
526 unsigned char _gtCostEx; // estimate of expression execution cost
527 unsigned char _gtCostSz; // estimate of expression code size cost
530 // Register or register pair number of the node.
532 CLANG_FORMAT_COMMENT_ANCHOR;
538 GT_REGTAG_NONE, // Nothing has been assigned to _gtRegNum/_gtRegPair
539 GT_REGTAG_REG, // _gtRegNum has been assigned
540 #if CPU_LONG_USES_REGPAIR
541 GT_REGTAG_REGPAIR // _gtRegPair has been assigned
544 genRegTag GetRegTag() const
546 #if CPU_LONG_USES_REGPAIR
547 assert(gtRegTag == GT_REGTAG_NONE || gtRegTag == GT_REGTAG_REG || gtRegTag == GT_REGTAG_REGPAIR);
549 assert(gtRegTag == GT_REGTAG_NONE || gtRegTag == GT_REGTAG_REG);
555 genRegTag gtRegTag; // What is in _gtRegNum/_gtRegPair?
560 // These store the register assigned to the node. If a register is not assigned, _gtRegNum is set to REG_NA
561 // or _gtRegPair is set to REG_PAIR_NONE, depending on the node type.
562 // For the LEGACY_BACKEND,these are valid only if GTF_REG_VAL is set in gtFlags.
563 regNumberSmall _gtRegNum; // which register the value is in
564 regPairNoSmall _gtRegPair; // which register pair the value is in
568 // The register number is stored in a small format (8 bits), but the getters return and the setters take
569 // a full-size (unsigned) format, to localize the casts here.
571 __declspec(property(get = GetRegNum, put = SetRegNum)) regNumber gtRegNum;
573 bool canBeContained() const;
575 // for codegen purposes, is this node a subnode of its parent
576 bool isContained() const;
578 bool isContainedIndir() const;
580 bool isIndirAddrMode();
582 bool isIndir() const;
584 bool isContainedIntOrIImmed() const
586 return isContained() && IsCnsIntOrI() && !isUsedFromSpillTemp();
589 bool isContainedFltOrDblImmed() const
591 return isContained() && (OperGet() == GT_CNS_DBL);
594 bool isLclField() const
596 return OperGet() == GT_LCL_FLD || OperGet() == GT_STORE_LCL_FLD;
599 bool isUsedFromSpillTemp() const;
601 // Indicates whether it is a memory op.
602 // Right now it includes Indir and LclField ops.
603 bool isMemoryOp() const
605 return isIndir() || isLclField();
608 bool isUsedFromMemory() const
610 return ((isContained() && (isMemoryOp() || (OperGet() == GT_LCL_VAR) || (OperGet() == GT_CNS_DBL))) ||
611 isUsedFromSpillTemp());
614 bool isLclVarUsedFromMemory() const
616 return (OperGet() == GT_LCL_VAR) && (isContained() || isUsedFromSpillTemp());
619 bool isLclFldUsedFromMemory() const
621 return isLclField() && (isContained() || isUsedFromSpillTemp());
624 bool isUsedFromReg() const
626 return !isContained() && !isUsedFromSpillTemp();
629 regNumber GetRegNum() const
631 assert((gtRegTag == GT_REGTAG_REG) || (gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE case,
632 // and fix everyplace that reads undefined
634 regNumber reg = (regNumber)_gtRegNum;
635 assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads
637 (reg >= REG_FIRST && reg <= REG_COUNT));
641 void SetRegNum(regNumber reg)
643 assert(reg >= REG_FIRST && reg <= REG_COUNT);
644 // Make sure the upper bits of _gtRegPair are clear
645 _gtRegPair = (regPairNoSmall)0;
646 _gtRegNum = (regNumberSmall)reg;
647 INDEBUG(gtRegTag = GT_REGTAG_REG;)
648 assert(_gtRegNum == reg);
651 #if CPU_LONG_USES_REGPAIR
652 __declspec(property(get = GetRegPair, put = SetRegPair)) regPairNo gtRegPair;
654 regPairNo GetRegPair() const
656 assert((gtRegTag == GT_REGTAG_REGPAIR) || (gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE
657 // case, and fix everyplace that reads
659 regPairNo regPair = (regPairNo)_gtRegPair;
660 assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads
662 (regPair >= REG_PAIR_FIRST && regPair <= REG_PAIR_LAST) ||
663 (regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
667 void SetRegPair(regPairNo regPair)
669 assert((regPair >= REG_PAIR_FIRST && regPair <= REG_PAIR_LAST) ||
670 (regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
671 _gtRegPair = (regPairNoSmall)regPair;
672 INDEBUG(gtRegTag = GT_REGTAG_REGPAIR;)
673 assert(_gtRegPair == regPair);
677 // Copy the _gtRegNum/_gtRegPair/gtRegTag fields
678 void CopyReg(GenTree* from);
679 bool gtHasReg() const;
681 int GetRegisterDstCount() const;
683 regMaskTP gtGetRegMask() const;
685 unsigned gtFlags; // see GTF_xxxx below
688 unsigned gtDebugFlags; // see GTF_DEBUG_xxx below
689 #endif // defined(DEBUG)
691 ValueNumPair gtVNPair;
693 regMaskSmall gtRsvdRegs; // set of fixed trashed registers
695 #ifndef LEGACY_BACKEND
696 unsigned AvailableTempRegCount(regMaskTP mask = (regMaskTP)-1) const;
697 regNumber GetSingleTempReg(regMaskTP mask = (regMaskTP)-1);
698 regNumber ExtractTempReg(regMaskTP mask = (regMaskTP)-1);
699 #endif // !LEGACY_BACKEND
701 #ifdef LEGACY_BACKEND
702 regMaskSmall gtUsedRegs; // set of used (trashed) registers
703 #endif // LEGACY_BACKEND
705 void SetVNsFromNode(GenTree* tree)
707 gtVNPair = tree->gtVNPair;
710 ValueNum GetVN(ValueNumKind vnk) const
712 if (vnk == VNK_Liberal)
714 return gtVNPair.GetLiberal();
718 assert(vnk == VNK_Conservative);
719 return gtVNPair.GetConservative();
722 void SetVN(ValueNumKind vnk, ValueNum vn)
724 if (vnk == VNK_Liberal)
726 return gtVNPair.SetLiberal(vn);
730 assert(vnk == VNK_Conservative);
731 return gtVNPair.SetConservative(vn);
734 void SetVNs(ValueNumPair vnp)
740 gtVNPair = ValueNumPair(); // Initializes both elements to "NoVN".
745 //---------------------------------------------------------------------
747 // GenTree flags stored in gtFlags.
749 //---------------------------------------------------------------------
751 //---------------------------------------------------------------------
752 // The first set of flags can be used with a large set of nodes, and
753 // thus they must all have distinct values. That is, one can test any
754 // expression node for one of these flags.
755 //---------------------------------------------------------------------
757 #define GTF_ASG 0x00000001 // sub-expression contains an assignment
758 #define GTF_CALL 0x00000002 // sub-expression contains a func. call
759 #define GTF_EXCEPT 0x00000004 // sub-expression might throw an exception
760 #define GTF_GLOB_REF 0x00000008 // sub-expression uses global variable(s)
761 #define GTF_ORDER_SIDEEFF 0x00000010 // sub-expression has a re-ordering side effect
763 // If you set these flags, make sure that code:gtExtractSideEffList knows how to find the tree,
764 // otherwise the C# (run csc /o-) code:
765 // var v = side_eff_operation
766 // with no use of v will drop your tree on the floor.
767 #define GTF_PERSISTENT_SIDE_EFFECTS (GTF_ASG | GTF_CALL)
768 #define GTF_SIDE_EFFECT (GTF_PERSISTENT_SIDE_EFFECTS | GTF_EXCEPT)
769 #define GTF_GLOB_EFFECT (GTF_SIDE_EFFECT | GTF_GLOB_REF)
770 #define GTF_ALL_EFFECT (GTF_GLOB_EFFECT | GTF_ORDER_SIDEEFF)
772 // The extra flag GTF_IS_IN_CSE is used to tell the consumer of these flags
773 // that we are calling in the context of performing a CSE, thus we
774 // should allow the run-once side effects of running a class constructor.
776 // The only requirement of this flag is that it not overlap any of the
777 // side-effect flags. The actual bit used is otherwise arbitrary.
778 #define GTF_IS_IN_CSE GTF_BOOLEAN
779 #define GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE (GTF_ASG | GTF_CALL | GTF_IS_IN_CSE)
781 // Can any side-effects be observed externally, say by a caller method?
782 // For assignments, only assignments to global memory can be observed
783 // externally, whereas simple assignments to local variables can not.
785 // Be careful when using this inside a "try" protected region as the
786 // order of assignments to local variables would need to be preserved
787 // wrt side effects if the variables are alive on entry to the
788 // "catch/finally" region. In such cases, even assignments to locals
789 // will have to be restricted.
790 #define GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(flags) \
791 (((flags) & (GTF_CALL | GTF_EXCEPT)) || (((flags) & (GTF_ASG | GTF_GLOB_REF)) == (GTF_ASG | GTF_GLOB_REF)))
793 #define GTF_REVERSE_OPS 0x00000020 // operand op2 should be evaluated before op1 (normally, op1 is evaluated first and op2 is evaluated second)
795 #ifdef LEGACY_BACKEND
796 #define GTF_REG_VAL 0x00000040 // operand is sitting in a register (or part of a TYP_LONG operand is sitting in a register)
797 #else // !LEGACY_BACKEND
798 #define GTF_CONTAINED 0x00000040 // This node is contained (executed as part of its parent)
799 #endif // !LEGACY_BACKEND
801 #define GTF_SPILLED 0x00000080 // the value has been spilled
803 #ifdef LEGACY_BACKEND
804 #define GTF_SPILLED_OPER 0x00000100 // op1 has been spilled
805 #define GTF_SPILLED_OP2 0x00000200 // op2 has been spilled
806 #define GTF_ZSF_SET 0x00000400 // the zero(ZF) and sign(SF) flags set to the operand
807 #else // !LEGACY_BACKEND
808 #define GTF_NOREG_AT_USE 0x00000100 // tree node is in memory at the point of use
809 #endif // !LEGACY_BACKEND
811 #define GTF_SET_FLAGS 0x00000800 // Requires that codegen for this node set the flags. Use gtSetFlags() to check this flag.
812 #define GTF_USE_FLAGS 0x00001000 // Indicates that this node uses the flags bits.
814 #define GTF_MAKE_CSE 0x00002000 // Hoisted expression: try hard to make this into CSE (see optPerformHoistExpr)
815 #define GTF_DONT_CSE 0x00004000 // Don't bother CSE'ing this expr
816 #define GTF_COLON_COND 0x00008000 // This node is conditionally executed (part of ? :)
818 #define GTF_NODE_MASK (GTF_COLON_COND)
820 #define GTF_BOOLEAN 0x00040000 // value is known to be 0/1
821 #if CPU_HAS_BYTE_REGS && defined(LEGACY_BACKEND)
822 #define GTF_SMALL_OK 0x00080000 // actual small int sufficient
825 #define GTF_UNSIGNED 0x00100000 // With GT_CAST: the source operand is an unsigned type
826 // With operators: the specified node is an unsigned operator
827 #define GTF_LATE_ARG 0x00200000 // The specified node is evaluated to a temp in the arg list, and this temp is added to gtCallLateArgs.
828 #define GTF_SPILL 0x00400000 // Needs to be spilled here
830 #define GTF_COMMON_MASK 0x007FFFFF // mask of all the flags above
832 #define GTF_REUSE_REG_VAL 0x00800000 // This is set by the register allocator on nodes whose value already exists in the
833 // register assigned to this node, so the code generator does not have to generate
834 // code to produce the value. It is currently used only on constant nodes.
835 // It CANNOT be set on var (GT_LCL*) nodes, or on indir (GT_IND or GT_STOREIND) nodes, since
836 // it is not needed for lclVars and is highly unlikely to be useful for indir nodes.
838 //---------------------------------------------------------------------
839 // The following flags can be used only with a small set of nodes, and
840 // thus their values need not be distinct (other than within the set
841 // that goes with a particular node/nodes, of course). That is, one can
842 // only test for one of these flags if the 'gtOper' value is tested as
843 // well to make sure it's the right operator for the particular flag.
844 //---------------------------------------------------------------------
846 // NB: GTF_VAR_* and GTF_REG_* share the same namespace of flags, because
847 // GT_LCL_VAR nodes may be changed to GT_REG_VAR nodes without resetting
848 // the flags. These are also used by GT_LCL_FLD.
849 #define GTF_VAR_DEF 0x80000000 // GT_LCL_VAR -- this is a definition
850 #define GTF_VAR_USEASG 0x40000000 // GT_LCL_VAR -- this is a use/def for a x<op>=y
851 #define GTF_VAR_CAST 0x10000000 // GT_LCL_VAR -- has been explictly cast (variable node may not be type of local)
852 #define GTF_VAR_ITERATOR 0x08000000 // GT_LCL_VAR -- this is a iterator reference in the loop condition
853 #define GTF_VAR_CLONED 0x01000000 // GT_LCL_VAR -- this node has been cloned or is a clone
854 // Relevant for inlining optimizations (see fgInlinePrependStatements)
856 // TODO-Cleanup: Currently, GTF_REG_BIRTH is used only by stackfp
857 // We should consider using it more generally for VAR_BIRTH, instead of
858 // GTF_VAR_DEF && !GTF_VAR_USEASG
859 #define GTF_REG_BIRTH 0x04000000 // GT_REG_VAR -- enregistered variable born here
860 #define GTF_VAR_DEATH 0x02000000 // GT_LCL_VAR, GT_REG_VAR -- variable dies here (last use)
862 #define GTF_VAR_ARR_INDEX 0x00000020 // The variable is part of (the index portion of) an array index expression.
863 // Shares a value with GTF_REVERSE_OPS, which is meaningless for local var.
865 #define GTF_LIVENESS_MASK (GTF_VAR_DEF | GTF_VAR_USEASG | GTF_REG_BIRTH | GTF_VAR_DEATH)
867 #define GTF_CALL_UNMANAGED 0x80000000 // GT_CALL -- direct call to unmanaged code
868 #define GTF_CALL_INLINE_CANDIDATE 0x40000000 // GT_CALL -- this call has been marked as an inline candidate
870 #define GTF_CALL_VIRT_KIND_MASK 0x30000000 // GT_CALL -- mask of the below call kinds
871 #define GTF_CALL_NONVIRT 0x00000000 // GT_CALL -- a non virtual call
872 #define GTF_CALL_VIRT_STUB 0x10000000 // GT_CALL -- a stub-dispatch virtual call
873 #define GTF_CALL_VIRT_VTABLE 0x20000000 // GT_CALL -- a vtable-based virtual call
875 #define GTF_CALL_NULLCHECK 0x08000000 // GT_CALL -- must check instance pointer for null
876 #define GTF_CALL_POP_ARGS 0x04000000 // GT_CALL -- caller pop arguments?
877 #define GTF_CALL_HOISTABLE 0x02000000 // GT_CALL -- call is hoistable
878 #ifdef LEGACY_BACKEND
880 // The GTF_CALL_REG_SAVE flag indicates that the call preserves all integer registers. This is used for
881 // the PollGC helper. However, since the PollGC helper on ARM follows the standard calling convention,
882 // for that target we don't use this flag.
883 #define GTF_CALL_REG_SAVE 0x00000000
885 #define GTF_CALL_REG_SAVE 0x01000000 // GT_CALL -- This call preserves all integer regs
886 #endif // _TARGET_ARM_
887 #endif // LEGACY_BACKEND
888 // For additional flags for GT_CALL node see GTF_CALL_M_*
890 #define GTF_NOP_DEATH 0x40000000 // GT_NOP -- operand dies here
892 #define GTF_FLD_NULLCHECK 0x80000000 // GT_FIELD -- need to nullcheck the "this" pointer
893 #define GTF_FLD_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
894 #define GTF_FLD_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- field access requires preceding class/static init helper
896 #define GTF_INX_RNGCHK 0x80000000 // GT_INDEX/GT_INDEX_ADDR -- the array reference should be range-checked.
897 #define GTF_INX_REFARR_LAYOUT 0x20000000 // GT_INDEX
898 #define GTF_INX_STRING_LAYOUT 0x40000000 // GT_INDEX -- this uses the special string array layout
900 #define GTF_IND_ARR_LEN 0x80000000 // GT_IND -- the indirection represents an array length (of the REF
901 // contribution to its argument).
902 #define GTF_IND_VOLATILE 0x40000000 // GT_IND -- the load or store must use volatile sematics (this is a nop on X86)
903 #define GTF_IND_NONFAULTING 0x20000000 // Operations for which OperIsIndir() is true -- An indir that cannot fault.
904 // Same as GTF_ARRLEN_NONFAULTING.
905 #define GTF_IND_TGTANYWHERE 0x10000000 // GT_IND -- the target could be anywhere
906 #define GTF_IND_TLS_REF 0x08000000 // GT_IND -- the target is accessed via TLS
907 #define GTF_IND_ASG_LHS 0x04000000 // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
908 // assignment; don't evaluate it independently.
909 #define GTF_IND_REQ_ADDR_IN_REG GTF_IND_ASG_LHS // GT_IND -- requires its addr operand to be evaluated
910 // into a register. This flag is useful in cases where it
911 // is required to generate register indirect addressing mode.
912 // One such case is virtual stub calls on xarch. This is only
913 // valid in the backend, where GTF_IND_ASG_LHS is not necessary
914 // (all such indirections will be lowered to GT_STOREIND).
915 #define GTF_IND_UNALIGNED 0x02000000 // GT_IND -- the load or store is unaligned (we assume worst case
916 // alignment of 1 byte)
917 #define GTF_IND_INVARIANT 0x01000000 // GT_IND -- the target is invariant (a prejit indirection)
918 #define GTF_IND_ARR_INDEX 0x00800000 // GT_IND -- the indirection represents an (SZ) array index
920 #define GTF_IND_FLAGS \
921 (GTF_IND_VOLATILE | GTF_IND_TGTANYWHERE | GTF_IND_NONFAULTING | GTF_IND_TLS_REF | \
922 GTF_IND_UNALIGNED | GTF_IND_INVARIANT | GTF_IND_ARR_INDEX)
924 #define GTF_CLS_VAR_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
925 #define GTF_CLS_VAR_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_FLD_INITCLASS
926 #define GTF_CLS_VAR_ASG_LHS 0x04000000 // GT_CLS_VAR -- this GT_CLS_VAR node is (the effective val) of the LHS
927 // of an assignment; don't evaluate it independently.
929 #define GTF_ADDR_ONSTACK 0x80000000 // GT_ADDR -- this expression is guaranteed to be on the stack
931 #define GTF_ADDRMODE_NO_CSE 0x80000000 // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex
934 #define GTF_MUL_64RSLT 0x40000000 // GT_MUL -- produce 64-bit result
936 #ifdef LEGACY_BACKEND
937 #define GTF_MOD_INT_RESULT 0x80000000 // GT_MOD, -- the real tree represented by this
938 // GT_UMOD node evaluates to an int even though its type is long.
939 // The result is placed in the low member of the reg pair.
940 #endif // LEGACY_BACKEND
942 #define GTF_RELOP_NAN_UN 0x80000000 // GT_<relop> -- Is branch taken if ops are NaN?
943 #define GTF_RELOP_JMP_USED 0x40000000 // GT_<relop> -- result of compare used for jump or ?:
944 #define GTF_RELOP_QMARK 0x20000000 // GT_<relop> -- the node is the condition for ?:
945 #define GTF_RELOP_ZTT 0x08000000 // GT_<relop> -- Loop test cloned for converting while-loops into do-while
946 // with explicit "loop test" in the header block.
948 #define GTF_JCMP_EQ 0x80000000 // GTF_JCMP_EQ -- Branch on equal rather than not equal
949 #define GTF_JCMP_TST 0x40000000 // GTF_JCMP_TST -- Use bit test instruction rather than compare against zero instruction
951 #define GTF_RET_MERGED 0x80000000 // GT_RETURN -- This is a return generated during epilog merging.
953 #define GTF_QMARK_CAST_INSTOF 0x80000000 // GT_QMARK -- Is this a top (not nested) level qmark created for
954 // castclass or instanceof?
956 #define GTF_BOX_VALUE 0x80000000 // GT_BOX -- "box" is on a value type
958 #define GTF_ICON_HDL_MASK 0xF0000000 // Bits used by handle types below
959 #define GTF_ICON_SCOPE_HDL 0x10000000 // GT_CNS_INT -- constant is a scope handle
960 #define GTF_ICON_CLASS_HDL 0x20000000 // GT_CNS_INT -- constant is a class handle
961 #define GTF_ICON_METHOD_HDL 0x30000000 // GT_CNS_INT -- constant is a method handle
962 #define GTF_ICON_FIELD_HDL 0x40000000 // GT_CNS_INT -- constant is a field handle
963 #define GTF_ICON_STATIC_HDL 0x50000000 // GT_CNS_INT -- constant is a handle to static data
964 #define GTF_ICON_STR_HDL 0x60000000 // GT_CNS_INT -- constant is a string handle
965 #define GTF_ICON_PSTR_HDL 0x70000000 // GT_CNS_INT -- constant is a ptr to a string handle
966 #define GTF_ICON_PTR_HDL 0x80000000 // GT_CNS_INT -- constant is a ldptr handle
967 #define GTF_ICON_VARG_HDL 0x90000000 // GT_CNS_INT -- constant is a var arg cookie handle
968 #define GTF_ICON_PINVKI_HDL 0xA0000000 // GT_CNS_INT -- constant is a pinvoke calli handle
969 #define GTF_ICON_TOKEN_HDL 0xB0000000 // GT_CNS_INT -- constant is a token handle
970 #define GTF_ICON_TLS_HDL 0xC0000000 // GT_CNS_INT -- constant is a TLS ref with offset
971 #define GTF_ICON_FTN_ADDR 0xD0000000 // GT_CNS_INT -- constant is a function address
972 #define GTF_ICON_CIDMID_HDL 0xE0000000 // GT_CNS_INT -- constant is a class ID or a module ID
973 #define GTF_ICON_BBC_PTR 0xF0000000 // GT_CNS_INT -- constant is a basic block count pointer
975 #define GTF_ICON_FIELD_OFF 0x08000000 // GT_CNS_INT -- constant is a field offset
976 #define GTF_ICON_SIMD_COUNT 0x04000000 // GT_CNS_INT -- constant is Vector<T>.Count
978 #define GTF_ICON_INITCLASS 0x02000000 // GT_CNS_INT -- Constant is used to access a static that requires preceding
979 // class/static init helper. In some cases, the constant is
980 // the address of the static field itself, and in other cases
981 // there's an extra layer of indirection and it is the address
982 // of the cell that the runtime will fill in with the address
983 // of the static field; in both of those cases, the constant
984 // is what gets flagged.
986 #define GTF_BLK_VOLATILE GTF_IND_VOLATILE // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is a volatile block operation
987 #define GTF_BLK_UNALIGNED GTF_IND_UNALIGNED // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an unaligned block operation
989 #define GTF_OVERFLOW 0x10000000 // GT_ADD, GT_SUB, GT_MUL, -- Need overflow check. Use gtOverflow(Ex)() to check this flag.
990 // GT_ASG_ADD, GT_ASG_SUB,
993 #define GTF_ARR_BOUND_INBND 0x80000000 // GT_ARR_BOUNDS_CHECK -- have proved this check is always in-bounds
995 #define GTF_ARRLEN_ARR_IDX 0x80000000 // GT_ARR_LENGTH -- Length which feeds into an array index expression
996 #define GTF_ARRLEN_NONFAULTING 0x20000000 // GT_ARR_LENGTH -- An array length operation that cannot fault. Same as GTF_IND_NONFAULTING.
998 #define GTF_FIELD_LIST_HEAD 0x80000000 // GT_FIELD_LIST -- Indicates that this is the first field in a list of
999 // struct fields constituting a single call argument.
1001 #define GTF_SIMD12_OP 0x80000000 // GT_SIMD -- Indicates that the operands need to be handled as SIMD12
1002 // even if they have been retyped as SIMD16.
1004 #define GTF_STMT_CMPADD 0x80000000 // GT_STMT -- added by compiler
1005 #define GTF_STMT_HAS_CSE 0x40000000 // GT_STMT -- CSE def or use was substituted
1007 //---------------------------------------------------------------------
1009 // GenTree flags stored in gtDebugFlags.
1011 //---------------------------------------------------------------------
1014 #define GTF_DEBUG_NONE 0x00000000 // No debug flags.
1016 #define GTF_DEBUG_NODE_MORPHED 0x00000001 // the node has been morphed (in the global morphing phase)
1017 #define GTF_DEBUG_NODE_SMALL 0x00000002
1018 #define GTF_DEBUG_NODE_LARGE 0x00000004
1019 #define GTF_DEBUG_NODE_CG_PRODUCED 0x00000008 // genProduceReg has been called on this node
1020 #define GTF_DEBUG_NODE_CG_CONSUMED 0x00000010 // genConsumeReg has been called on this node
1021 #define GTF_DEBUG_NODE_LSRA_ADDED 0x00000020 // This node was added by LSRA
1023 #define GTF_DEBUG_NODE_MASK 0x0000003F // These flags are all node (rather than operation) properties.
1025 #define GTF_DEBUG_VAR_CSE_REF 0x00800000 // GT_LCL_VAR -- This is a CSE LCL_VAR node
1026 #endif // defined(DEBUG)
1028 //---------------------------------------------------------------------
1030 // end of GenTree flags definitions
1032 //---------------------------------------------------------------------
1041 unsigned gtSeqNum; // liveness traversal order within the current statement
1043 int gtUseNum; // use-ordered traversal within the function
1046 static const unsigned short gtOperKindTable[];
1048 static unsigned OperKind(unsigned gtOper)
1050 assert(gtOper < GT_COUNT);
1052 return gtOperKindTable[gtOper];
1055 unsigned OperKind() const
1057 assert(gtOper < GT_COUNT);
1059 return gtOperKindTable[gtOper];
1062 static bool IsExOp(unsigned opKind)
1064 return (opKind & GTK_EXOP) != 0;
1066 // Returns the operKind with the GTK_EX_OP bit removed (the
1067 // kind of operator, unary or binary, that is extended).
1068 static unsigned StripExOp(unsigned opKind)
1070 return opKind & ~GTK_EXOP;
1073 bool IsValue() const
1075 if ((OperKind(gtOper) & GTK_NOVALUE) != 0)
1080 if (gtType == TYP_VOID)
1082 // These are the only operators which can produce either VOID or non-VOID results.
1083 assert(OperIs(GT_NOP, GT_CALL, GT_LOCKADD, GT_FIELD_LIST, GT_COMMA) || OperIsCompare() || OperIsLong() ||
1084 OperIsSIMD() || OperIsHWIntrinsic());
1088 if (gtOper == GT_FIELD_LIST)
1090 return (gtFlags & GTF_FIELD_LIST_HEAD) != 0;
1098 if ((OperKind(gtOper) & GTK_NOTLIR) != 0)
1106 // NOPs may only be present in LIR if they do not produce a value.
1107 return IsNothingNode();
1110 // LIST nodes may not be present in a block's LIR sequence, but they may
1111 // be present as children of an LIR node.
1112 return (gtNext == nullptr) && (gtPrev == nullptr);
1115 // Only the head of the FIELD_LIST is present in the block's LIR sequence.
1116 return (((gtFlags & GTF_FIELD_LIST_HEAD) != 0) || ((gtNext == nullptr) && (gtPrev == nullptr)));
1120 // ADDR nodes may only be present in LIR if the location they refer to is not a
1121 // local, class variable, or IND node.
1122 GenTree* location = gtGetOp1();
1123 genTreeOps locationOp = location->OperGet();
1124 return !location->IsLocal() && (locationOp != GT_CLS_VAR) && (locationOp != GT_IND);
1128 // All other nodes are assumed to be correct.
1134 // These helper methods, along with the flag values they manipulate, are defined in lir.h
1136 // UnusedValue indicates that, although this node produces a value, it is unused.
1137 inline void SetUnusedValue();
1138 inline void ClearUnusedValue();
1139 inline bool IsUnusedValue() const;
1140 // RegOptional indicates that codegen can still generate code even if it isn't allocated a register.
1141 inline bool IsRegOptional() const;
1142 inline void SetRegOptional();
1143 inline void ClearRegOptional();
1145 void dumpLIRFlags();
1148 bool OperIs(genTreeOps oper) const
1150 return OperGet() == oper;
1153 template <typename... T>
1154 bool OperIs(genTreeOps oper, T... rest) const
1156 return OperIs(oper) || OperIs(rest...);
1159 static bool OperIsConst(genTreeOps gtOper)
1161 return (OperKind(gtOper) & GTK_CONST) != 0;
1164 bool OperIsConst() const
1166 return (OperKind(gtOper) & GTK_CONST) != 0;
1169 static bool OperIsLeaf(genTreeOps gtOper)
1171 return (OperKind(gtOper) & GTK_LEAF) != 0;
1174 bool OperIsLeaf() const
1176 return (OperKind(gtOper) & GTK_LEAF) != 0;
1179 static bool OperIsCompare(genTreeOps gtOper)
1181 return (OperKind(gtOper) & GTK_RELOP) != 0;
1184 static bool OperIsLocal(genTreeOps gtOper)
1186 bool result = (OperKind(gtOper) & GTK_LOCAL) != 0;
1187 assert(result == (gtOper == GT_LCL_VAR || gtOper == GT_PHI_ARG || gtOper == GT_REG_VAR ||
1188 gtOper == GT_LCL_FLD || gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD));
1192 static bool OperIsLocalAddr(genTreeOps gtOper)
1194 return (gtOper == GT_LCL_VAR_ADDR || gtOper == GT_LCL_FLD_ADDR);
1197 static bool OperIsLocalField(genTreeOps gtOper)
1199 return (gtOper == GT_LCL_FLD || gtOper == GT_LCL_FLD_ADDR || gtOper == GT_STORE_LCL_FLD);
1202 inline bool OperIsLocalField() const
1204 return OperIsLocalField(gtOper);
1207 static bool OperIsScalarLocal(genTreeOps gtOper)
1209 return (gtOper == GT_LCL_VAR || gtOper == GT_REG_VAR || gtOper == GT_STORE_LCL_VAR);
1212 static bool OperIsNonPhiLocal(genTreeOps gtOper)
1214 return OperIsLocal(gtOper) && (gtOper != GT_PHI_ARG);
1217 static bool OperIsLocalRead(genTreeOps gtOper)
1219 return (OperIsLocal(gtOper) && !OperIsLocalStore(gtOper));
1222 static bool OperIsLocalStore(genTreeOps gtOper)
1224 return (gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD);
1227 static bool OperIsAddrMode(genTreeOps gtOper)
1229 return (gtOper == GT_LEA);
1232 static bool OperIsInitVal(genTreeOps gtOper)
1234 return (gtOper == GT_INIT_VAL);
1237 bool OperIsInitVal() const
1239 return OperIsInitVal(OperGet());
1242 bool IsConstInitVal()
1244 return (gtOper == GT_CNS_INT) || (OperIsInitVal() && (gtGetOp1()->gtOper == GT_CNS_INT));
1248 bool OperIsCopyBlkOp();
1249 bool OperIsInitBlkOp();
1250 bool OperIsDynBlkOp();
1252 static bool OperIsBlk(genTreeOps gtOper)
1254 return ((gtOper == GT_BLK) || (gtOper == GT_OBJ) || (gtOper == GT_DYN_BLK) || (gtOper == GT_STORE_BLK) ||
1255 (gtOper == GT_STORE_OBJ) || (gtOper == GT_STORE_DYN_BLK));
1258 bool OperIsBlk() const
1260 return OperIsBlk(OperGet());
1263 static bool OperIsDynBlk(genTreeOps gtOper)
1265 return ((gtOper == GT_DYN_BLK) || (gtOper == GT_STORE_DYN_BLK));
1268 bool OperIsDynBlk() const
1270 return OperIsDynBlk(OperGet());
1273 static bool OperIsStoreBlk(genTreeOps gtOper)
1275 return ((gtOper == GT_STORE_BLK) || (gtOper == GT_STORE_OBJ) || (gtOper == GT_STORE_DYN_BLK));
1278 bool OperIsStoreBlk() const
1280 return OperIsStoreBlk(OperGet());
1283 bool OperIsPutArgSplit() const
1285 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1286 return gtOper == GT_PUTARG_SPLIT;
1292 bool OperIsPutArgStk() const
1294 return gtOper == GT_PUTARG_STK;
1297 bool OperIsPutArgStkOrSplit() const
1299 return OperIsPutArgStk() || OperIsPutArgSplit();
1302 bool OperIsPutArgReg() const
1304 return gtOper == GT_PUTARG_REG;
1307 bool OperIsPutArg() const
1309 return OperIsPutArgStk() || OperIsPutArgReg() || OperIsPutArgSplit();
1312 bool OperIsMultiRegOp() const
1314 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1315 if ((gtOper == GT_MUL_LONG) || (gtOper == GT_PUTARG_REG) || (gtOper == GT_BITCAST))
1324 bool OperIsAddrMode() const
1326 return OperIsAddrMode(OperGet());
1329 bool OperIsLocal() const
1331 return OperIsLocal(OperGet());
1334 bool OperIsLocalAddr() const
1336 return OperIsLocalAddr(OperGet());
1339 bool OperIsScalarLocal() const
1341 return OperIsScalarLocal(OperGet());
1344 bool OperIsNonPhiLocal() const
1346 return OperIsNonPhiLocal(OperGet());
1349 bool OperIsLocalStore() const
1351 return OperIsLocalStore(OperGet());
1354 bool OperIsLocalRead() const
1356 return OperIsLocalRead(OperGet());
1359 bool OperIsCompare() const
1361 return (OperKind(gtOper) & GTK_RELOP) != 0;
1364 static bool OperIsLogical(genTreeOps gtOper)
1366 return (OperKind(gtOper) & GTK_LOGOP) != 0;
1369 bool OperIsLogical() const
1371 return (OperKind(gtOper) & GTK_LOGOP) != 0;
1374 static bool OperIsShift(genTreeOps gtOper)
1376 return (gtOper == GT_LSH) || (gtOper == GT_RSH) || (gtOper == GT_RSZ);
1379 bool OperIsShift() const
1381 return OperIsShift(OperGet());
1384 static bool OperIsRotate(genTreeOps gtOper)
1386 return (gtOper == GT_ROL) || (gtOper == GT_ROR);
1389 bool OperIsRotate() const
1391 return OperIsRotate(OperGet());
1394 static bool OperIsShiftOrRotate(genTreeOps gtOper)
1396 return OperIsShift(gtOper) || OperIsRotate(gtOper);
1399 bool OperIsShiftOrRotate() const
1401 return OperIsShiftOrRotate(OperGet());
1404 bool OperIsArithmetic() const
1406 genTreeOps op = OperGet();
1407 return op == GT_ADD || op == GT_SUB || op == GT_MUL || op == GT_DIV || op == GT_MOD
1409 || op == GT_UDIV || op == GT_UMOD
1411 || op == GT_OR || op == GT_XOR || op == GT_AND
1413 || OperIsShiftOrRotate(op);
1416 #ifdef _TARGET_XARCH_
1417 static bool OperIsRMWMemOp(genTreeOps gtOper)
1419 // Return if binary op is one of the supported operations for RMW of memory.
1420 return (gtOper == GT_ADD || gtOper == GT_SUB || gtOper == GT_AND || gtOper == GT_OR || gtOper == GT_XOR ||
1421 gtOper == GT_NOT || gtOper == GT_NEG || OperIsShiftOrRotate(gtOper));
1423 bool OperIsRMWMemOp() const
1425 // Return if binary op is one of the supported operations for RMW of memory.
1426 return OperIsRMWMemOp(gtOper);
1428 #endif // _TARGET_XARCH_
1430 static bool OperIsUnary(genTreeOps gtOper)
1432 return (OperKind(gtOper) & GTK_UNOP) != 0;
1435 bool OperIsUnary() const
1437 return OperIsUnary(gtOper);
1440 static bool OperIsBinary(genTreeOps gtOper)
1442 return (OperKind(gtOper) & GTK_BINOP) != 0;
1445 bool OperIsBinary() const
1447 return OperIsBinary(gtOper);
1450 static bool OperIsSimple(genTreeOps gtOper)
1452 return (OperKind(gtOper) & GTK_SMPOP) != 0;
1455 static bool OperIsSpecial(genTreeOps gtOper)
1457 return ((OperKind(gtOper) & GTK_KINDMASK) == GTK_SPECIAL);
1460 bool OperIsSimple() const
1462 return OperIsSimple(gtOper);
1466 bool isCommutativeSIMDIntrinsic();
1468 bool isCommutativeSIMDIntrinsic()
1472 #endif // FEATURE_SIMD
1474 #ifdef FEATURE_HW_INTRINSICS
1475 bool isCommutativeHWIntrinsic() const;
1476 bool isContainableHWIntrinsic() const;
1477 bool isRMWHWIntrinsic(Compiler* comp);
1479 bool isCommutativeHWIntrinsic() const
1484 bool isContainableHWIntrinsic() const
1489 bool isRMWHWIntrinsic(Compiler* comp)
1493 #endif // FEATURE_HW_INTRINSICS
1495 static bool OperIsCommutative(genTreeOps gtOper)
1497 return (OperKind(gtOper) & GTK_COMMUTE) != 0;
1500 bool OperIsCommutative()
1502 return OperIsCommutative(gtOper) || (OperIsSIMD(gtOper) && isCommutativeSIMDIntrinsic()) ||
1503 (OperIsHWIntrinsic(gtOper) && isCommutativeHWIntrinsic());
1506 static bool OperIsAssignment(genTreeOps gtOper)
1508 #ifdef LEGACY_BACKEND
1509 return (OperKind(gtOper) & GTK_ASGOP) != 0;
1511 return gtOper == GT_ASG;
1515 bool OperIsAssignment() const
1517 return OperIsAssignment(gtOper);
1520 static bool OperMayOverflow(genTreeOps gtOper)
1522 return ((gtOper == GT_ADD) || (gtOper == GT_SUB) || (gtOper == GT_MUL) || (gtOper == GT_CAST)
1523 #ifdef LEGACY_BACKEND
1524 || (gtOper == GT_ASG_ADD) || (gtOper == GT_ASG_SUB)
1525 #elif !defined(_TARGET_64BIT_)
1526 || (gtOper == GT_ADD_HI) || (gtOper == GT_SUB_HI)
1531 bool OperMayOverflow() const
1533 return OperMayOverflow(gtOper);
1536 static bool OperIsIndir(genTreeOps gtOper)
1538 return gtOper == GT_IND || gtOper == GT_STOREIND || gtOper == GT_NULLCHECK || OperIsBlk(gtOper);
1541 static bool OperIsIndirOrArrLength(genTreeOps gtOper)
1543 return OperIsIndir(gtOper) || (gtOper == GT_ARR_LENGTH);
1546 bool OperIsIndir() const
1548 return OperIsIndir(gtOper);
1551 bool OperIsIndirOrArrLength() const
1553 return OperIsIndirOrArrLength(gtOper);
1556 bool OperIsImplicitIndir() const;
1558 bool OperIsStore() const
1560 return OperIsStore(gtOper);
1563 static bool OperIsStore(genTreeOps gtOper)
1565 return (gtOper == GT_STOREIND || gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD ||
1566 gtOper == GT_STORE_BLK || gtOper == GT_STORE_OBJ || gtOper == GT_STORE_DYN_BLK);
1569 static bool OperIsAtomicOp(genTreeOps gtOper)
1571 return (gtOper == GT_XADD || gtOper == GT_XCHG || gtOper == GT_LOCKADD || gtOper == GT_CMPXCHG);
1574 bool OperIsAtomicOp() const
1576 return OperIsAtomicOp(gtOper);
1579 // This is here for cleaner FEATURE_SIMD #ifdefs.
1580 static bool OperIsSIMD(genTreeOps gtOper)
1583 return gtOper == GT_SIMD;
1584 #else // !FEATURE_SIMD
1586 #endif // !FEATURE_SIMD
1589 bool OperIsSIMD() const
1591 return OperIsSIMD(gtOper);
1594 static bool OperIsHWIntrinsic(genTreeOps gtOper)
1596 #ifdef FEATURE_HW_INTRINSICS
1597 return gtOper == GT_HWIntrinsic;
1600 #endif // FEATURE_HW_INTRINSICS
1603 bool OperIsHWIntrinsic() const
1605 return OperIsHWIntrinsic(gtOper);
1608 #ifdef FEATURE_HW_INTRINSICS
1609 inline bool OperIsSimdHWIntrinsic() const;
1611 inline bool OperIsSimdHWIntrinsic() const
1617 bool OperIsSIMDorSimdHWintrinsic() const
1619 return OperIsSIMD() || OperIsSimdHWIntrinsic();
1622 // This is here for cleaner GT_LONG #ifdefs.
1623 static bool OperIsLong(genTreeOps gtOper)
1625 #if defined(_TARGET_64BIT_) || defined(LEGACY_BACKEND)
1628 return gtOper == GT_LONG;
1632 bool OperIsLong() const
1634 return OperIsLong(gtOper);
1637 bool OperIsFieldListHead()
1639 return (gtOper == GT_FIELD_LIST) && ((gtFlags & GTF_FIELD_LIST_HEAD) != 0);
1642 bool OperIsConditionalJump() const
1644 return (gtOper == GT_JTRUE) || (gtOper == GT_JCMP) || (gtOper == GT_JCC);
1647 static bool OperIsBoundsCheck(genTreeOps op)
1649 if (op == GT_ARR_BOUNDS_CHECK)
1654 if (op == GT_SIMD_CHK)
1658 #endif // FEATURE_SIMD
1659 #ifdef FEATURE_HW_INTRINSICS
1660 if (op == GT_HW_INTRINSIC_CHK)
1664 #endif // FEATURE_HW_INTRINSICS
1668 bool OperIsBoundsCheck() const
1670 return OperIsBoundsCheck(OperGet());
1673 #ifdef LEGACY_BACKEND
1674 // Requires that "op" is an op= operator. Returns
1675 // the corresponding "op".
1676 static genTreeOps OpAsgToOper(genTreeOps op);
1680 bool NullOp1Legal() const
1682 assert(OperIsSimple(gtOper));
1689 #ifdef FEATURE_HW_INTRINSICS
1690 case GT_HWIntrinsic:
1691 #endif // FEATURE_HW_INTRINSICS
1694 return gtType == TYP_VOID;
1700 bool NullOp2Legal() const
1702 assert(OperIsSimple(gtOper) || OperIsBlk(gtOper));
1703 if (!OperIsBinary(gtOper))
1715 #endif // !FEATURE_SIMD
1717 #ifdef FEATURE_HW_INTRINSICS
1718 case GT_HWIntrinsic:
1719 #endif // FEATURE_HW_INTRINSICS
1721 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1723 #endif // !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
1730 static inline bool RequiresNonNullOp2(genTreeOps oper);
1731 bool IsValidCallArgument();
1734 inline bool IsFPZero();
1735 inline bool IsIntegralConst(ssize_t constVal);
1736 inline bool IsIntegralConstVector(ssize_t constVal);
1738 inline bool IsBoxedValue();
1740 inline bool IsSIMDEqualityOrInequality() const;
1742 static bool OperIsList(genTreeOps gtOper)
1744 return gtOper == GT_LIST;
1747 bool OperIsList() const
1749 return OperIsList(gtOper);
1752 static bool OperIsFieldList(genTreeOps gtOper)
1754 return gtOper == GT_FIELD_LIST;
1757 bool OperIsFieldList() const
1759 return OperIsFieldList(gtOper);
1762 static bool OperIsAnyList(genTreeOps gtOper)
1764 return OperIsList(gtOper) || OperIsFieldList(gtOper);
1767 bool OperIsAnyList() const
1769 return OperIsAnyList(gtOper);
1772 inline GenTree* MoveNext();
1774 inline GenTree* Current();
1776 inline GenTree** pCurrent();
1778 inline GenTree* gtGetOp1() const;
1780 // Directly return op2. Asserts the node is binary. Might return nullptr if the binary node allows
1781 // a nullptr op2, such as GT_LIST. This is more efficient than gtGetOp2IfPresent() if you know what
1782 // node type you have.
1783 inline GenTree* gtGetOp2() const;
1785 // The returned pointer might be nullptr if the node is not binary, or if non-null op2 is not required.
1786 inline GenTree* gtGetOp2IfPresent() const;
1788 // Given a tree node, if this is a child of that node, return the pointer to the child node so that it
1789 // can be modified; otherwise, return null.
1790 GenTree** gtGetChildPointer(GenTree* parent) const;
1792 // Given a tree node, if this node uses that node, return the use as an out parameter and return true.
1793 // Otherwise, return false.
1794 bool TryGetUse(GenTree* def, GenTree*** use);
1797 bool TryGetUseList(GenTree* def, GenTree*** use);
1799 bool TryGetUseBinOp(GenTree* def, GenTree*** use);
1802 // Get the parent of this node, and optionally capture the pointer to the child so that it can be modified.
1803 GenTree* gtGetParent(GenTree*** parentChildPtrPtr) const;
1805 void ReplaceOperand(GenTree** useEdge, GenTree* replacement);
1807 inline GenTree* gtEffectiveVal(bool commaOnly = false);
1809 // Tunnel through any GT_RET_EXPRs
1810 inline GenTree* gtRetExprVal();
1812 // Return the child of this node if it is a GT_RELOAD or GT_COPY; otherwise simply return the node itself
1813 inline GenTree* gtSkipReloadOrCopy();
1815 // Returns true if it is a call node returning its value in more than one register
1816 inline bool IsMultiRegCall() const;
1818 // Returns true if it is a node returning its value in more than one register
1819 inline bool IsMultiRegNode() const;
1821 // Returns true if it is a GT_COPY or GT_RELOAD node
1822 inline bool IsCopyOrReload() const;
1824 // Returns true if it is a GT_COPY or GT_RELOAD of a multi-reg call node
1825 inline bool IsCopyOrReloadOfMultiRegCall() const;
1827 bool OperRequiresAsgFlag();
1829 bool OperMayThrow(Compiler* comp);
1831 unsigned GetScaleIndexMul();
1832 unsigned GetScaleIndexShf();
1833 unsigned GetScaledIndex();
1835 // Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer
1836 // (<= 32 bit) constant. If it returns true, it sets "*offset" to (one of the) constant value(s), and
1837 // "*addr" to the other argument.
1838 bool IsAddWithI32Const(GenTree** addr, int* offset);
1841 #if SMALL_TREE_NODES
1842 static unsigned char s_gtNodeSizes[];
1843 #if NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
1844 static unsigned char s_gtTrueSizes[];
1847 static LONG s_gtNodeCounts[];
1849 #endif // SMALL_TREE_NODES
1851 static void InitNodeSize();
1853 size_t GetNodeSize() const;
1855 bool IsNodeProperlySized() const;
1857 void ReplaceWith(GenTree* src, Compiler* comp);
1859 static genTreeOps ReverseRelop(genTreeOps relop);
1861 static genTreeOps SwapRelop(genTreeOps relop);
1863 //---------------------------------------------------------------------
1865 static bool Compare(GenTree* op1, GenTree* op2, bool swapOK = false);
1867 //---------------------------------------------------------------------
1869 #if defined(DEBUG) || NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
1870 static const char* OpName(genTreeOps op);
1873 #if MEASURE_NODE_SIZE && SMALL_TREE_NODES
1874 static const char* OpStructName(genTreeOps op);
1877 //---------------------------------------------------------------------
1879 bool IsNothingNode() const;
1882 // Value number update action enumeration
1883 enum ValueNumberUpdate
1885 CLEAR_VN, // Clear value number
1886 PRESERVE_VN // Preserve value number
1889 void SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN); // set gtOper
1890 void SetOperResetFlags(genTreeOps oper); // set gtOper and reset flags
1892 void ChangeOperConst(genTreeOps oper); // ChangeOper(constOper)
1893 // set gtOper and only keep GTF_COMMON_MASK flags
1894 void ChangeOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN);
1895 void ChangeOperUnchecked(genTreeOps oper);
1896 void SetOperRaw(genTreeOps oper);
1898 void ChangeType(var_types newType)
1900 var_types oldType = gtType;
1902 GenTree* node = this;
1903 while (node->gtOper == GT_COMMA)
1905 node = node->gtGetOp2();
1906 assert(node->gtType == oldType);
1907 node->gtType = newType;
1911 #if SMALL_TREE_NODES
1913 static void RecordOperBashing(genTreeOps operOld, genTreeOps operNew);
1914 static void ReportOperBashing(FILE* fp);
1916 static void RecordOperBashing(genTreeOps operOld, genTreeOps operNew)
1919 static void ReportOperBashing(FILE* fp)
1925 bool IsLocal() const
1927 return OperIsLocal(OperGet());
1930 // Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
1931 // is not the same size as the type of the GT_LCL_VAR.
1932 bool IsPartialLclFld(Compiler* comp);
1934 // Returns "true" iff "this" defines a local variable. Requires "comp" to be the
1935 // current compilation. If returns "true", sets "*pLclVarTree" to the
1936 // tree for the local that is defined, and, if "pIsEntire" is non-null, sets "*pIsEntire" to
1937 // true or false, depending on whether the assignment writes to the entirety of the local
1938 // variable, or just a portion of it.
1939 bool DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire = nullptr);
1941 // Returns true if "this" represents the address of a local, or a field of a local. If returns true, sets
1942 // "*pLclVarTree" to the node indicating the local variable. If the address is that of a field of this node,
1943 // sets "*pFldSeq" to the field sequence representing that field, else null.
1944 bool IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq);
1946 // Simpler variant of the above which just returns the local node if this is an expression that
1947 // yields an address into a local
1948 GenTreeLclVarCommon* IsLocalAddrExpr();
1950 // Determine if this is a LclVarCommon node and return some additional info about it in the
1951 // two out parameters.
1952 bool IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq);
1954 // Determine whether this is an assignment tree of the form X = X (op) Y,
1955 // where Y is an arbitrary tree, and X is a lclVar.
1956 unsigned IsLclVarUpdateTree(GenTree** otherTree, genTreeOps* updateOper);
1958 // If returns "true", "this" may represent the address of a static or instance field
1959 // (or a field of such a field, in the case of an object field of type struct).
1960 // If returns "true", then either "*pObj" is set to the object reference,
1961 // or "*pStatic" is set to the baseAddr or offset to be added to the "*pFldSeq"
1962 // Only one of "*pObj" or "*pStatic" will be set, the other one will be null.
1963 // The boolean return value only indicates that "this" *may* be a field address
1964 // -- the field sequence must also be checked.
1965 // If it is a field address, the field sequence will be a sequence of length >= 1,
1966 // starting with an instance or static field, and optionally continuing with struct fields.
1967 bool IsFieldAddr(Compiler* comp, GenTree** pObj, GenTree** pStatic, FieldSeqNode** pFldSeq);
1969 // Requires "this" to be the address of an array (the child of a GT_IND labeled with GTF_IND_ARR_INDEX).
1970 // Sets "pArr" to the node representing the array (either an array object pointer, or perhaps a byref to
1971 // some element of it -- NOTE(review): the tail of this sentence was lost from this listing; confirm upstream).
1972 // Sets "*pArrayType" to the class handle for the array type.
1973 // Sets "*inxVN" to the value number inferred for the array index.
1974 // Sets "*pFldSeq" to the sequence, if any, of struct fields used to index into the array element.
1975 void ParseArrayAddress(
1976 Compiler* comp, struct ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq);
1978 // Helper method for the above.
1979 void ParseArrayAddressWork(
1980 Compiler* comp, ssize_t inputMul, GenTree** pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq);
1982 // Requires "this" to be a GT_IND. Requires the outermost caller to set "*pFldSeq" to nullptr.
1983 // Returns true if it is an array index expression, or access to a (sequence of) struct field(s)
1984 // within a struct array element. If it returns true, sets *arrayInfo to the array information, and sets *pFldSeq
1985 // to the sequence of struct field accesses.
1986 bool ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq);
1988 // Requires "this" to be the address of a (possible) array element (or struct field within that).
1989 // If it is, sets "*arrayInfo" to the array access info, "*pFldSeq" to the sequence of struct fields
1990 // accessed within the array element, and returns true. If not, returns "false".
1991 bool ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq);
1993 // Requires "this" to be an int expression. If it is a sequence of one or more integer constants added together,
1994 // returns true and sets "*pFldSeq" to the sequence of fields with which those constants are annotated.
1995 bool ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq);
1997 // Labels "*this" as an array index expression: label all constants and variables that could contribute, as part of
1998 // an affine expression, to the value of the index.
1999 void LabelIndex(Compiler* comp, bool isConst = true);
2001 // Assumes that "this" occurs in a context where it is being dereferenced as the LHS of an assignment-like
2002 // statement (assignment, initblk, or copyblk). The "width" should be the number of bytes copied by the
2003 // operation. Returns "true" if "this" is an address of (or within)
2004 // a local variable; sets "*pLclVarTree" to that local variable instance; and, if "pIsEntire" is non-null,
2005 // sets "*pIsEntire" to true if this assignment writes the full width of the local.
2006 bool DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire);
// Register-state accessors. The legacy backend tracks "is in a register" via the GTF_REG_VAL
// flag; the RyuJIT backend instead derives it from the register tag (GetRegTag()), and these
// accessors are used only for dumping there.
2008 #ifdef LEGACY_BACKEND
// True iff this node is a GT_REG_VAR (an enregistered local; legacy backend only).
2009 bool IsRegVar() const
2011 return OperGet() == GT_REG_VAR ? true : false;
2015 return (gtFlags & GTF_REG_VAL) ? true : false;
2017 void SetInReg(bool value = true)
2021 gtFlags |= GTF_REG_VAL;
2025 gtFlags &= ~GTF_REG_VAL;
// Returns gtRegNum if the node currently lives in a register, REG_NA otherwise.
2028 regNumber GetReg() const
2030 return InReg() ? gtRegNum : REG_NA;
2033 #else // !LEGACY_BACKEND
2034 // For the non-legacy backend, these are only used for dumping.
2035 // The gtRegNum is only valid in LIR, but the dumping methods are not easily
2036 // modified to check this.
2037 CLANG_FORMAT_COMMENT_ANCHOR;
2041 return (GetRegTag() != GT_REGTAG_NONE) ? true : false;
2043 regNumber GetReg() const
2045 return (GetRegTag() != GT_REGTAG_NONE) ? gtRegNum : REG_NA;
// Containedness: a contained node is evaluated as part of its user rather than
// generating code of its own (GTF_CONTAINED).
2049 static bool IsContained(unsigned flags)
2051 return ((flags & GTF_CONTAINED) != 0);
2057 gtFlags |= GTF_CONTAINED;
// NOTE(review): the assert presumably re-checks via an isContained() predicate that also
// validates the node kind — its definition is not visible here.
2058 assert(isContained());
2061 void ClearContained()
2064 gtFlags &= ~GTF_CONTAINED;
2068 #endif // !LEGACY_BACKEND
// Lifetime queries for GT_REG_VAR nodes: death = last use (GTF_VAR_DEATH),
// birth = first definition in a register (GTF_REG_BIRTH).
2070 bool IsRegVarDeath() const
2072 assert(OperGet() == GT_REG_VAR);
2073 return (gtFlags & GTF_VAR_DEATH) ? true : false;
2075 bool IsRegVarBirth() const
2077 assert(OperGet() == GT_REG_VAR);
2078 return (gtFlags & GTF_REG_BIRTH) ? true : false;
// True iff the second operand is evaluated before the first (GTF_REVERSE_OPS).
2080 bool IsReverseOp() const
2082 return (gtFlags & GTF_REVERSE_OPS) ? true : false;
2084 bool IsUnsigned() const
2086 return ((gtFlags & GTF_UNSIGNED) != 0);
2089 inline bool IsCnsIntOrI() const;
2091 inline bool IsIntegralConst() const;
2093 inline bool IsIntCnsFitsInI32(); // Constant fits in INT32
2095 inline bool IsCnsFltOrDbl() const;
2097 inline bool IsCnsNonZeroFltOrDbl();
// True iff this GT_CNS_INT carries any GTF_ICON_*_HDL handle flag.
2099 bool IsIconHandle() const
2101 assert(gtOper == GT_CNS_INT);
2102 return (gtFlags & GTF_ICON_HDL_MASK) ? true : false;
// True iff this GT_CNS_INT carries exactly the given handle flag.
2105 bool IsIconHandle(unsigned handleType) const
2107 assert(gtOper == GT_CNS_INT);
2108 assert((handleType & GTF_ICON_HDL_MASK) != 0); // check that handleType is one of the valid GTF_ICON_* values
2109 assert((handleType & ~GTF_ICON_HDL_MASK) == 0);
2110 return (gtFlags & GTF_ICON_HDL_MASK) == handleType;
2113 // Return just the part of the flags corresponding to the GTF_ICON_*_HDL flag. For example,
2114 // GTF_ICON_SCOPE_HDL. The tree node must be a const int, but it might not be a handle, in which
2115 // case we'll return zero.
2116 unsigned GetIconHandleFlag() const
2118 assert(gtOper == GT_CNS_INT);
2119 return (gtFlags & GTF_ICON_HDL_MASK);
2122 // Mark this node as no longer being a handle; clear its GTF_ICON_*_HDL bits.
2123 void ClearIconHandleMask()
2125 assert(gtOper == GT_CNS_INT);
2126 gtFlags &= ~GTF_ICON_HDL_MASK;
2129 // Return true if the two GT_CNS_INT trees have the same handle flag (GTF_ICON_*_HDL).
2130 static bool SameIconHandleFlag(GenTree* t1, GenTree* t2)
2132 return t1->GetIconHandleFlag() == t2->GetIconHandleFlag();
2135 bool IsArgPlaceHolderNode() const
2137 return OperGet() == GT_ARGPLACE;
2141 return OperGet() == GT_CALL;
2143 bool IsStatement() const
2145 return OperGet() == GT_STMT;
2147 inline bool IsHelperCall();
2149 bool IsVarAddr() const;
2150 bool gtOverflow() const;
2151 bool gtOverflowEx() const;
2152 bool gtSetFlags() const;
2153 bool gtRequestSetFlags();
2155 #ifdef LEGACY_BACKEND
2156 // Returns true if the codegen of this tree node
2157 // sets ZF and SF flags.
2158 bool gtSetZSFlags() const
2160 return (gtFlags & GTF_ZSF_SET) != 0;
2165 bool gtIsValid64RsltMul();
2166 static int gtDispFlags(unsigned flags, unsigned debugFlags);
// Source/target type accessors for cast nodes.
2170 inline var_types CastFromType();
2171 inline var_types& CastToType();
2173 // Returns "true" iff "this" is a phi-related node (i.e. a GT_PHI_ARG, GT_PHI, or a PhiDefn).
2176 // Returns "true" iff "*this" is an assignment (GT_ASG) tree that defines an SSA name (lcl = phi(...));
2179 // Returns "true" iff "*this" is a statement containing an assignment that defines an SSA name (lcl = phi(...));
2180 bool IsPhiDefnStmt();
2182 // Can't use an assignment operator, because we need the extra "comp" argument
2183 // (to provide the allocator necessary for the VarSet assignment).
2184 // TODO-Cleanup: Not really needed now, w/o liveset on tree nodes
2185 void CopyTo(class Compiler* comp, const GenTree& gt);
2187 // Like the above, except assumes copying from small node to small node.
2188 // (Following the code it replaces, it does *not* copy the GenTree fields,
2189 // which CopyTo does.)
2190 void CopyToSmall(const GenTree& gt);
2192 // Because of the fact that we hid the assignment operator of "BitSet" (in DEBUG),
2193 // we can't synthesize an assignment operator.
2194 // TODO-Cleanup: Could change this w/o liveset on tree nodes
2195 // (This is also necessary for the VTable trick.)
2200 // Returns the number of children of the current node.
2201 unsigned NumChildren();
2203 // Requires "childNum < NumChildren()". Returns the "n"th child of "this."
2204 GenTree* GetChild(unsigned childNum);
2206 // Returns an iterator that will produce the use edge to each operand of this node. Differs
2207 // from the sequence of nodes produced by a loop over `GetChild` in its handling of call, phi,
2208 // and block op nodes.
2209 GenTreeUseEdgeIterator UseEdgesBegin();
2210 GenTreeUseEdgeIterator UseEdgesEnd();
// Range convenience wrapper over UseEdgesBegin()/UseEdgesEnd().
2212 IteratorPair<GenTreeUseEdgeIterator> UseEdges();
2214 // Returns an iterator that will produce each operand of this node. Differs from the sequence
2215 // of nodes produced by a loop over `GetChild` in its handling of call, phi, and block op
2217 GenTreeOperandIterator OperandsBegin();
2218 GenTreeOperandIterator OperandsEnd();
2220 // Returns a range that will produce the operands of this node in use order.
2221 IteratorPair<GenTreeOperandIterator> Operands();
// Result type for VisitOperands callbacks (e.g. Continue/Abort).
2223 enum class VisitResult
2229 // Visits each operand of this node. The operand must be either a lambda, function, or functor with the signature
2230 // `GenTree::VisitResult VisitorFunction(GenTree* operand)`. Here is a simple example:
2232 // unsigned operandCount = 0;
2233 // node->VisitOperands([&](GenTree* operand) -> GenTree::VisitResult)
2236 // return GenTree::VisitResult::Continue;
2239 // This function is generally more efficient than the operand iterator and should be preferred over that API for
2240 // hot code, as it affords better opportunities for inlining and achieves shorter dynamic path lengths when
2241 // deciding how operands need to be accessed.
2243 // Note that this function does not respect `GTF_REVERSE_OPS` and `gtEvalSizeFirst`. This is always safe in LIR,
2244 // but may be dangerous in HIR if for some reason you need to visit operands in the order in which they will
2246 template <typename TVisitor>
2247 void VisitOperands(TVisitor visitor);
// Private helpers for VisitOperands, specialized by node shape.
2250 template <typename TVisitor>
2251 VisitResult VisitListOperands(TVisitor visitor);
2253 template <typename TVisitor>
2254 void VisitBinOpOperands(TVisitor visitor);
2257 bool Precedes(GenTree* other);
2259 // The maximum possible # of children of any node.
2260 static const int MAX_CHILDREN = 6;
// GTF_REUSE_REG_VAL: the register already holds this (constant) value, so codegen
// can skip re-materializing it.
2262 bool IsReuseRegVal() const
2264 // This can be extended to non-constant nodes, but not to local or indir nodes.
2265 if (OperIsConst() && ((gtFlags & GTF_REUSE_REG_VAL) != 0))
2271 void SetReuseRegVal()
2273 assert(OperIsConst());
2274 gtFlags |= GTF_REUSE_REG_VAL;
2276 void ResetReuseRegVal()
2278 assert(OperIsConst());
2279 gtFlags &= ~GTF_REUSE_REG_VAL;
// Marks an indirection (or array-length node) as faulting or non-faulting,
// based on whether the operation can actually throw.
2282 void SetIndirExceptionFlags(Compiler* comp)
2284 assert(OperIsIndirOrArrLength());
2285 gtFlags |= OperMayThrow(comp) ? GTF_EXCEPT : GTF_IND_NONFAULTING;
2288 #if MEASURE_NODE_SIZE
2289 static void DumpNodeSizes(FILE* fp);
// Copy-assignment is deliberately disabled; use CopyTo/CopyToSmall instead.
2295 GenTree& operator=(const GenTree& gt)
2297 assert(!"Don't copy");
2302 #if DEBUGGABLE_GENTREE
2303 // In DEBUG builds, add a dummy virtual method, to give the debugger run-time type information.
2304 virtual void DummyVirt()
// Per-oper vtable pointers used to patch nodes so the debugger shows the derived type.
2308 typedef void* VtablePtr;
2310 VtablePtr GetVtableForOper(genTreeOps oper);
2311 void SetVtableForOper(genTreeOps oper);
2313 static VtablePtr s_vtablesForOpers[GT_COUNT];
2314 static VtablePtr s_vtableForOp;
2315 #endif // DEBUGGABLE_GENTREE
// Placement allocation from the compiler's arena; nodes are never individually freed.
2318 inline void* operator new(size_t sz, class Compiler*, genTreeOps oper);
2320 inline GenTree(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false));
2323 //------------------------------------------------------------------------
2324 // GenTreeUseEdgeIterator: an iterator that will produce each use edge of a GenTree node in the order in which
2327 // The use edges of a node may not correspond exactly to the nodes on the other ends of its use edges: in
2328 // particular, GT_LIST nodes are expanded into their component parts. This differs from the behavior of
2329 // GenTree::GetChildPointer(), which does not expand lists.
2331 // Operand iteration is common enough in the back end of the compiler that the implementation of this type has
2332 // traded some simplicity for speed:
2333 // - As much work as is reasonable is done in the constructor rather than during operand iteration
2334 // - Node-specific functionality is handled by a small class of "advance" functions called by operator++
2335 // rather than making operator++ itself handle all nodes
2336 // - Some specialization has been performed for specific node types/shapes (e.g. the advance function for
2337 // binary nodes is specialized based on whether or not the node has the GTF_REVERSE_OPS flag set)
2339 // Valid values of this type may be obtained by calling `GenTree::UseEdgesBegin` and `GenTree::UseEdgesEnd`.
2341 class GenTreeUseEdgeIterator final
2343 friend class GenTreeOperandIterator;
2344 friend GenTreeUseEdgeIterator GenTree::UseEdgesBegin();
2345 friend GenTreeUseEdgeIterator GenTree::UseEdgesEnd();
2352 CALL_CONTROL_EXPR = 3,
// Pointer-to-member-function type for the per-node-kind advance routines.
2358 typedef void (GenTreeUseEdgeIterator::*AdvanceFn)();
2360 AdvanceFn m_advance;
2366 GenTreeUseEdgeIterator(GenTree* node);
2368 // Advance functions for special nodes
2369 void AdvanceCmpXchg();
2370 void AdvanceBoundsChk();
2371 void AdvanceArrElem();
2372 void AdvanceArrOffset();
2373 void AdvanceDynBlk();
2374 void AdvanceStoreDynBlk();
2376 template <bool ReverseOperands>
2377 void AdvanceBinOp();
2378 void SetEntryStateForBinOp();
2380 // An advance function for list-like nodes (Phi, SIMDIntrinsicInitN, FieldList)
2382 void SetEntryStateForList(GenTree* list);
2384 // The advance function for call nodes
2385 template <int state>
2391 GenTreeUseEdgeIterator();
// m_state == -1 denotes the end iterator; dereferencing it is illegal.
2393 inline GenTree** operator*()
2395 assert(m_state != -1);
2399 inline GenTree** operator->()
2401 assert(m_state != -1);
2405 inline bool operator==(const GenTreeUseEdgeIterator& other) const
// If either side is the end iterator, only the states need to match.
2407 if (m_state == -1 || other.m_state == -1)
2409 return m_state == other.m_state;
2412 return (m_node == other.m_node) && (m_edge == other.m_edge) && (m_argList == other.m_argList) &&
2413 (m_state == other.m_state);
2416 inline bool operator!=(const GenTreeUseEdgeIterator& other) const
2418 return !(operator==(other));
2421 GenTreeUseEdgeIterator& operator++();
2424 //------------------------------------------------------------------------
2425 // GenTreeOperandIterator: an iterator that will produce each operand of a
2426 // GenTree node in the order in which they are
2427 // used. This uses `GenTreeUseEdgeIterator` under
2428 // the covers and comes with the same caveats
2429 // w.r.t. `GetChild`.
2431 // Note: valid values of this type may be obtained by calling
2432 // `GenTree::OperandsBegin` and `GenTree::OperandsEnd`.
2433 class GenTreeOperandIterator final
2435 friend GenTreeOperandIterator GenTree::OperandsBegin();
2436 friend GenTreeOperandIterator GenTree::OperandsEnd();
// The wrapped use-edge iterator; all operations below delegate to it.
2438 GenTreeUseEdgeIterator m_useEdges;
2440 GenTreeOperandIterator(GenTree* node) : m_useEdges(node)
2445 GenTreeOperandIterator() : m_useEdges()
// Dereferencing yields the operand node itself (the use edge's target),
// rather than the GenTree** edge that GenTreeUseEdgeIterator produces.
2449 inline GenTree* operator*()
2451 return *(*m_useEdges);
2454 inline GenTree* operator->()
2456 return *(*m_useEdges);
2459 inline bool operator==(const GenTreeOperandIterator& other) const
2461 return m_useEdges == other.m_useEdges;
2464 inline bool operator!=(const GenTreeOperandIterator& other) const
2466 return !(operator==(other));
2469 inline GenTreeOperandIterator& operator++()
2476 /*****************************************************************************/
2477 // In the current design, we never instantiate GenTreeUnOp: it exists only to be
2478 // used as a base class. For unary operators, we instantiate GenTreeOp, with a NULL second
2479 // argument. We check that this is true dynamically. We could tighten this and get static
2480 // checking, but that would entail accessing the first child of a unary operator via something
2481 // like gtUnOp.gtOp1 instead of gtOp.gtOp1.
2482 struct GenTreeUnOp : public GenTree
// Constructor with no operand yet; the caller fills in gtOp1 later.
2487 GenTreeUnOp(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false))
2488 : GenTree(oper, type DEBUGARG(largeNode)), gtOp1(nullptr)
2492 GenTreeUnOp(genTreeOps oper, var_types type, GenTree* op1 DEBUGARG(bool largeNode = false))
2493 : GenTree(oper, type DEBUGARG(largeNode)), gtOp1(op1)
2495 assert(op1 != nullptr || NullOp1Legal());
2497 { // Propagate effects flags from child.
2498 gtFlags |= op1->gtFlags & GTF_ALL_EFFECT;
2502 #if DEBUGGABLE_GENTREE
2503 GenTreeUnOp() : GenTree(), gtOp1(nullptr)
// Binary operator node (also used for unary operators — see GenTreeUnOp comment above).
2509 struct GenTreeOp : public GenTreeUnOp
2513 GenTreeOp(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2 DEBUGARG(bool largeNode = false))
2514 : GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode)), gtOp2(op2)
2516 // comparisons are always integral types
2517 assert(!GenTree::OperIsCompare(oper) || varTypeIsIntegral(type));
2518 // Binary operators, with a few exceptions, require a non-nullptr
2520 assert(op2 != nullptr || NullOp2Legal());
2521 // Unary operators, on the other hand, require a null second argument.
2522 assert(!OperIsUnary(oper) || op2 == nullptr);
2523 // Propagate effects flags from child. (UnOp handled this for first child.)
2526 gtFlags |= op2->gtFlags & GTF_ALL_EFFECT;
2530 // A small set of types are unary operators with optional arguments. We use
2531 // this constructor to build those.
2532 GenTreeOp(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false))
2533 : GenTreeUnOp(oper, type DEBUGARG(largeNode)), gtOp2(nullptr)
2535 // Unary operators with optional arguments:
2536 assert(oper == GT_NOP || oper == GT_RETURN || oper == GT_RETFILT || OperIsBlk(oper));
2539 #if DEBUGGABLE_GENTREE
2540 GenTreeOp() : GenTreeUnOp(), gtOp2(nullptr)
// Node carrying a single ssize_t payload (gtVal1); used by opers that need one
// immediate value and no operands.
2546 struct GenTreeVal : public GenTree
2550 GenTreeVal(genTreeOps oper, var_types type, ssize_t val) : GenTree(oper, type), gtVal1(val)
2553 #if DEBUGGABLE_GENTREE
2554 GenTreeVal() : GenTree()
// Common base for integer constants: GT_CNS_INT (GenTreeIntCon) and, on 32-bit
// targets, GT_CNS_LNG (GenTreeLngCon). The accessors below dispatch to whichever
// derived layout is active (definitions appear later in this file).
2560 struct GenTreeIntConCommon : public GenTree
2562 inline INT64 LngValue();
2563 inline void SetLngValue(INT64 val);
2564 inline ssize_t IconValue();
2565 inline void SetIconValue(ssize_t val);
2566 inline INT64 IntegralValue();
2568 GenTreeIntConCommon(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false))
2569 : GenTree(oper, type DEBUGARG(largeNode))
2573 bool FitsInI8() // IconValue() fits into 8-bit signed storage
2575 return FitsInI8(IconValue());
2578 static bool FitsInI8(ssize_t val) // Constant fits into 8-bit signed storage
2580 return (int8_t)val == val;
2583 bool FitsInI32() // IconValue() fits into 32-bit signed storage
2585 return FitsInI32(IconValue());
2588 static bool FitsInI32(ssize_t val) // Constant fits into 32-bit signed storage
2590 #ifdef _TARGET_64BIT_
2591 return (int32_t)val == val;
// Reloc queries: whether the immediate must be relocated (e.g. handles under ngen)
// and whether it can participate in constant folding for the given oper.
2597 bool ImmedValNeedsReloc(Compiler* comp);
2598 bool ImmedValCanBeFolded(Compiler* comp, genTreeOps op);
2600 #ifdef _TARGET_XARCH_
2601 bool FitsInAddrBase(Compiler* comp);
2602 bool AddrNeedsReloc(Compiler* comp);
2605 #if DEBUGGABLE_GENTREE
2606 GenTreeIntConCommon() : GenTree()
2612 // node representing a read from a physical register
2613 struct GenTreePhysReg : public GenTree
2615 // physregs need a field beyond gtRegNum because
2616 // gtRegNum indicates the destination (and can be changed)
2617 // whereas reg indicates the source
2619 GenTreePhysReg(regNumber r, var_types type = TYP_I_IMPL) : GenTree(GT_PHYSREG, type), gtSrcReg(r)
2622 #if DEBUGGABLE_GENTREE
2623 GenTreePhysReg() : GenTree()
2629 #ifndef LEGACY_BACKEND
2630 // gtJumpTable - Switch Jump Table
2632 // This node stores a DWORD constant that represents the
2633 // absolute address of a jump table for switches. The code
2634 // generator uses this table to code the destination for every case
2635 // in an array of addresses which starting position is stored in
2637 struct GenTreeJumpTable : public GenTreeIntConCommon
// Address of the jump table; filled in during codegen.
2639 ssize_t gtJumpTableAddr;
2641 GenTreeJumpTable(var_types type DEBUGARG(bool largeNode = false))
2642 : GenTreeIntConCommon(GT_JMPTABLE, type DEBUGARG(largeNode))
2645 #if DEBUGGABLE_GENTREE
2646 GenTreeJumpTable() : GenTreeIntConCommon()
2651 #endif // !LEGACY_BACKEND
2653 /* gtIntCon -- integer constant (GT_CNS_INT) */
2654 struct GenTreeIntCon : public GenTreeIntConCommon
2657 * This is the GT_CNS_INT struct definition.
2658 * It's used to hold both int constants and pointer handle constants.
2659 * For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes
2660 * For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
2661 * In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG
2663 ssize_t gtIconVal; // Must overlap and have the same offset with the gtLconVal field in GenTreeLngCon below.
2665 /* The InitializeArray intrinsic needs to go back to the newarray statement
2666 to find the class handle of the array so that we can get its size. However,
2667 in ngen mode, the handle in that statement does not correspond to the compile
2668 time handle (rather it lets you get a handle at run-time). In that case, we also
2669 need to store a compile time handle, which goes in this gtCompileTimeHandle field.
2671 ssize_t gtCompileTimeHandle;
2673 // TODO-Cleanup: It's not clear what characterizes the cases where the field
2674 // above is used. It may be that its uses and those of the "gtFieldSeq" field below
2675 // are mutually exclusive, and they could be put in a union. Or else we should separate
2676 // this type into three subtypes.
2678 // If this constant represents the offset of one or more fields, "gtFieldSeq" represents that
2679 // sequence of fields.
2680 FieldSeqNode* gtFieldSeq;
2682 GenTreeIntCon(var_types type, ssize_t value DEBUGARG(bool largeNode = false))
2683 : GenTreeIntConCommon(GT_CNS_INT, type DEBUGARG(largeNode))
2685 , gtCompileTimeHandle(0)
2686 , gtFieldSeq(FieldSeqStore::NotAField())
2690 GenTreeIntCon(var_types type, ssize_t value, FieldSeqNode* fields DEBUGARG(bool largeNode = false))
2691 : GenTreeIntConCommon(GT_CNS_INT, type DEBUGARG(largeNode))
2693 , gtCompileTimeHandle(0)
2694 , gtFieldSeq(fields)
2696 assert(fields != nullptr);
2699 void FixupInitBlkValue(var_types asgType);
2701 #ifdef _TARGET_64BIT_
// Narrows the 64-bit payload to 32 bits: zero-extends when GTF_UNSIGNED is set,
// sign-extends otherwise.
2702 void TruncateOrSignExtend32()
2704 if (gtFlags & GTF_UNSIGNED)
2706 gtIconVal = UINT32(gtIconVal);
2710 gtIconVal = INT32(gtIconVal);
2713 #endif // _TARGET_64BIT_
2715 #if DEBUGGABLE_GENTREE
2716 GenTreeIntCon() : GenTreeIntConCommon()
2722 /* gtLngCon -- long constant (GT_CNS_LNG) */
2724 struct GenTreeLngCon : public GenTreeIntConCommon
2726 INT64 gtLconVal; // Must overlap and have the same offset with the gtIconVal field in GenTreeIntCon above.
// Low 32 bits of the constant.
2729 return (INT32)(gtLconVal & 0xffffffff);
// High 32 bits of the constant.
2734 return (INT32)(gtLconVal >> 32);
2737 GenTreeLngCon(INT64 val) : GenTreeIntConCommon(GT_CNS_NATIVELONG, TYP_LONG)
2741 #if DEBUGGABLE_GENTREE
2742 GenTreeLngCon() : GenTreeIntConCommon()
// On 32-bit targets a 64-bit constant lives in a GT_CNS_LNG node; on 64-bit targets
// GT_CNS_INT holds every size, so the Lng accessors forward to the Icon ones.
2748 inline INT64 GenTreeIntConCommon::LngValue()
2750 #ifndef _TARGET_64BIT_
2751 assert(gtOper == GT_CNS_LNG);
2752 return AsLngCon()->gtLconVal;
2758 inline void GenTreeIntConCommon::SetLngValue(INT64 val)
2760 #ifndef _TARGET_64BIT_
2761 assert(gtOper == GT_CNS_LNG);
2762 AsLngCon()->gtLconVal = val;
2764 // Compile time asserts that these two fields overlap and have the same offsets: gtIconVal and gtLconVal
2765 C_ASSERT(offsetof(GenTreeLngCon, gtLconVal) == offsetof(GenTreeIntCon, gtIconVal));
2766 C_ASSERT(sizeof(AsLngCon()->gtLconVal) == sizeof(AsIntCon()->gtIconVal));
2768 SetIconValue(ssize_t(val));
2772 inline ssize_t GenTreeIntConCommon::IconValue()
2774 assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target!
2775 return AsIntCon()->gtIconVal;
2778 inline void GenTreeIntConCommon::SetIconValue(ssize_t val)
2780 assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target!
2781 AsIntCon()->gtIconVal = val;
// Returns the constant widened to 64 bits regardless of which node kind holds it.
2784 inline INT64 GenTreeIntConCommon::IntegralValue()
2786 #ifdef _TARGET_64BIT_
2789 return gtOper == GT_CNS_LNG ? LngValue() : (INT64)IconValue();
2790 #endif // _TARGET_64BIT_
2793 /* gtDblCon -- double constant (GT_CNS_DBL) */
2795 struct GenTreeDblCon : public GenTree
// Bit-exact equality of the two payloads: compares the raw 64-bit patterns so that
// e.g. +0.0 and -0.0 differ and NaNs compare by representation (a floating-point
// `==` would get both of those wrong for VN/CSE purposes).
2799 bool isBitwiseEqual(GenTreeDblCon* other)
2801 unsigned __int64 bits = *(unsigned __int64*)(&gtDconVal);
2802 unsigned __int64 otherBits = *(unsigned __int64*)(&(other->gtDconVal));
2803 return (bits == otherBits);
2806 GenTreeDblCon(double val) : GenTree(GT_CNS_DBL, TYP_DOUBLE), gtDconVal(val)
2809 #if DEBUGGABLE_GENTREE
2810 GenTreeDblCon() : GenTree()
2816 /* gtStrCon -- string constant (GT_CNS_STR) */
2818 struct GenTreeStrCon : public GenTree
// Module (scope) handle that gtSconCPX is resolved against.
2821 CORINFO_MODULE_HANDLE gtScpHnd;
2823 // Because this node can come from an inlined method we need to
2824 // have the scope handle, since it will become a helper call.
2825 GenTreeStrCon(unsigned sconCPX, CORINFO_MODULE_HANDLE mod DEBUGARG(bool largeNode = false))
2826 : GenTree(GT_CNS_STR, TYP_REF DEBUGARG(largeNode)), gtSconCPX(sconCPX), gtScpHnd(mod)
2829 #if DEBUGGABLE_GENTREE
2830 GenTreeStrCon() : GenTree()
2836 // Common supertype of LCL_VAR, LCL_FLD, REG_VAR, PHI_ARG
2837 // This inherits from UnOp because lclvar stores are Unops
2838 struct GenTreeLclVarCommon : public GenTreeUnOp
2841 unsigned _gtLclNum; // The local number. An index into the Compiler::lvaTable array.
2842 unsigned _gtSsaNum; // The SSA number.
2845 GenTreeLclVarCommon(genTreeOps oper, var_types type, unsigned lclNum DEBUGARG(bool largeNode = false))
2846 : GenTreeUnOp(oper, type DEBUGARG(largeNode))
2851 unsigned GetLclNum() const
// MSVC property syntax: lets existing code keep saying "node->gtLclNum".
2855 __declspec(property(get = GetLclNum)) unsigned gtLclNum;
2857 void SetLclNum(unsigned lclNum)
// Changing the local invalidates any SSA number; reset it to the reserved sentinel.
2860 _gtSsaNum = SsaConfig::RESERVED_SSA_NUM;
2863 unsigned GetSsaNum() const
2867 __declspec(property(get = GetSsaNum)) unsigned gtSsaNum;
2869 void SetSsaNum(unsigned ssaNum)
// True iff this node has been assigned a real (non-reserved) SSA number.
2876 return (gtSsaNum != SsaConfig::RESERVED_SSA_NUM);
2879 #if DEBUGGABLE_GENTREE
2880 GenTreeLclVarCommon() : GenTreeUnOp()
2886 // gtLclVar -- load/store/addr of local variable
2888 struct GenTreeLclVar : public GenTreeLclVarCommon
2890 IL_OFFSET gtLclILoffs; // instr offset of ref (only for debug info)
2892 GenTreeLclVar(var_types type, unsigned lclNum, IL_OFFSET ilOffs DEBUGARG(bool largeNode = false))
2893 : GenTreeLclVarCommon(GT_LCL_VAR, type, lclNum DEBUGARG(largeNode)), gtLclILoffs(ilOffs)
// General constructor: accepts any local load/store/address oper, not just GT_LCL_VAR.
2897 GenTreeLclVar(genTreeOps oper, var_types type, unsigned lclNum, IL_OFFSET ilOffs DEBUGARG(bool largeNode = false))
2898 : GenTreeLclVarCommon(oper, type, lclNum DEBUGARG(largeNode)), gtLclILoffs(ilOffs)
2900 assert(OperIsLocal(oper) || OperIsLocalAddr(oper));
2903 #if DEBUGGABLE_GENTREE
2904 GenTreeLclVar() : GenTreeLclVarCommon()
2910 // gtLclFld -- load/store/addr of local variable field
2912 struct GenTreeLclFld : public GenTreeLclVarCommon
2914 unsigned gtLclOffs; // offset into the variable to access
2916 FieldSeqNode* gtFieldSeq; // This LclFld node represents some sequences of accesses.
2918 // old/FE style constructor where load/store/addr share same opcode
2919 GenTreeLclFld(var_types type, unsigned lclNum, unsigned lclOffs)
2920 : GenTreeLclVarCommon(GT_LCL_FLD, type, lclNum), gtLclOffs(lclOffs), gtFieldSeq(nullptr)
// GT_LCL_FLD must not outgrow its preallocated node size.
2922 assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]);
2925 GenTreeLclFld(genTreeOps oper, var_types type, unsigned lclNum, unsigned lclOffs)
2926 : GenTreeLclVarCommon(oper, type, lclNum), gtLclOffs(lclOffs), gtFieldSeq(nullptr)
2928 assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]);
2930 #if DEBUGGABLE_GENTREE
2931 GenTreeLclFld() : GenTreeLclVarCommon()
// GT_REG_VAR -- an enregistered local variable (legacy backend / stack FP).
2937 struct GenTreeRegVar : public GenTreeLclVarCommon
2939 // TODO-Cleanup: Note that the base class GenTree already has a gtRegNum field.
2940 // It's not clear exactly why a GT_REG_VAR has a separate field. When
2941 // GT_REG_VAR is created, the two are identical. It appears that they may
2942 // or may not remain so. In particular, there is a comment in stackfp.cpp
2945 // There used to be an assertion: assert(src->gtRegNum == src->gtRegVar.gtRegNum, ...)
2946 // here, but there's actually no reason to assume that. AFAICT, for FP vars under stack FP,
2947 // src->gtRegVar.gtRegNum is the allocated stack pseudo-register, but src->gtRegNum is the
2948 // FP stack position into which that is loaded to represent a particular use of the variable.
2950 // It might be the case that only for stackfp do they ever differ.
2952 // The following might be possible: the GT_REG_VAR node has a last use prior to a complex
2953 // subtree being evaluated. It could then be spilled from the register. Later,
2954 // it could be unspilled into a different register, which would be recorded at
2955 // the unspill time in the GenTree::gtRegNum, whereas GenTreeRegVar::gtRegNum
2956 // is left alone. It's not clear why that is useful.
2958 // Assuming there is a particular use, like stack fp, that requires it, maybe we
2959 // can get rid of GT_REG_VAR and just leave it as GT_LCL_VAR, using the base class gtRegNum field.
2960 // If we need it for stackfp, we could add a GenTreeStackFPRegVar type, which carries both the
2961 // pieces of information, in a clearer and more specific way (in particular, with
2962 // a different member name).
// Storage for this node's own register number (distinct from GenTree::gtRegNum; see above).
2966 regNumberSmall _gtRegNum;
2969 GenTreeRegVar(var_types type, unsigned lclNum, regNumber regNum) : GenTreeLclVarCommon(GT_REG_VAR, type, lclNum)
2974 // The register number is stored in a small format (8 bits), but the getters return and the setters take
2975 // a full-size (unsigned) format, to localize the casts here.
2977 __declspec(property(get = GetRegNum, put = SetRegNum)) regNumber gtRegNum;
2979 regNumber GetRegNum() const
2981 return (regNumber)_gtRegNum;
2984 void SetRegNum(regNumber reg)
2986 _gtRegNum = (regNumberSmall)reg;
// Verify the narrowing cast did not lose information.
2987 assert(_gtRegNum == reg);
2990 #if DEBUGGABLE_GENTREE
2991 GenTreeRegVar() : GenTreeLclVarCommon()
2997 /* gtCast -- conversion to a different type (GT_CAST) */
2999 struct GenTreeCast : public GenTreeOp
// The type we are casting to; the node's own gtType is the (stack-normalized) result type.
3005 var_types gtCastType;
3007 GenTreeCast(var_types type, GenTree* op, bool fromUnsigned, var_types castType DEBUGARG(bool largeNode = false))
3008 : GenTreeOp(GT_CAST, type, op, nullptr DEBUGARG(largeNode)), gtCastType(castType)
// GTF_UNSIGNED records that the SOURCE is treated as unsigned for the conversion.
3010 gtFlags |= fromUnsigned ? GTF_UNSIGNED : 0;
3012 #if DEBUGGABLE_GENTREE
3013 GenTreeCast() : GenTreeOp()
3019 // GT_BOX nodes are place markers for boxed values. The "real" tree
3020 // for most purposes is in gtBoxOp.
3021 struct GenTreeBox : public GenTreeUnOp
3023 // An expanded helper call to implement the "box" if we don't get
3024 // rid of it any other way. Must be in same position as op1.
3030 // This is the statement that contains the assignment tree when the node is an inlined GT_BOX on a value
3032 GenTree* gtAsgStmtWhenInlinedBoxValue;
3033 // And this is the statement that copies from the value being boxed to the box payload
3034 GenTree* gtCopyStmtWhenInlinedBoxValue;
3036 GenTreeBox(var_types type,
3038 GenTree* asgStmtWhenInlinedBoxValue,
3039 GenTree* copyStmtWhenInlinedBoxValue)
3040 : GenTreeUnOp(GT_BOX, type, boxOp)
3041 , gtAsgStmtWhenInlinedBoxValue(asgStmtWhenInlinedBoxValue)
3042 , gtCopyStmtWhenInlinedBoxValue(copyStmtWhenInlinedBoxValue)
3045 #if DEBUGGABLE_GENTREE
3046 GenTreeBox() : GenTreeUnOp()
3052 /* gtField -- data member ref (GT_FIELD) */
3054 struct GenTreeField : public GenTree
// EE handle identifying the field being accessed.
3057 CORINFO_FIELD_HANDLE gtFldHnd;
// True if this field may alias other fields (overlapping layout).
3059 bool gtFldMayOverlap;
3060 #ifdef FEATURE_READYTORUN_COMPILER
3061 CORINFO_CONST_LOOKUP gtFieldLookup;
3064 GenTreeField(var_types type) : GenTree(GT_FIELD, type)
3066 gtFldMayOverlap = false;
3068 #if DEBUGGABLE_GENTREE
3069 GenTreeField() : GenTree()
3075 // Represents the Argument list of a call node, as a Lisp-style linked list.
3076 // (Originally I had hoped that this could have *only* the m_arg/m_rest fields, but it turns out
3077 // that enough of the GenTree mechanism is used that it makes sense just to make it a subtype. But
3078 // note that in many ways, this is *not* a "real" node of the tree, but rather a mechanism for
3079 // giving call nodes a flexible number of children. GenTreeArgListNodes never evaluate to registers,
3082 // Note that while this extends GenTreeOp, it is *not* an EXOP. We don't add any new fields, and one
3083 // is free to allocate a GenTreeOp of type GT_LIST. If you use this type, you get the convenient Current/Rest
3084 // method names for the arguments.
3085 struct GenTreeArgList : public GenTreeOp
// Returns a reference to the tail of the list (stored in gtOp2), typed as GenTreeArgList*.
3091 GenTreeArgList*& Rest()
3093 assert(gtOp2 == nullptr || gtOp2->OperIsAnyList());
// The reinterpret_cast reuses gtOp2's storage as a GenTreeArgList* lvalue; the assert
// above guarantees the stored node (if any) really is a list node.
3094 return *reinterpret_cast<GenTreeArgList**>(&gtOp2);
3097 #if DEBUGGABLE_GENTREE
3098 GenTreeArgList() : GenTreeOp()
// Single-element list.
3103 GenTreeArgList(GenTree* arg) : GenTreeArgList(arg, nullptr)
3107 GenTreeArgList(GenTree* arg, GenTreeArgList* rest) : GenTreeArgList(GT_LIST, arg, rest)
// Worker constructor: list nodes are TYP_VOID and accumulate the effect flags of
// their element and their tail.
3111 GenTreeArgList(genTreeOps oper, GenTree* arg, GenTreeArgList* rest) : GenTreeOp(oper, TYP_VOID, arg, rest)
3113 assert(OperIsAnyList(oper));
3114 assert((arg != nullptr) && arg->IsValidCallArgument());
3115 gtFlags |= arg->gtFlags & GTF_ALL_EFFECT;
3116 if (rest != nullptr)
3118 gtFlags |= rest->gtFlags & GTF_ALL_EFFECT;
3123 // Represents a list of fields constituting a struct, when it is passed as an argument.
3124 // The first field of the struct is marked with the GTF_FIELD_LIST_HEAD flag, and
3125 // in LIR form it is the only member of the list that is threaded into the execution
3127 // It differs from the GenTreeArgList in a couple of ways:
3128 // - The entire list represents a single argument.
3129 // - It contains additional fields to provide the offset and type of the field.
3131 struct GenTreeFieldList : public GenTreeArgList
// Offset of this field within the struct, and its type.
3133 unsigned gtFieldOffset;
3134 var_types gtFieldType;
3136 bool IsFieldListHead() const
3138 return (gtFlags & GTF_FIELD_LIST_HEAD) != 0;
3141 #if DEBUGGABLE_GENTREE
3142 GenTreeFieldList() : GenTreeArgList()
// Returns a reference to the tail of the list (stored in gtOp2), typed as GenTreeFieldList*.
3147 GenTreeFieldList*& Rest()
3149 assert(gtOp2 == nullptr || gtOp2->OperGet() == GT_FIELD_LIST);
// Reuse gtOp2's storage as a GenTreeFieldList* lvalue; the assert above guarantees
// the stored node (if any) is a GT_FIELD_LIST.
3150 return *reinterpret_cast<GenTreeFieldList**>(&gtOp2);
// Constructs a new element and, when prevList is non-null, links it as that list's tail;
// when prevList is null this element becomes the (contained) list head.
3153 GenTreeFieldList(GenTree* arg, unsigned fieldOffset, var_types fieldType, GenTreeFieldList* prevList)
3154 : GenTreeArgList(GT_FIELD_LIST, arg, nullptr)
3156 // While GT_FIELD_LIST can be in a GT_LIST, GT_FIELD_LISTs cannot be nested or have GT_LISTs.
3157 assert(!arg->OperIsAnyList());
3158 gtFieldOffset = fieldOffset;
3159 gtFieldType = fieldType;
3161 if (prevList == nullptr)
3163 gtFlags |= GTF_FIELD_LIST_HEAD;
3164 #ifndef LEGACY_BACKEND
3165 // A GT_FIELD_LIST head is always contained. Other nodes return false from IsValue()
3166 // and should not be marked as contained.
3172 prevList->gtOp2 = this;
3177 // There was quite a bit of confusion in the code base about which of gtOp1 and gtOp2 was the
3178 // 'then' and 'else' clause of a colon node. Adding these accessors, while not enforcing anything,
3179 // at least *allows* the programmer to be obviously correct.
3180 // However, these conventions seem backward.
3181 // TODO-Cleanup: If we could get these accessors used everywhere, then we could switch them.
3182 struct GenTreeColon : public GenTreeOp
3184 GenTree*& ThenNode()
3188 GenTree*& ElseNode()
3193 #if DEBUGGABLE_GENTREE
3194 GenTreeColon() : GenTreeOp()
// NB: the constructor stores elseNode in gtOp1 and thenNode in gtOp2 (see the argument
// order passed to GenTreeOp) — this is the "backward" convention noted above.
3199 GenTreeColon(var_types typ, GenTree* thenNode, GenTree* elseNode) : GenTreeOp(GT_COLON, typ, elseNode, thenNode)
3204 // gtCall -- method call (GT_CALL)
3205 enum class InlineObservation;
3207 // Return type descriptor of a GT_CALL node.
3208 // x64 Unix, Arm64, Arm32 and x86 allow a value to be returned in multiple
3209 // registers. For such calls this struct provides the following info
3210 // on their return type
3211 // - type of value returned in each return register
3212 // - ABI return register numbers in which the value is returned
3213 // - count of return registers in which the value is returned
3215 // TODO-ARM: Update this to meet the needs of Arm64 and Arm32
3217 // TODO-AllArch: Right now it is used for describing multi-reg returned types.
3218 // Eventually we would want to use it for describing even single-reg
3219 // returned types (e.g. structs returned in single register x64/arm).
3220 // This would allow us not to lie or normalize single struct return
3221 // values in importer/morph.
// ReturnTypeDesc -- describes how a call's return value is split across the ABI
// return registers: m_regType[i] holds the type returned in the i'th register,
// with TYP_UNKNOWN marking unused slots (see the larger comment above).
3222 struct ReturnTypeDesc
// Per-register return types; trailing unused entries are TYP_UNKNOWN.
3225 var_types m_regType[MAX_RET_REG_COUNT];
// NOTE(review): an 'm_inited' member is referenced by TryGetReturnRegCount()
// below but its declaration is elided from this excerpt -- confirm in full file.
3237 // Initialize the Return Type Descriptor for a method that returns a struct type
3238 void InitializeStructReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd);
3240 // Initialize the Return Type Descriptor for a method that returns a TYP_LONG
3241 // Only needed for X86
3242 void InitializeLongReturnType(Compiler* comp);
3244 // Reset type descriptor to defaults
3247 for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
3249 m_regType[i] = TYP_UNKNOWN;
3257 // NOTE: we only use this function when writing out IR dumps. These dumps may take place before the ReturnTypeDesc
3258 // has been initialized.
3259 unsigned TryGetReturnRegCount() const
3261 return m_inited ? GetReturnRegCount() : 0;
3265 //--------------------------------------------------------------------------------------------
3266 // GetReturnRegCount: Get the count of return registers in which the return value is returned.
3272 // Count of return registers.
3273 // Returns 0 if the return type is not returned in registers.
3274 unsigned GetReturnRegCount() const
// Count leading non-TYP_UNKNOWN entries; the first TYP_UNKNOWN ends the run.
3279 for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
3281 if (m_regType[i] == TYP_UNKNOWN)
3290 // Any remaining elements in m_regTypes[] should also be TYP_UNKNOWN
3291 for (unsigned i = regCount + 1; i < MAX_RET_REG_COUNT; ++i)
3293 assert(m_regType[i] == TYP_UNKNOWN);
3300 //-----------------------------------------------------------------------
3301 // IsMultiRegRetType: check whether the type is returned in multiple
3302 // return registers.
3308 // Returns true if the type is returned in multiple return registers.
3310 // Note that we only have to examine the first two values to determine this
3312 bool IsMultiRegRetType() const
3314 if (MAX_RET_REG_COUNT < 2)
3320 return ((m_regType[0] != TYP_UNKNOWN) && (m_regType[1] != TYP_UNKNOWN));
3324 //--------------------------------------------------------------------------
3325 // GetReturnRegType: Get var_type of the return register specified by index.
3328 // index - Index of the return register.
3329 // First return register will have an index 0 and so on.
3332 // var_type of the return register specified by its index.
3333 // asserts if the index does not have a valid register return type.
3335 var_types GetReturnRegType(unsigned index)
3337 var_types result = m_regType[index];
3338 assert(result != TYP_UNKNOWN);
3343 // Get ith ABI return register
3344 regNumber GetABIReturnReg(unsigned idx);
3346 // Get reg mask of ABI return registers
3347 regMaskTP GetABIReturnRegs();
3352 struct GenTreeCall final : public GenTree
3354 GenTree* gtCallObjp; // The instance argument ('this' pointer)
3355 GenTreeArgList* gtCallArgs; // The list of arguments in original evaluation order
3356 GenTreeArgList* gtCallLateArgs; // On x86: The register arguments in an optimal order
3357 // On ARM/x64: - also includes any outgoing arg space arguments
3358 // - that were evaluated into a temp LclVar
3359 fgArgInfo* fgArgInfo;
3361 #if !FEATURE_FIXED_OUT_ARGS
3362 int regArgListCount;
3366 // TODO-Throughput: Revisit this (this used to be only defined if
3367 // FEATURE_FIXED_OUT_ARGS was enabled, so this makes GenTreeCall 4 bytes bigger on x86).
3368 CORINFO_SIG_INFO* callSig; // Used by tail calls and to register callsites with the EE
3370 #ifdef LEGACY_BACKEND
3371 regMaskTP gtCallRegUsedMask; // mask of registers used to pass parameters
3372 #endif // LEGACY_BACKEND
3374 #if FEATURE_MULTIREG_RET
3376 // State required to support multi-reg returning call nodes.
3377 // For now it is enabled only for x64 unix.
3379 // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns.
3380 ReturnTypeDesc gtReturnTypeDesc;
3382 // gtRegNum would always be the first return reg.
3383 // The following array holds the other reg numbers of multi-reg return.
3384 regNumberSmall gtOtherRegs[MAX_RET_REG_COUNT - 1];
3386 // GTF_SPILL or GTF_SPILLED flag on a multi-reg call node indicates that one or
3387 // more of its result regs are in that state. The spill flag of each of the
3388 // return register is stored here. We only need 2 bits per returned register,
3389 // so this is treated as a 2-bit array. No architecture needs more than 8 bits.
3391 static const unsigned PACKED_GTF_SPILL = 1;
3392 static const unsigned PACKED_GTF_SPILLED = 2;
3393 unsigned char gtSpillFlags;
3395 #endif // FEATURE_MULTIREG_RET
3397 //-----------------------------------------------------------------------
3398 // GetReturnTypeDesc: get the type descriptor of return value of the call
3404 // Type descriptor of the value returned by call
3407 // Right now implemented only for x64 unix and yet to be
3408 // implemented for other multi-reg target arch (Arm64/Arm32/x86).
3410 // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns.
3411 ReturnTypeDesc* GetReturnTypeDesc()
3413 #if FEATURE_MULTIREG_RET
3414 return >ReturnTypeDesc;
3420 //---------------------------------------------------------------------------
3421 // GetRegNumByIdx: get ith return register allocated to this call node.
3424 // idx - index of the return register
3427 // Return regNumber of ith return register of call node.
3428 // Returns REG_NA if there is no valid return register for the given index.
3430 regNumber GetRegNumByIdx(unsigned idx) const
3432 assert(idx < MAX_RET_REG_COUNT);
3439 #if FEATURE_MULTIREG_RET
3440 return (regNumber)gtOtherRegs[idx - 1];
3446 //----------------------------------------------------------------------
3447 // SetRegNumByIdx: set ith return register of this call node
3451 // idx - index of the return register
3456 void SetRegNumByIdx(regNumber reg, unsigned idx)
3458 assert(idx < MAX_RET_REG_COUNT);
3464 #if FEATURE_MULTIREG_RET
3467 gtOtherRegs[idx - 1] = (regNumberSmall)reg;
3468 assert(gtOtherRegs[idx - 1] == reg);
3475 //----------------------------------------------------------------------------
3476 // ClearOtherRegs: clear multi-reg state to indicate no regs are allocated
3484 void ClearOtherRegs()
3486 #if FEATURE_MULTIREG_RET
3487 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
3489 gtOtherRegs[i] = REG_NA;
3494 //----------------------------------------------------------------------------
3495 // CopyOtherRegs: copy multi-reg state from the given call node to this node
3498 // fromCall - GenTreeCall node from which to copy multi-reg state
3503 void CopyOtherRegs(GenTreeCall* fromCall)
3505 #if FEATURE_MULTIREG_RET
3506 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
3508 this->gtOtherRegs[i] = fromCall->gtOtherRegs[i];
3513 // Get reg mask of all the valid registers of gtOtherRegs array
3514 regMaskTP GetOtherRegMask() const;
3516 //----------------------------------------------------------------------
3517 // GetRegSpillFlagByIdx: get spill flag associated with the return register
3518 // specified by its index.
3521 // idx - Position or index of the return register
3524 // Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered.
3526 unsigned GetRegSpillFlagByIdx(unsigned idx) const
3528 static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE);
3529 assert(idx < MAX_RET_REG_COUNT);
3531 #if FEATURE_MULTIREG_RET
3532 unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
3533 unsigned spillFlags = 0;
3534 if (bits & PACKED_GTF_SPILL)
3536 spillFlags |= GTF_SPILL;
3538 if (bits & PACKED_GTF_SPILLED)
3540 spillFlags |= GTF_SPILLED;
3544 assert(!"unreached");
3549 //----------------------------------------------------------------------
3550 // SetRegSpillFlagByIdx: set spill flags for the return register
3551 // specified by its index.
3554 // flags - GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed.
3555 // idx - Position or index of the return register
3560 void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
3562 static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE);
3563 assert(idx < MAX_RET_REG_COUNT);
3565 #if FEATURE_MULTIREG_RET
3567 if (flags & GTF_SPILL)
3569 bits |= PACKED_GTF_SPILL;
3571 if (flags & GTF_SPILLED)
3573 bits |= PACKED_GTF_SPILLED;
3576 const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED;
3578 // Clear anything that was already there by masking out the bits before 'or'ing in what we want there.
3579 gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2)));
3585 //-------------------------------------------------------------------
3586 // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
3593 void ClearOtherRegFlags()
3595 #if FEATURE_MULTIREG_RET
3600 //-------------------------------------------------------------------------
3601 // CopyOtherRegFlags: copy GTF_* flags associated with gtOtherRegs from
3602 // the given call node.
3605 // fromCall - GenTreeCall node from which to copy
3610 void CopyOtherRegFlags(GenTreeCall* fromCall)
3612 #if FEATURE_MULTIREG_RET
3613 this->gtSpillFlags = fromCall->gtSpillFlags;
3619 #define GTF_CALL_M_EXPLICIT_TAILCALL 0x00000001 // GT_CALL -- the call is "tail" prefixed and
3620 // importer has performed tail call checks
3621 #define GTF_CALL_M_TAILCALL 0x00000002 // GT_CALL -- the call is a tailcall
3622 #define GTF_CALL_M_VARARGS 0x00000004 // GT_CALL -- the call uses varargs ABI
3623 #define GTF_CALL_M_RETBUFFARG 0x00000008 // GT_CALL -- first parameter is the return buffer argument
3624 #define GTF_CALL_M_DELEGATE_INV 0x00000010 // GT_CALL -- call to Delegate.Invoke
3625 #define GTF_CALL_M_NOGCCHECK 0x00000020 // GT_CALL -- not a call for computing full interruptability
3626 #define GTF_CALL_M_SPECIAL_INTRINSIC 0x00000040 // GT_CALL -- function that could be optimized as an intrinsic
3627 // in special cases. Used to optimize fast way out in morphing
3628 #define GTF_CALL_M_UNMGD_THISCALL 0x00000080 // GT_CALL -- "this" pointer (first argument)
3629 // should be enregistered (only for GTF_CALL_UNMANAGED)
3630 #define GTF_CALL_M_VIRTSTUB_REL_INDIRECT 0x00000080 // the virtstub is indirected through
3631 // a relative address (only for GTF_CALL_VIRT_STUB)
3632 #define GTF_CALL_M_NONVIRT_SAME_THIS 0x00000080 // GT_CALL -- callee "this" pointer is
3633 // equal to caller this pointer (only for GTF_CALL_NONVIRT)
3634 #define GTF_CALL_M_FRAME_VAR_DEATH 0x00000100 // GT_CALL -- the compLvFrameListRoot variable dies here (last use)
3636 #ifndef LEGACY_BACKEND
3637 #define GTF_CALL_M_TAILCALL_VIA_HELPER 0x00000200 // GT_CALL -- call is a tail call dispatched via tail call JIT helper.
3640 #if FEATURE_TAILCALL_OPT
3641 #define GTF_CALL_M_IMPLICIT_TAILCALL 0x00000400 // GT_CALL -- call is an opportunistic
3642 // tail call and importer has performed tail call checks
3643 #define GTF_CALL_M_TAILCALL_TO_LOOP 0x00000800 // GT_CALL -- call is a fast recursive tail call
3644 // that can be converted into a loop
3647 #define GTF_CALL_M_PINVOKE 0x00001000 // GT_CALL -- call is a pinvoke. This mirrors VM flag CORINFO_FLG_PINVOKE.
3648 // A call marked as Pinvoke is not necessarily a GT_CALL_UNMANAGED. For e.g.
3649 // an IL Stub dynamically generated for a PInvoke declaration is flagged as
3650 // a Pinvoke but not as an unmanaged call. See impCheckForPInvokeCall() to
3651 // know when these flags are set.
3653 #define GTF_CALL_M_R2R_REL_INDIRECT 0x00002000 // GT_CALL -- ready to run call is indirected through a relative address
3654 #define GTF_CALL_M_DOES_NOT_RETURN 0x00004000 // GT_CALL -- call does not return
3655 #define GTF_CALL_M_SECURE_DELEGATE_INV 0x00008000 // GT_CALL -- call is in secure delegate
3656 #define GTF_CALL_M_FAT_POINTER_CHECK 0x00010000 // GT_CALL -- CoreRT managed calli needs transformation, that checks
3657 // special bit in calli address. If it is set, then it is necessary
3658 // to restore real function address and load hidden argument
3659 // as the first argument for calli. It is CoreRT replacement for instantiating
3660 // stubs, because executable code cannot be generated at runtime.
3661 #define GTF_CALL_M_HELPER_SPECIAL_DCE 0x00020000 // GT_CALL -- this helper call can be removed if it is part of a comma and
3662 // the comma result is unused.
3666 bool IsUnmanaged() const
3668 return (gtFlags & GTF_CALL_UNMANAGED) != 0;
3670 bool NeedsNullCheck() const
3672 return (gtFlags & GTF_CALL_NULLCHECK) != 0;
3674 bool CallerPop() const
3676 return (gtFlags & GTF_CALL_POP_ARGS) != 0;
3678 bool IsVirtual() const
3680 return (gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_NONVIRT;
3682 bool IsVirtualStub() const
3684 return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB;
3686 bool IsVirtualVtable() const
3688 return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_VTABLE;
3690 bool IsInlineCandidate() const
3692 return (gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0;
3695 #ifndef LEGACY_BACKEND
3696 bool HasNonStandardAddedArgs(Compiler* compiler) const;
3697 int GetNonStandardAddedArgCount(Compiler* compiler) const;
3698 #endif // !LEGACY_BACKEND
3700 // Returns true if this call uses a retBuf argument and its calling convention
3701 bool HasRetBufArg() const
3703 return (gtCallMoreFlags & GTF_CALL_M_RETBUFFARG) != 0;
3706 //-------------------------------------------------------------------------
3707 // TreatAsHasRetBufArg:
3710 // compiler, the compiler instance so that we can call eeGetHelperNum
3713 // Returns true if we treat the call as if it has a retBuf argument
3714 // This method may actually have a retBuf argument
3715 // or it could be a JIT helper that we are still transforming during
3716 // the importer phase.
3719 // On ARM64 marking the method with the GTF_CALL_M_RETBUFFARG flag
3720 // will make HasRetBufArg() return true, but will also force the
3721 // use of register x8 to pass the RetBuf argument.
3723 bool TreatAsHasRetBufArg(Compiler* compiler) const;
3725 //-----------------------------------------------------------------------------------------
3726 // HasMultiRegRetVal: whether the call node returns its value in multiple return registers.
3732 // True if the call is returning a multi-reg return value. False otherwise.
3735 // This is implemented only for x64 Unix and yet to be implemented for
3736 // other multi-reg return target arch (arm64/arm32/x86).
3738 bool HasMultiRegRetVal() const
3740 #if defined(_TARGET_X86_) && !defined(LEGACY_BACKEND)
3741 // LEGACY_BACKEND does not use multi reg returns for calls with long return types
3742 return varTypeIsLong(gtType);
3743 #elif FEATURE_MULTIREG_RET && (defined(_TARGET_ARM_) && !defined(LEGACY_BACKEND))
3744 // LEGACY_BACKEND does not use multi reg returns for calls with long return types
3745 return varTypeIsLong(gtType) || (varTypeIsStruct(gtType) && !HasRetBufArg());
3746 #elif FEATURE_MULTIREG_RET
3747 return varTypeIsStruct(gtType) && !HasRetBufArg();
3753 // Returns true if VM has flagged this method as CORINFO_FLG_PINVOKE.
3754 bool IsPInvoke() const
3756 return (gtCallMoreFlags & GTF_CALL_M_PINVOKE) != 0;
3759 // Note that the distinction of whether tail prefixed or an implicit tail call
3760 // is maintained on a call node till fgMorphCall() after which it will be
3761 // either a tail call (i.e. IsTailCall() is true) or a non-tail call.
3762 bool IsTailPrefixedCall() const
3764 return (gtCallMoreFlags & GTF_CALL_M_EXPLICIT_TAILCALL) != 0;
3767 // This method returning "true" implies that tail call flowgraph morhphing has
3768 // performed final checks and committed to making a tail call.
3769 bool IsTailCall() const
3771 return (gtCallMoreFlags & GTF_CALL_M_TAILCALL) != 0;
3774 // This method returning "true" implies that importer has performed tail call checks
3775 // and providing a hint that this can be converted to a tail call.
3776 bool CanTailCall() const
3778 return IsTailPrefixedCall() || IsImplicitTailCall();
3781 #ifndef LEGACY_BACKEND
3782 bool IsTailCallViaHelper() const
3784 return IsTailCall() && (gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER);
3786 #else // LEGACY_BACKEND
3787 bool IsTailCallViaHelper() const
3791 #endif // LEGACY_BACKEND
3793 #if FEATURE_FASTTAILCALL
3794 bool IsFastTailCall() const
3796 return IsTailCall() && !(gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER);
3798 #else // !FEATURE_FASTTAILCALL
3799 bool IsFastTailCall() const
3803 #endif // !FEATURE_FASTTAILCALL
3805 #if FEATURE_TAILCALL_OPT
3806 // Returns true if this is marked for opportunistic tail calling.
3807 // That is, can be tail called though not explicitly prefixed with "tail" prefix.
3808 bool IsImplicitTailCall() const
3810 return (gtCallMoreFlags & GTF_CALL_M_IMPLICIT_TAILCALL) != 0;
3812 bool IsTailCallConvertibleToLoop() const
3814 return (gtCallMoreFlags & GTF_CALL_M_TAILCALL_TO_LOOP) != 0;
3816 #else // !FEATURE_TAILCALL_OPT
3817 bool IsImplicitTailCall() const
3821 bool IsTailCallConvertibleToLoop() const
3825 #endif // !FEATURE_TAILCALL_OPT
3827 bool IsSameThis() const
3829 return (gtCallMoreFlags & GTF_CALL_M_NONVIRT_SAME_THIS) != 0;
3831 bool IsDelegateInvoke() const
3833 return (gtCallMoreFlags & GTF_CALL_M_DELEGATE_INV) != 0;
3835 bool IsVirtualStubRelativeIndir() const
3837 return (gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT) != 0;
3840 #ifdef FEATURE_READYTORUN_COMPILER
3841 bool IsR2RRelativeIndir() const
3843 return (gtCallMoreFlags & GTF_CALL_M_R2R_REL_INDIRECT) != 0;
3845 void setEntryPoint(CORINFO_CONST_LOOKUP entryPoint)
3847 gtEntryPoint = entryPoint;
3848 if (gtEntryPoint.accessType == IAT_PVALUE)
3850 gtCallMoreFlags |= GTF_CALL_M_R2R_REL_INDIRECT;
3853 #endif // FEATURE_READYTORUN_COMPILER
3855 bool IsVarargs() const
3857 return (gtCallMoreFlags & GTF_CALL_M_VARARGS) != 0;
3860 bool IsNoReturn() const
3862 return (gtCallMoreFlags & GTF_CALL_M_DOES_NOT_RETURN) != 0;
3865 bool IsFatPointerCandidate() const
3867 return (gtCallMoreFlags & GTF_CALL_M_FAT_POINTER_CHECK) != 0;
3870 bool IsPure(Compiler* compiler) const;
3872 bool HasSideEffects(Compiler* compiler, bool ignoreExceptions = false, bool ignoreCctors = false) const;
3874 void ClearFatPointerCandidate()
3876 gtCallMoreFlags &= ~GTF_CALL_M_FAT_POINTER_CHECK;
3879 void SetFatPointerCandidate()
3881 gtCallMoreFlags |= GTF_CALL_M_FAT_POINTER_CHECK;
3884 unsigned gtCallMoreFlags; // in addition to gtFlags
3886 unsigned char gtCallType : 3; // value from the gtCallTypes enumeration
3887 unsigned char gtReturnType : 5; // exact return type
3889 CORINFO_CLASS_HANDLE gtRetClsHnd; // The return type handle of the call if it is a struct; always available
3892 // only used for CALLI unmanaged calls (CT_INDIRECT)
3893 GenTree* gtCallCookie;
3894 // gtInlineCandidateInfo is only used when inlining methods
3895 InlineCandidateInfo* gtInlineCandidateInfo;
3896 void* gtStubCallStubAddr; // GTF_CALL_VIRT_STUB - these are never inlined
3897 CORINFO_GENERIC_HANDLE compileTimeHelperArgumentHandle; // Used to track type handle argument of dynamic helpers
3898 void* gtDirectCallAddress; // Used to pass direct call address between lower and codegen
3901 // expression evaluated after args are placed which determines the control target
3902 GenTree* gtControlExpr;
3905 CORINFO_METHOD_HANDLE gtCallMethHnd; // CT_USER_FUNC
3906 GenTree* gtCallAddr; // CT_INDIRECT
3909 #ifdef FEATURE_READYTORUN_COMPILER
3910 // Call target lookup info for method call from a Ready To Run module
3911 CORINFO_CONST_LOOKUP gtEntryPoint;
3914 #if defined(DEBUG) || defined(INLINE_DATA)
3915 // For non-inline candidates, track the first observation
3916 // that blocks candidacy.
3917 InlineObservation gtInlineObservation;
3919 // IL offset of the call wrt its parent method.
3920 IL_OFFSET gtRawILOffset;
3921 #endif // defined(DEBUG) || defined(INLINE_DATA)
3923 bool IsHelperCall() const
3925 return gtCallType == CT_HELPER;
3928 bool IsHelperCall(CORINFO_METHOD_HANDLE callMethHnd) const
3930 return IsHelperCall() && (callMethHnd == gtCallMethHnd);
3933 bool IsHelperCall(Compiler* compiler, unsigned helper) const;
3935 void ReplaceCallOperand(GenTree** operandUseEdge, GenTree* replacement);
3937 bool AreArgsComplete() const;
3939 GenTreeCall(var_types type) : GenTree(GT_CALL, type)
3941 fgArgInfo = nullptr;
3943 #if DEBUGGABLE_GENTREE
3944 GenTreeCall() : GenTree()
// GenTreeCmpXchg -- compare-and-exchange (GT_CMPXCHG) node with three operand
// trees: the memory location, the new value, and the comparand.
3950 struct GenTreeCmpXchg : public GenTree
3952 GenTree* gtOpLocation;
// NOTE(review): the 'gtOpValue' member declaration appears to be elided from
// this excerpt (the constructor below initializes it) -- confirm in the full file.
3954 GenTree* gtOpComparand;
3956 GenTreeCmpXchg(var_types type, GenTree* loc, GenTree* val, GenTree* comparand)
3957 : GenTree(GT_CMPXCHG, type), gtOpLocation(loc), gtOpValue(val), gtOpComparand(comparand)
3959 // There's no reason to do a compare-exchange on a local location, so we'll assume that all of these
3960 // have global effects.
3961 gtFlags |= (GTF_GLOB_REF | GTF_ASG);
3963 #if DEBUGGABLE_GENTREE
3964 GenTreeCmpXchg() : GenTree()
3970 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
// GenTreeMultiRegOp -- (non-legacy ARM only, per the enclosing #if) an operation
// whose result may occupy up to two registers: gtRegNum (first) and gtOtherReg.
3971 struct GenTreeMultiRegOp : public GenTreeOp
// Second result register, or REG_NA when only one register is used.
3973 regNumber gtOtherReg;
3975 // GTF_SPILL or GTF_SPILLED flag on a multi-reg node indicates that one or
3976 // more of its result regs are in that state. The spill flag of each of the
3977 // return register is stored here. We only need 2 bits per returned register,
3978 // so this is treated as a 2-bit array. No architecture needs more than 8 bits.
3980 static const unsigned PACKED_GTF_SPILL = 1;
3981 static const unsigned PACKED_GTF_SPILLED = 2;
3982 unsigned char gtSpillFlags;
3984 GenTreeMultiRegOp(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2)
3985 : GenTreeOp(oper, type, op1, op2), gtOtherReg(REG_NA)
3987 ClearOtherRegFlags();
// Number of result registers currently assigned: 0 if no register, else 1 or 2.
3990 unsigned GetRegCount() const
3992 if (gtRegNum == REG_NA || gtRegNum == REG_STK)
3996 return (gtOtherReg == REG_NA || gtOtherReg == REG_STK) ? 1 : 2;
3999 //---------------------------------------------------------------------------
4000 // GetRegNumByIdx: get ith register allocated to this struct argument.
4003 // idx - index of the register
4006 // Return regNumber of ith register of this register argument
// Body elided from this excerpt.
4008 regNumber GetRegNumByIdx(unsigned idx) const
4020 //----------------------------------------------------------------------
4021 // GetRegSpillFlagByIdx: get spill flag associated with the register
4022 // specified by its index.
4025 // idx - Position or index of the register
4028 // Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered.
4030 unsigned GetRegSpillFlagByIdx(unsigned idx) const
4032 assert(idx < MAX_REG_ARG);
4034 unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
4035 unsigned spillFlags = 0;
4036 if (bits & PACKED_GTF_SPILL)
4038 spillFlags |= GTF_SPILL;
4040 if (bits & PACKED_GTF_SPILLED)
4042 spillFlags |= GTF_SPILLED;
4048 //----------------------------------------------------------------------
4049 // SetRegSpillFlagByIdx: set spill flags for the register
4050 // specified by its index.
4053 // flags - GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed.
4054 // idx - Position or index of the register
4059 void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
4061 assert(idx < MAX_REG_ARG);
4064 if (flags & GTF_SPILL)
4066 bits |= PACKED_GTF_SPILL;
4068 if (flags & GTF_SPILLED)
4070 bits |= PACKED_GTF_SPILLED;
4073 const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED;
4075 // Clear anything that was already there by masking out the bits before 'or'ing in what we want there.
4076 gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2)));
4079 //--------------------------------------------------------------------------
4080 // GetRegType: Get var_type of the register specified by index.
4083 // index - Index of the register.
4084 // First register will have an index 0 and so on.
4087 // var_type of the register specified by its index.
4089 var_types GetRegType(unsigned index)
4092 // The type of register is usually the same as GenTree type
4093 // since most of time GenTreeMultiRegOp uses only a single reg (when gtOtherReg is REG_NA).
4094 // The special case is when we have TYP_LONG here, which was `TYP_DOUBLE` originally
4095 // (copied to int regs for argument push on armel). Then we need to separate them into int for each index.
4096 var_types result = TypeGet();
4097 if (result == TYP_LONG)
4099 assert(gtOtherReg != REG_NA);
4105 //-------------------------------------------------------------------
4106 // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
4114 void ClearOtherRegFlags()
4119 #if DEBUGGABLE_GENTREE
4120 GenTreeMultiRegOp() : GenTreeOp()
// GenTreeFptrVal -- function pointer value (GT_FTN_ADDR): carries the handle of
// the method whose address is being taken (plus R2R entry point info if enabled).
4127 struct GenTreeFptrVal : public GenTree
4129 CORINFO_METHOD_HANDLE gtFptrMethod;
4131 #ifdef FEATURE_READYTORUN_COMPILER
// Call target lookup info when compiling a Ready To Run module.
4132 CORINFO_CONST_LOOKUP gtEntryPoint;
4135 GenTreeFptrVal(var_types type, CORINFO_METHOD_HANDLE meth) : GenTree(GT_FTN_ADDR, type), gtFptrMethod(meth)
4138 #if DEBUGGABLE_GENTREE
4139 GenTreeFptrVal() : GenTree()
// GenTreeQmark -- conditional expression node (GT_QMARK): the condition with a
// GT_COLON holding the two result trees.
4146 struct GenTreeQmark : public GenTreeOp
4148 #ifdef LEGACY_BACKEND
4149 // Livesets on entry to then and else subtrees
4150 VARSET_TP gtThenLiveSet;
4151 VARSET_TP gtElseLiveSet;
4154 // The "Compiler*" argument is not a DEBUGARG here because we use it to keep track of the set of
4155 // (possible) QMark nodes.
4156 GenTreeQmark(var_types type, GenTree* cond, GenTree* colonOp, class Compiler* comp);
4158 #if DEBUGGABLE_GENTREE
4159 GenTreeQmark() : GenTreeOp(GT_QMARK, TYP_INT, nullptr, nullptr)
4165 /* gtIntrinsic -- intrinsic (possibly-binary op [NULL op2 is allowed] with an additional field) */
// GenTreeIntrinsic -- a GT_INTRINSIC node (unary or binary; op2 may be null)
// recording which CorInfoIntrinsics operation it represents and the original
// method handle.
4167 struct GenTreeIntrinsic : public GenTreeOp
4169 CorInfoIntrinsics gtIntrinsicId;
4170 CORINFO_METHOD_HANDLE gtMethodHandle; // Method handle of the method which is treated as an intrinsic.
4172 #ifdef FEATURE_READYTORUN_COMPILER
4173 // Call target lookup info for method call from a Ready To Run module
4174 CORINFO_CONST_LOOKUP gtEntryPoint;
// Unary form: op2 is null.
4177 GenTreeIntrinsic(var_types type, GenTree* op1, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle)
4178 : GenTreeOp(GT_INTRINSIC, type, op1, nullptr), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle)
// Binary form.
4183 var_types type, GenTree* op1, GenTree* op2, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle)
4184 : GenTreeOp(GT_INTRINSIC, type, op1, op2), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle)
4188 #if DEBUGGABLE_GENTREE
4189 GenTreeIntrinsic() : GenTreeOp()
// GenTreeJitIntrinsic -- common base for GT_SIMD and GT_HWIntrinsic nodes:
// carries the SIMD base type and vector size (0 marks a scalar intrinsic).
4195 struct GenTreeJitIntrinsic : public GenTreeOp
4197 var_types gtSIMDBaseType; // SIMD vector base type
4198 unsigned gtSIMDSize; // SIMD vector size in bytes, use 0 for scalar intrinsics
4200 GenTreeJitIntrinsic(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2, var_types baseType, unsigned size)
4201 : GenTreeOp(oper, type, op1, op2), gtSIMDBaseType(baseType), gtSIMDSize(size)
// A nonzero size means this intrinsic operates on a SIMD vector.
// NOTE(review): the enclosing method signature (isSIMD(), per its use in
// OperIsSimdHWIntrinsic below) is elided from this excerpt.
4207 return gtSIMDSize != 0;
4210 #if DEBUGGABLE_GENTREE
4211 GenTreeJitIntrinsic() : GenTreeOp()
4219 /* gtSIMD -- SIMD intrinsic (possibly-binary op [NULL op2 is allowed] with additional fields) */
// GenTreeSIMD -- a GT_SIMD intrinsic node (unary or binary) identified by its
// SIMDIntrinsicID, on top of the base type/size stored in GenTreeJitIntrinsic.
4220 struct GenTreeSIMD : public GenTreeJitIntrinsic
4222 SIMDIntrinsicID gtSIMDIntrinsicID; // operation Id
// Unary form: op2 is null.
4224 GenTreeSIMD(var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
4225 : GenTreeJitIntrinsic(GT_SIMD, type, op1, nullptr, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID)
// Binary form.
4230 var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
4231 : GenTreeJitIntrinsic(GT_SIMD, type, op1, op2, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID)
4235 #if DEBUGGABLE_GENTREE
4236 GenTreeSIMD() : GenTreeJitIntrinsic()
4241 #endif // FEATURE_SIMD
4243 #ifdef FEATURE_HW_INTRINSICS
// GenTreeHWIntrinsic -- a GT_HWIntrinsic node (0, 1, or 2 direct operands)
// identified by a NamedIntrinsic id.
4244 struct GenTreeHWIntrinsic : public GenTreeJitIntrinsic
4246 NamedIntrinsic gtHWIntrinsicId;
// Nullary form: both operands null.
4248 GenTreeHWIntrinsic(var_types type, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size)
4249 : GenTreeJitIntrinsic(GT_HWIntrinsic, type, nullptr, nullptr, baseType, size), gtHWIntrinsicId(hwIntrinsicID)
// Unary form: op2 is null.
4253 GenTreeHWIntrinsic(var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size)
4254 : GenTreeJitIntrinsic(GT_HWIntrinsic, type, op1, nullptr, baseType, size), gtHWIntrinsicId(hwIntrinsicID)
// Binary form.
4259 var_types type, GenTree* op1, GenTree* op2, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size)
4260 : GenTreeJitIntrinsic(GT_HWIntrinsic, type, op1, op2, baseType, size), gtHWIntrinsicId(hwIntrinsicID)
4264 // Note that HW Intrinsic instructions are a sub class of GenTreeOp which only supports two operands
4265 // However there are HW Intrinsic instructions that have 3 or even 4 operands and this is
4266 // supported using a single op1 and using an ArgList for it: gtNewArgList(op1, op2, op3)
4268 bool OperIsMemoryLoad(); // Returns true for the HW Intrinsic instructions that have MemoryLoad semantics,
4270 bool OperIsMemoryStore(); // Returns true for the HW Intrinsic instructions that have MemoryStore semantics,
4272 bool OperIsMemoryLoadOrStore(); // Returns true for the HW Intrinsic instructions that have MemoryLoad or
4273 // MemoryStore semantics, false otherwise
4275 #if DEBUGGABLE_GENTREE
4276 GenTreeHWIntrinsic() : GenTreeJitIntrinsic()
// Returns true when this node is a GT_HWIntrinsic whose intrinsic operates on a
// SIMD vector (delegates to GenTreeHWIntrinsic::isSIMD()). The non-HWIntrinsic
// fall-through is elided from this excerpt.
4282 inline bool GenTree::OperIsSimdHWIntrinsic() const
4284 if (gtOper == GT_HWIntrinsic)
4286 return this->AsHWIntrinsic()->isSIMD();
4290 #endif // FEATURE_HW_INTRINSICS
4292 /* gtIndex -- array access */
// GenTreeIndex -- array element access (GT_INDEX): op1 is the array, op2 the index.
4294 struct GenTreeIndex : public GenTreeOp
4305 unsigned gtIndElemSize; // size of elements in the array
4306 CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type.
4308 GenTreeIndex(var_types type, GenTree* arr, GenTree* ind, unsigned indElemSize)
4309 : GenTreeOp(GT_INDEX, type, arr, ind)
4310 , gtIndElemSize(indElemSize)
4311 , gtStructElemClass(nullptr) // We always initialize this after construction.
// Debug-only knob: allow suppressing the range check entirely.
4314 if (JitConfig.JitSkipArrayBoundCheck() == 1)
4316 // Skip bounds check
4322 gtFlags |= GTF_INX_RNGCHK;
4325 if (type == TYP_REF)
4327 gtFlags |= GTF_INX_REFARR_LAYOUT;
// Array access can fault and reads through a reference -> exception + global ref.
4330 gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;
4332 #if DEBUGGABLE_GENTREE
4333 GenTreeIndex() : GenTreeOp()
4339 // gtIndexAddr: given an array object and an index, checks that the index is within the bounds of the array if
4340 // necessary and produces the address of the value at that index of the array.
// GenTreeIndexAddr -- GT_INDEX_ADDR: yields the TYP_BYREF address of array
// element op2 within array op1, range-checking unless suppressed.
4341 struct GenTreeIndexAddr : public GenTreeOp
4352 CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type.
4354 GenTree* gtIndRngFailBB; // Label to jump to for array-index-out-of-range
4355 unsigned gtStkDepth; // Stack depth at which the jump occurs (required for fgSetRngChkTarget)
4357 var_types gtElemType; // The element type of the array.
4358 unsigned gtElemSize; // size of elements in the array
4359 unsigned gtLenOffset; // The offset from the array's base address to its length.
4360 unsigned gtElemOffset; // The offset from the array's base address to its first element.
// Note: several parameter lines of this constructor are elided from this excerpt.
4362 GenTreeIndexAddr(GenTree* arr,
4365 CORINFO_CLASS_HANDLE structElemClass,
4368 unsigned elemOffset)
4369 : GenTreeOp(GT_INDEX_ADDR, TYP_BYREF, arr, ind)
4370 , gtStructElemClass(structElemClass)
4371 , gtIndRngFailBB(nullptr)
4373 , gtElemType(elemType)
4374 , gtElemSize(elemSize)
4375 , gtLenOffset(lenOffset)
4376 , gtElemOffset(elemOffset)
// Debug-only knob: allow suppressing the range check entirely.
4379 if (JitConfig.JitSkipArrayBoundCheck() == 1)
4381 // Skip bounds check
4387 gtFlags |= GTF_INX_RNGCHK;
4390 // REVERSE_OPS is set because we must evaluate the index before the array address.
4391 gtFlags |= GTF_EXCEPT | GTF_GLOB_REF | GTF_REVERSE_OPS;
4394 #if DEBUGGABLE_GENTREE
4395 GenTreeIndexAddr() : GenTreeOp()
4401 /* gtArrLen -- array length (GT_ARR_LENGTH)
4402 GT_ARR_LENGTH is used for "arr.length" */
// GenTreeArrLen -- array length node (GT_ARR_LENGTH): the single operand is the
// array reference; gtArrLenOffset locates the length field relative to it.
4404 struct GenTreeArrLen : public GenTreeUnOp
4409 } // the array address node
4411 int gtArrLenOffset; // constant to add to "gtArrRef" to get the address of the array length.
4414 inline int ArrLenOffset()
4416 return gtArrLenOffset;
4419 GenTreeArrLen(var_types type, GenTree* arrRef, int lenOffset)
4420 : GenTreeUnOp(GT_ARR_LENGTH, type, arrRef), gtArrLenOffset(lenOffset)
4424 #if DEBUGGABLE_GENTREE
4425 GenTreeArrLen() : GenTreeUnOp()
4432 // - a comparison value (generally an array length),
4433 // - an index value, and
4434 // - the label to jump to if the index is out of range.
4435 // - the "kind" of the throw block to branch to on failure
4436 // It generates no result.
4438 struct GenTreeBoundsChk : public GenTree
4440 GenTree* gtIndex; // The index expression.
4441 GenTree* gtArrLen; // An expression for the length of the array being indexed.
4443 GenTree* gtIndRngFailBB; // Label to jump to for array-index-out-of-range
4444 SpecialCodeKind gtThrowKind; // Kind of throw block to branch to on failure
4446 /* Only out-of-ranges at same stack depth can jump to the same label (finding return address is easier)
4447 For delayed calling of fgSetRngChkTarget() so that the
4448 optimizer has a chance of eliminating some of the rng checks */
4449 unsigned gtStkDepth;
4451 GenTreeBoundsChk(genTreeOps oper, var_types type, GenTree* index, GenTree* arrLen, SpecialCodeKind kind)
4452 : GenTree(oper, type)
4455 , gtIndRngFailBB(nullptr)
4459 // Effects flags propagate upwards.
4460 gtFlags |= (arrLen->gtFlags & GTF_ALL_EFFECT);
4461 gtFlags |= GTF_EXCEPT;
4463 #if DEBUGGABLE_GENTREE
4464 GenTreeBoundsChk() : GenTree()
4469 // If the gtArrLen is really an array length, returns array reference, else "NULL".
4472 if (gtArrLen->OperGet() == GT_ARR_LENGTH)
4474 return gtArrLen->gtArrLen.ArrRef();
4483 // gtArrElem -- general array element (GT_ARR_ELEM), for non "SZ_ARRAYS"
4484 // -- multidimensional arrays, or 1-d arrays with non-zero lower bounds.
4486 struct GenTreeArrElem : public GenTree
4490 #define GT_ARR_MAX_RANK 3
4491 GenTree* gtArrInds[GT_ARR_MAX_RANK]; // Indices
4492 unsigned char gtArrRank; // Rank of the array
4494 unsigned char gtArrElemSize; // !!! Caution, this is an "unsigned char", it is used only
4495 // on the optimization path of array intrisics.
4496 // It stores the size of array elements WHEN it can fit
4497 // into an "unsigned char".
4498 // This has caused VSW 571394.
4499 var_types gtArrElemType; // The array element type
4501 // Requires that "inds" is a pointer to an array of "rank" GenTreePtrs for the indices.
4503 var_types type, GenTree* arr, unsigned char rank, unsigned char elemSize, var_types elemType, GenTree** inds)
4504 : GenTree(GT_ARR_ELEM, type), gtArrObj(arr), gtArrRank(rank), gtArrElemSize(elemSize), gtArrElemType(elemType)
4506 for (unsigned char i = 0; i < rank; i++)
4508 gtArrInds[i] = inds[i];
4510 gtFlags |= GTF_EXCEPT;
4512 #if DEBUGGABLE_GENTREE
4513 GenTreeArrElem() : GenTree()
4519 //--------------------------------------------
4521 // GenTreeArrIndex (gtArrIndex): Expression to bounds-check the index for one dimension of a
4522 // multi-dimensional or non-zero-based array., and compute the effective index
4523 // (i.e. subtracting the lower bound).
4526 // This node is similar in some ways to GenTreeBoundsChk, which ONLY performs the check.
4527 // The reason that this node incorporates the check into the effective index computation is
4528 // to avoid duplicating the codegen, as the effective index is required to compute the
4530 // TODO-CQ: Enable optimization of the lower bound and length by replacing this:
4533 // +--* ArrIndex[i, ]
4534 // with something like:
4536 // /--* ArrLowerBound[i, ]
4538 // +--* ArrLen[i, ] (either generalize GT_ARR_LENGTH or add a new node)
4540 // +--* ArrIndex[i, ]
4541 // Which could, for example, be optimized to the following when known to be within bounds:
4542 // /--* TempForLowerBoundDim0
4546 struct GenTreeArrIndex : public GenTreeOp
4548 // The array object - may be any expression producing an Array reference, but is likely to be a lclVar.
4553 // The index expression - may be any integral expression.
4554 GenTree*& IndexExpr()
4558 unsigned char gtCurrDim; // The current dimension
4559 unsigned char gtArrRank; // Rank of the array
4560 var_types gtArrElemType; // The array element type
4562 GenTreeArrIndex(var_types type,
4565 unsigned char currDim,
4566 unsigned char arrRank,
4568 : GenTreeOp(GT_ARR_INDEX, type, arrObj, indexExpr)
4569 , gtCurrDim(currDim)
4570 , gtArrRank(arrRank)
4571 , gtArrElemType(elemType)
4573 gtFlags |= GTF_EXCEPT;
4575 #if DEBUGGABLE_GENTREE
4578 // Used only for GenTree::GetVtableForOper()
4579 GenTreeArrIndex() : GenTreeOp()
4585 //--------------------------------------------
4587 // GenTreeArrOffset (gtArrOffset): Expression to compute the accumulated offset for the address
4588 // of an element of a multi-dimensional or non-zero-based array.
4591 // The result of this expression is (gtOffset * dimSize) + gtIndex
4592 // where dimSize is the length/stride/size of the dimension, and is obtained from gtArrObj.
4593 // This node is generated in conjunction with the GenTreeArrIndex node, which computes the
4594 // effective index for a single dimension. The sub-trees can be separately optimized, e.g.
4595 // within a loop body where the expression for the 0th dimension may be invariant.
4597 // Here is an example of how the tree might look for a two-dimension array reference:
4601 // +--* ArrIndex[i, ]
4603 // /--| arrOffs[i, ]
4606 // +--* ArrIndex[*,j]
4608 // /--| arrOffs[*,j]
4609 // TODO-CQ: see comment on GenTreeArrIndex for how its representation may change. When that
4610 // is done, we will also want to replace the <arrObj> argument to arrOffs with the
4611 // ArrLen as for GenTreeArrIndex.
4613 struct GenTreeArrOffs : public GenTree
4615 GenTree* gtOffset; // The accumulated offset for lower dimensions - must be TYP_I_IMPL, and
4616 // will either be a CSE temp, the constant 0, or another GenTreeArrOffs node.
4617 GenTree* gtIndex; // The effective index for the current dimension - must be non-negative
4618 // and can be any expression (though it is likely to be either a GenTreeArrIndex,
4619 // node, a lclVar, or a constant).
4620 GenTree* gtArrObj; // The array object - may be any expression producing an Array reference,
4621 // but is likely to be a lclVar.
4622 unsigned char gtCurrDim; // The current dimension
4623 unsigned char gtArrRank; // Rank of the array
4624 var_types gtArrElemType; // The array element type
4626 GenTreeArrOffs(var_types type,
4630 unsigned char currDim,
4633 : GenTree(GT_ARR_OFFSET, type)
4637 , gtCurrDim(currDim)
4639 , gtArrElemType(elemType)
4641 assert(index->gtFlags & GTF_EXCEPT);
4642 gtFlags |= GTF_EXCEPT;
4644 #if DEBUGGABLE_GENTREE
4645 GenTreeArrOffs() : GenTree()
4651 /* gtAddrMode -- Target-specific canonicalized addressing expression (GT_LEA) */
4653 struct GenTreeAddrMode : public GenTreeOp
4655 // Address is Base + Index*Scale + Offset.
4656 // These are the legal patterns:
4658 // Base // Base != nullptr && Index == nullptr && Scale == 0 && Offset == 0
4659 // Base + Index*Scale // Base != nullptr && Index != nullptr && Scale != 0 && Offset == 0
4660 // Base + Offset // Base != nullptr && Index == nullptr && Scale == 0 && Offset != 0
4661 // Base + Index*Scale + Offset // Base != nullptr && Index != nullptr && Scale != 0 && Offset != 0
4662 // Index*Scale // Base == nullptr && Index != nullptr && Scale > 1 && Offset == 0
4663 // Index*Scale + Offset // Base == nullptr && Index != nullptr && Scale > 1 && Offset != 0
4664 // Offset // Base == nullptr && Index == nullptr && Scale == 0 && Offset != 0
4667 // 1. Base + Index is legal with Scale==1
4668 // 2. If Index is null, Scale should be zero (or unintialized / unused)
4669 // 3. If Scale==1, then we should have "Base" instead of "Index*Scale", and "Base + Offset" instead of
4670 // "Index*Scale + Offset".
4672 // First operand is base address/pointer
4673 bool HasBase() const
4675 return gtOp1 != nullptr;
4682 // Second operand is scaled index value
4683 bool HasIndex() const
4685 return gtOp2 != nullptr;
4694 return static_cast<int>(gtOffset);
4697 unsigned gtScale; // The scale factor
4699 #ifndef LEGACY_BACKEND
4702 // TODO-Cleanup: gtOffset should be changed to 'int' to match the getter function and avoid accidental
4703 // zero extension to 64 bit. However, this is used by legacy code and initialized, via the offset
4704 // parameter of the constructor, by Lowering::TryCreateAddrMode & CodeGenInterface::genCreateAddrMode.
4705 // The later computes the offset as 'ssize_t' but returns it as 'unsigned'. We should change
4706 // genCreateAddrMode to return 'int' or 'ssize_t' and then update this as well.
4707 unsigned gtOffset; // The offset to add
4710 GenTreeAddrMode(var_types type, GenTree* base, GenTree* index, unsigned scale, unsigned offset)
4711 : GenTreeOp(GT_LEA, type, base, index)
4713 assert(base != nullptr || index != nullptr);
4717 #if DEBUGGABLE_GENTREE
4720 // Used only for GenTree::GetVtableForOper()
4721 GenTreeAddrMode() : GenTreeOp()
4727 // Indir is just an op, no additional data, but some additional abstractions
4728 struct GenTreeIndir : public GenTreeOp
4730 // The address for the indirection.
4731 // Since GenTreeDynBlk derives from this, but is an "EXOP" (i.e. it has extra fields),
4732 // we can't access Op1 and Op2 in the normal manner if we may have a DynBlk.
4738 // these methods provide an interface to the indirection node which
4746 GenTreeIndir(genTreeOps oper, var_types type, GenTree* addr, GenTree* data) : GenTreeOp(oper, type, addr, data)
4750 #if DEBUGGABLE_GENTREE
4753 // Used only for GenTree::GetVtableForOper()
4754 GenTreeIndir() : GenTreeOp()
4760 // gtBlk -- 'block' (GT_BLK, GT_STORE_BLK).
4762 // This is the base type for all of the nodes that represent block or struct
4764 // Since it can be a store, it includes gtBlkOpKind to specify the type of
4765 // code generation that will be used for the block operation.
4767 struct GenTreeBlk : public GenTreeIndir
4770 // The data to be stored (null for GT_BLK)
4775 void SetData(GenTree* dataNode)
4780 // The size of the buffer to be copied.
4781 unsigned Size() const
4788 // Return true iff the object being copied contains one or more GC pointers.
4791 // True if this BlkOpNode is a volatile memory operation.
4792 bool IsVolatile() const
4794 return (gtFlags & GTF_BLK_VOLATILE) != 0;
4797 // True if this BlkOpNode is an unaligned memory operation.
4798 bool IsUnaligned() const
4800 return (gtFlags & GTF_BLK_UNALIGNED) != 0;
4803 // Instruction selection: during codegen time, what code sequence we will be using
4804 // to encode this operation.
4813 bool gtBlkOpGcUnsafe;
4815 GenTreeBlk(genTreeOps oper, var_types type, GenTree* addr, unsigned size)
4816 : GenTreeIndir(oper, type, addr, nullptr)
4818 , gtBlkOpKind(BlkOpKindInvalid)
4819 , gtBlkOpGcUnsafe(false)
4821 assert(OperIsBlk(oper));
4822 gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT);
4825 GenTreeBlk(genTreeOps oper, var_types type, GenTree* addr, GenTree* data, unsigned size)
4826 : GenTreeIndir(oper, type, addr, data), gtBlkSize(size), gtBlkOpKind(BlkOpKindInvalid), gtBlkOpGcUnsafe(false)
4828 assert(OperIsBlk(oper));
4829 gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT);
4830 gtFlags |= (data->gtFlags & GTF_ALL_EFFECT);
4833 #if DEBUGGABLE_GENTREE
4836 GenTreeBlk() : GenTreeIndir()
4839 #endif // DEBUGGABLE_GENTREE
4842 // gtObj -- 'object' (GT_OBJ).
4844 // This node is used for block values that may have GC pointers.
4846 struct GenTreeObj : public GenTreeBlk
4848 CORINFO_CLASS_HANDLE gtClass; // the class of the object
4850 // If non-null, this array represents the gc-layout of the class.
4851 // This may be simply copied when cloning this node, because it is not changed once computed.
4854 // If non-zero, this is the number of slots in the class layout that
4855 // contain gc-pointers.
4856 __declspec(property(get = GetGcPtrCount)) unsigned gtGcPtrCount;
4857 unsigned GetGcPtrCount() const
4859 assert(_gtGcPtrCount != UINT32_MAX);
4860 return _gtGcPtrCount;
4862 unsigned _gtGcPtrCount;
4864 // If non-zero, the number of pointer-sized slots that constitutes the class token.
4867 bool IsGCInfoInitialized()
4869 return (_gtGcPtrCount != UINT32_MAX);
4872 void SetGCInfo(BYTE* gcPtrs, unsigned gcPtrCount, unsigned slots)
4875 _gtGcPtrCount = gcPtrCount;
4877 if (gtGcPtrCount != 0)
4879 // We assume that we cannot have a struct with GC pointers that is not a multiple
4880 // of the register size.
4881 // The EE currently does not allow this, but it could change.
4882 // Let's assert it just to be safe.
4883 noway_assert(roundUp(gtBlkSize, REGSIZE_BYTES) == gtBlkSize);
4887 genTreeOps newOper = GT_BLK;
4888 if (gtOper == GT_STORE_OBJ)
4890 newOper = GT_STORE_BLK;
4894 assert(gtOper == GT_OBJ);
4900 void CopyGCInfo(GenTreeObj* srcObj)
4902 if (srcObj->IsGCInfoInitialized())
4904 gtGcPtrs = srcObj->gtGcPtrs;
4905 _gtGcPtrCount = srcObj->gtGcPtrCount;
4906 gtSlots = srcObj->gtSlots;
4910 GenTreeObj(var_types type, GenTree* addr, CORINFO_CLASS_HANDLE cls, unsigned size)
4911 : GenTreeBlk(GT_OBJ, type, addr, size), gtClass(cls)
4913 // By default, an OBJ is assumed to be a global reference.
4914 gtFlags |= GTF_GLOB_REF;
4915 noway_assert(cls != NO_CLASS_HANDLE);
4916 _gtGcPtrCount = UINT32_MAX;
4919 GenTreeObj(var_types type, GenTree* addr, GenTree* data, CORINFO_CLASS_HANDLE cls, unsigned size)
4920 : GenTreeBlk(GT_STORE_OBJ, type, addr, data, size), gtClass(cls)
4922 // By default, an OBJ is assumed to be a global reference.
4923 gtFlags |= GTF_GLOB_REF;
4924 noway_assert(cls != NO_CLASS_HANDLE);
4925 _gtGcPtrCount = UINT32_MAX;
4928 #if DEBUGGABLE_GENTREE
4929 GenTreeObj() : GenTreeBlk()
4935 // gtDynBlk -- 'dynamic block' (GT_DYN_BLK).
4937 // This node is used for block values that have a dynamic size.
4938 // Note that such a value can never have GC pointers.
4940 struct GenTreeDynBlk : public GenTreeBlk
4943 GenTree* gtDynamicSize;
4944 bool gtEvalSizeFirst;
4946 GenTreeDynBlk(GenTree* addr, GenTree* dynamicSize)
4947 : GenTreeBlk(GT_DYN_BLK, TYP_STRUCT, addr, 0), gtDynamicSize(dynamicSize), gtEvalSizeFirst(false)
4949 // Conservatively the 'addr' could be null or point into the global heap.
4950 gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;
4951 gtFlags |= (dynamicSize->gtFlags & GTF_ALL_EFFECT);
4954 #if DEBUGGABLE_GENTREE
4957 GenTreeDynBlk() : GenTreeBlk()
4960 #endif // DEBUGGABLE_GENTREE
// Read-modify-write status of a RMW memory op rooted at a storeInd
enum RMWStatus
{
    STOREIND_RMW_STATUS_UNKNOWN, // RMW status of storeInd unknown
                                 // Default status unless modified by IsRMWMemOpRootedAtStoreInd()

    // One of these denote storeind is a RMW memory operation.
    STOREIND_RMW_DST_IS_OP1, // StoreInd is known to be a RMW memory op and dst candidate is op1
    STOREIND_RMW_DST_IS_OP2, // StoreInd is known to be a RMW memory op and dst candidate is op2

    // One of these denote the reason for storeind is marked as non-RMW operation
    STOREIND_RMW_UNSUPPORTED_ADDR, // Addr mode is not yet supported for RMW memory
    STOREIND_RMW_UNSUPPORTED_OPER, // Operation is not supported for RMW memory
    STOREIND_RMW_UNSUPPORTED_TYPE, // Type is not supported for RMW memory
    STOREIND_RMW_INDIR_UNEQUAL     // Indir to read value is not equivalent to indir that writes the value
};
4980 // StoreInd is just a BinOp, with additional RMW status
4981 struct GenTreeStoreInd : public GenTreeIndir
4983 #if !CPU_LOAD_STORE_ARCH
4984 // The below flag is set and used during lowering
4985 RMWStatus gtRMWStatus;
4987 bool IsRMWStatusUnknown()
4989 return gtRMWStatus == STOREIND_RMW_STATUS_UNKNOWN;
4991 bool IsNonRMWMemoryOp()
4993 return gtRMWStatus == STOREIND_RMW_UNSUPPORTED_ADDR || gtRMWStatus == STOREIND_RMW_UNSUPPORTED_OPER ||
4994 gtRMWStatus == STOREIND_RMW_UNSUPPORTED_TYPE || gtRMWStatus == STOREIND_RMW_INDIR_UNEQUAL;
4996 bool IsRMWMemoryOp()
4998 return gtRMWStatus == STOREIND_RMW_DST_IS_OP1 || gtRMWStatus == STOREIND_RMW_DST_IS_OP2;
5002 return gtRMWStatus == STOREIND_RMW_DST_IS_OP1;
5006 return gtRMWStatus == STOREIND_RMW_DST_IS_OP2;
5008 #endif //! CPU_LOAD_STORE_ARCH
5010 RMWStatus GetRMWStatus()
5012 #if !CPU_LOAD_STORE_ARCH
5015 return STOREIND_RMW_STATUS_UNKNOWN;
5019 void SetRMWStatusDefault()
5021 #if !CPU_LOAD_STORE_ARCH
5022 gtRMWStatus = STOREIND_RMW_STATUS_UNKNOWN;
5026 void SetRMWStatus(RMWStatus status)
5028 #if !CPU_LOAD_STORE_ARCH
5029 gtRMWStatus = status;
5038 GenTreeStoreInd(var_types type, GenTree* destPtr, GenTree* data) : GenTreeIndir(GT_STOREIND, type, destPtr, data)
5040 SetRMWStatusDefault();
5043 #if DEBUGGABLE_GENTREE
5046 // Used only for GenTree::GetVtableForOper()
5047 GenTreeStoreInd() : GenTreeIndir()
5049 SetRMWStatusDefault();
5054 /* gtRetExp -- Place holder for the return expression from an inline candidate (GT_RET_EXPR) */
5056 struct GenTreeRetExpr : public GenTree
5058 GenTree* gtInlineCandidate;
5060 CORINFO_CLASS_HANDLE gtRetClsHnd;
5062 GenTreeRetExpr(var_types type) : GenTree(GT_RET_EXPR, type)
5065 #if DEBUGGABLE_GENTREE
5066 GenTreeRetExpr() : GenTree()
5072 /* gtStmt -- 'statement expr' (GT_STMT) */
5074 class InlineContext;
5076 struct GenTreeStmt : public GenTree
5078 GenTree* gtStmtExpr; // root of the expression tree
5079 GenTree* gtStmtList; // first node (for forward walks)
5080 InlineContext* gtInlineContext; // The inline context for this statement.
5081 IL_OFFSETX gtStmtILoffsx; // instr offset (if available)
5084 IL_OFFSET gtStmtLastILoffs; // instr offset at end of stmt
5087 __declspec(property(get = getNextStmt)) GenTreeStmt* gtNextStmt;
5089 __declspec(property(get = getPrevStmt)) GenTreeStmt* gtPrevStmt;
5091 GenTreeStmt* getNextStmt()
5093 if (gtNext == nullptr)
5099 return gtNext->AsStmt();
5103 GenTreeStmt* getPrevStmt()
5105 if (gtPrev == nullptr)
5111 return gtPrev->AsStmt();
5115 GenTreeStmt(GenTree* expr, IL_OFFSETX offset)
5116 : GenTree(GT_STMT, TYP_VOID)
5118 , gtStmtList(nullptr)
5119 , gtInlineContext(nullptr)
5120 , gtStmtILoffsx(offset)
5122 , gtStmtLastILoffs(BAD_IL_OFFSET)
5125 // Statements can't have statements as part of their expression tree.
5126 assert(expr->gtOper != GT_STMT);
5128 // Set the statement to have the same costs as the top node of the tree.
5129 // This is used long before costs have been assigned, so we need to copy
5134 #if DEBUGGABLE_GENTREE
5135 GenTreeStmt() : GenTree(GT_STMT, TYP_VOID)
/*  NOTE: Any tree nodes that are larger than 8 bytes (two ints or
    pointers) must be flagged as 'large' in GenTree::InitNodeSize().
 */
5145 /* gtClsVar -- 'static data member' (GT_CLS_VAR) */
5147 struct GenTreeClsVar : public GenTree
5149 CORINFO_FIELD_HANDLE gtClsVarHnd;
5150 FieldSeqNode* gtFieldSeq;
5152 GenTreeClsVar(var_types type, CORINFO_FIELD_HANDLE clsVarHnd, FieldSeqNode* fldSeq)
5153 : GenTree(GT_CLS_VAR, type), gtClsVarHnd(clsVarHnd), gtFieldSeq(fldSeq)
5155 gtFlags |= GTF_GLOB_REF;
5157 #if DEBUGGABLE_GENTREE
5158 GenTreeClsVar() : GenTree()
5164 /* gtArgPlace -- 'register argument placeholder' (GT_ARGPLACE) */
5166 struct GenTreeArgPlace : public GenTree
5168 CORINFO_CLASS_HANDLE gtArgPlaceClsHnd; // Needed when we have a TYP_STRUCT argument
5170 GenTreeArgPlace(var_types type, CORINFO_CLASS_HANDLE clsHnd) : GenTree(GT_ARGPLACE, type), gtArgPlaceClsHnd(clsHnd)
5173 #if DEBUGGABLE_GENTREE
5174 GenTreeArgPlace() : GenTree()
5180 /* gtLabel -- code label target (GT_LABEL) */
5182 struct GenTreeLabel : public GenTree
5184 BasicBlock* gtLabBB;
5186 GenTreeLabel(BasicBlock* bb) : GenTree(GT_LABEL, TYP_VOID), gtLabBB(bb)
5189 #if DEBUGGABLE_GENTREE
5190 GenTreeLabel() : GenTree()
5196 /* gtPhiArg -- phi node rhs argument, var = phi(phiarg, phiarg, phiarg...); GT_PHI_ARG */
5197 struct GenTreePhiArg : public GenTreeLclVarCommon
5199 BasicBlock* gtPredBB;
5201 GenTreePhiArg(var_types type, unsigned lclNum, unsigned snum, BasicBlock* block)
5202 : GenTreeLclVarCommon(GT_PHI_ARG, type, lclNum), gtPredBB(block)
5207 #if DEBUGGABLE_GENTREE
5208 GenTreePhiArg() : GenTreeLclVarCommon()
5214 /* gtPutArgStk -- Argument passed on stack (GT_PUTARG_STK) */
5216 struct GenTreePutArgStk : public GenTreeUnOp
5218 unsigned gtSlotNum; // Slot number of the argument to be passed on stack
5219 #if defined(UNIX_X86_ABI)
5220 unsigned gtPadAlign; // Number of padding slots for stack alignment
5223 // Don't let clang-format mess with the GenTreePutArgStk constructor.
5226 GenTreePutArgStk(genTreeOps oper,
5230 PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots),
5231 bool putInIncomingArgArea = false,
5232 GenTreeCall* callNode = nullptr)
5233 : GenTreeUnOp(oper, type, op1 DEBUGARG(/*largeNode*/ false))
5234 , gtSlotNum(slotNum)
5235 #if defined(UNIX_X86_ABI)
5238 #if FEATURE_FASTTAILCALL
5239 , gtPutInIncomingArgArea(putInIncomingArgArea)
5240 #endif // FEATURE_FASTTAILCALL
5241 #ifdef FEATURE_PUT_STRUCT_ARG_STK
5242 , gtPutArgStkKind(Kind::Invalid)
5243 , gtNumSlots(numSlots)
5244 , gtNumberReferenceSlots(0)
5246 #endif // FEATURE_PUT_STRUCT_ARG_STK
5247 #if defined(DEBUG) || defined(UNIX_X86_ABI)
5255 #if FEATURE_FASTTAILCALL
5257 bool gtPutInIncomingArgArea; // Whether this arg needs to be placed in incoming arg area.
5258 // By default this is false and will be placed in out-going arg area.
5259 // Fast tail calls set this to true.
5260 // In future if we need to add more such bool fields consider bit fields.
5262 bool putInIncomingArgArea() const
5264 return gtPutInIncomingArgArea;
5267 #else // !FEATURE_FASTTAILCALL
5269 bool putInIncomingArgArea() const
5274 #endif // !FEATURE_FASTTAILCALL
5276 unsigned getArgOffset()
5278 return gtSlotNum * TARGET_POINTER_SIZE;
5281 #if defined(UNIX_X86_ABI)
5282 unsigned getArgPadding()
5287 void setArgPadding(unsigned padAlign)
5289 gtPadAlign = padAlign;
5293 #ifdef FEATURE_PUT_STRUCT_ARG_STK
5295 unsigned getArgSize()
5297 return gtNumSlots * TARGET_POINTER_SIZE;
5300 // Return true if this is a PutArgStk of a SIMD12 struct.
5301 // This is needed because such values are re-typed to SIMD16, and the type of PutArgStk is VOID.
5304 return (varTypeIsSIMD(gtOp1) && (gtNumSlots == 3));
5307 //------------------------------------------------------------------------
5308 // setGcPointers: Sets the number of references and the layout of the struct object returned by the VM.
5311 // numPointers - Number of pointer references.
5312 // pointers - layout of the struct (with pointers marked.)
5318 // This data is used in the codegen for GT_PUTARG_STK to decide how to copy the struct to the stack by value.
5319 // If no pointer references are used, block copying instructions are used.
5320 // Otherwise the pointer reference slots are copied atomically in a way that gcinfo is emitted.
5321 // Any non pointer references between the pointer reference slots are copied in block fashion.
5323 void setGcPointers(unsigned numPointers, BYTE* pointers)
5325 gtNumberReferenceSlots = numPointers;
5326 gtGcPtrs = pointers;
5329 // Instruction selection: during codegen time, what code sequence we will be using
5330 // to encode this operation.
5331 // TODO-Throughput: The following information should be obtained from the child
5334 enum class Kind : __int8{
5335 Invalid, RepInstr, Unroll, Push, PushAllSlots,
5338 Kind gtPutArgStkKind;
5341 return (gtPutArgStkKind == Kind::Push) || (gtPutArgStkKind == Kind::PushAllSlots);
5344 unsigned gtNumSlots; // Number of slots for the argument to be passed on stack
5345 unsigned gtNumberReferenceSlots; // Number of reference slots.
5346 BYTE* gtGcPtrs; // gcPointers
5348 #elif !defined(LEGACY_BACKEND)
5349 unsigned getArgSize();
5350 #endif // !LEGACY_BACKEND
5352 #if defined(DEBUG) || defined(UNIX_X86_ABI)
5353 GenTreeCall* gtCall; // the call node to which this argument belongs
5356 #if DEBUGGABLE_GENTREE
5357 GenTreePutArgStk() : GenTreeUnOp()
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
// Represent the struct argument: split value in register(s) and stack
struct GenTreePutArgSplit : public GenTreePutArgStk
{
    unsigned gtNumRegs; // Count of registers used by the split argument.

    GenTreePutArgSplit(GenTree* op1,
                       unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots),
                       unsigned numRegs,
                       bool     putIncomingArgArea = false,
                       GenTreeCall* callNode       = nullptr)
        : GenTreePutArgStk(GT_PUTARG_SPLIT,
                           TYP_STRUCT,
                           op1,
                           slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(numSlots),
                           putIncomingArgArea,
                           callNode)
        , gtNumRegs(numRegs)
    {
        ClearOtherRegs();
        ClearOtherRegFlags();
    }

    // Type required to support multi-reg struct arg.
    var_types m_regType[MAX_REG_ARG];

    // First reg of struct is always given by gtRegNum.
    // gtOtherRegs holds the other reg numbers of struct.
    regNumberSmall gtOtherRegs[MAX_REG_ARG - 1];

    // GTF_SPILL or GTF_SPILLED flag on a multi-reg struct node indicates that one or
    // more of its result regs are in that state. The spill flag of each of the
    // return register is stored here. We only need 2 bits per register,
    // so this is treated as a 2-bit array.
    static const unsigned PACKED_GTF_SPILL   = 1;
    static const unsigned PACKED_GTF_SPILLED = 2;
    unsigned char         gtSpillFlags;

    //---------------------------------------------------------------------------
    // GetRegNumByIdx: get ith register allocated to this struct argument.
    //
    // Arguments:
    //     idx   -   index of the struct
    //
    // Return Value:
    //     Return regNumber of ith register of this struct argument
    //
    regNumber GetRegNumByIdx(unsigned idx) const
    {
        assert(idx < MAX_REG_ARG);
        if (idx == 0)
        {
            return gtRegNum;
        }
        return (regNumber)gtOtherRegs[idx - 1];
    }

    //----------------------------------------------------------------------
    // SetRegNumByIdx: set ith register of this struct argument
    //
    // Arguments:
    //    reg    -   reg number
    //    idx    -   index of the struct
    //
    void SetRegNumByIdx(regNumber reg, unsigned idx)
    {
        assert(idx < MAX_REG_ARG);
        if (idx == 0)
        {
            gtRegNum = reg;
        }
        else
        {
            gtOtherRegs[idx - 1] = (regNumberSmall)reg;
            // regNumberSmall is narrower; verify nothing was truncated.
            assert(gtOtherRegs[idx - 1] == reg);
        }
    }

    //----------------------------------------------------------------------------
    // ClearOtherRegs: clear multi-reg state to indicate no regs are allocated
    //
    void ClearOtherRegs()
    {
        for (unsigned i = 0; i < MAX_REG_ARG - 1; ++i)
        {
            gtOtherRegs[i] = REG_NA;
        }
    }

    //----------------------------------------------------------------------
    // GetRegSpillFlagByIdx: get spill flag associated with the register
    // specified by its index.
    //
    // Arguments:
    //    idx  -  Position or index of the register
    //
    // Return Value:
    //    Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered.
    //
    unsigned GetRegSpillFlagByIdx(unsigned idx) const
    {
        assert(idx < MAX_REG_ARG);

        unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here.
        unsigned spillFlags = 0;
        if (bits & PACKED_GTF_SPILL)
        {
            spillFlags |= GTF_SPILL;
        }
        if (bits & PACKED_GTF_SPILLED)
        {
            spillFlags |= GTF_SPILLED;
        }
        return spillFlags;
    }

    //----------------------------------------------------------------------
    // SetRegSpillFlagByIdx: set spill flags for the register
    // specified by its index.
    //
    // Arguments:
    //    flags  -  GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed.
    //    idx    -  Position or index of the register
    //
    void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
    {
        assert(idx < MAX_REG_ARG);

        unsigned bits = 0;
        if (flags & GTF_SPILL)
        {
            bits |= PACKED_GTF_SPILL;
        }
        if (flags & GTF_SPILLED)
        {
            bits |= PACKED_GTF_SPILLED;
        }

        const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED;

        // Clear anything that was already there by masking out the bits before 'or'ing in what we want there.
        gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2)));
    }

    //--------------------------------------------------------------------------
    // GetRegType:  Get var_type of the register specified by index.
    //
    // Arguments:
    //    index - Index of the register.
    //            First register will have an index 0 and so on.
    //
    // Return Value:
    //    var_type of the register specified by its index.
    //
    var_types GetRegType(unsigned index)
    {
        assert(index < gtNumRegs);
        var_types result = m_regType[index];
        return result;
    }

    //-------------------------------------------------------------------
    // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
    //
    void ClearOtherRegFlags()
    {
        gtSpillFlags = 0;
    }

#ifdef FEATURE_PUT_STRUCT_ARG_STK
    unsigned getArgSize()
    {
        return (gtNumSlots + gtNumRegs) * TARGET_POINTER_SIZE;
    }
#endif // FEATURE_PUT_STRUCT_ARG_STK

#if DEBUGGABLE_GENTREE
    GenTreePutArgSplit() : GenTreePutArgStk()
    {
    }
#endif
};
#endif // !LEGACY_BACKEND && _TARGET_ARM_
5568 // Represents GT_COPY or GT_RELOAD node
5569 struct GenTreeCopyOrReload : public GenTreeUnOp
5571 #if FEATURE_MULTIREG_RET
5572 // State required to support copy/reload of a multi-reg call node.
5573 // First register is is always given by gtRegNum.
5575 regNumberSmall gtOtherRegs[MAX_RET_REG_COUNT - 1];
5578 //----------------------------------------------------------
5579 // ClearOtherRegs: set gtOtherRegs to REG_NA.
5587 void ClearOtherRegs()
5589 #if FEATURE_MULTIREG_RET
5590 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
5592 gtOtherRegs[i] = REG_NA;
5597 //-----------------------------------------------------------
5598 // GetRegNumByIdx: Get regNumber of ith position.
5601 // idx - register position.
5604 // Returns regNumber assigned to ith position.
5606 regNumber GetRegNumByIdx(unsigned idx) const
5608 assert(idx < MAX_RET_REG_COUNT);
5615 #if FEATURE_MULTIREG_RET
5616 return (regNumber)gtOtherRegs[idx - 1];
5622 //-----------------------------------------------------------
5623 // SetRegNumByIdx: Set the regNumber for ith position.
5627 // idx - register position.
5632 void SetRegNumByIdx(regNumber reg, unsigned idx)
5634 assert(idx < MAX_RET_REG_COUNT);
5640 #if FEATURE_MULTIREG_RET
5643 gtOtherRegs[idx - 1] = (regNumberSmall)reg;
5644 assert(gtOtherRegs[idx - 1] == reg);
5654 //----------------------------------------------------------------------------
5655 // CopyOtherRegs: copy multi-reg state from the given copy/reload node to this
5659 // from - GenTree node from which to copy multi-reg state
5664 // TODO-ARM: Implement this routine for Arm64 and Arm32
5665 // TODO-X86: Implement this routine for x86
5666 void CopyOtherRegs(GenTreeCopyOrReload* from)
5668 assert(OperGet() == from->OperGet());
5670 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
5671 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
5673 gtOtherRegs[i] = from->gtOtherRegs[i];
5678 GenTreeCopyOrReload(genTreeOps oper, var_types type, GenTree* op1) : GenTreeUnOp(oper, type, op1)
5684 #if DEBUGGABLE_GENTREE
5685 GenTreeCopyOrReload() : GenTreeUnOp()
5691 // Represents GT_ALLOCOBJ node
5693 struct GenTreeAllocObj final : public GenTreeUnOp
5695 unsigned int gtNewHelper; // Value returned by ICorJitInfo::getNewHelper
5696 CORINFO_CLASS_HANDLE gtAllocObjClsHnd;
5698 GenTreeAllocObj(var_types type, unsigned int helper, CORINFO_CLASS_HANDLE clsHnd, GenTree* op)
5699 : GenTreeUnOp(GT_ALLOCOBJ, type, op DEBUGARG(/*largeNode*/ TRUE))
5700 , // This node in most cases will be changed to a call node
5702 , gtAllocObjClsHnd(clsHnd)
5705 #if DEBUGGABLE_GENTREE
5706 GenTreeAllocObj() : GenTreeUnOp()
5712 // Represents GT_RUNTIMELOOKUP node
// Wraps a lookup tree and records the generic handle (and its kind:
// class/method/field) that the lookup was performed for.
5714 struct GenTreeRuntimeLookup final : public GenTreeUnOp
// The input handle of the lookup and its kind (CORINFO_HANDLETYPE_*).
5716     CORINFO_GENERIC_HANDLE gtHnd;
5717     CorInfoGenericHandleType gtHndType;
// The node takes on the type of the wrapped lookup tree; the handle must be non-null.
5719     GenTreeRuntimeLookup(CORINFO_GENERIC_HANDLE hnd, CorInfoGenericHandleType hndTyp, GenTree* tree)
5720         : GenTreeUnOp(GT_RUNTIMELOOKUP, tree->gtType, tree DEBUGARG(/*largeNode*/ FALSE)), gtHnd(hnd), gtHndType(hndTyp)
5722         assert(hnd != nullptr);
5724 #if DEBUGGABLE_GENTREE
// Debug-only default constructor.
5725     GenTreeRuntimeLookup() : GenTreeUnOp()
5730     // Return reference to the actual tree that does the lookup
// Predicates for the kind of the *input* handle.
5736     bool IsClassHandle() const
5738         return gtHndType == CORINFO_HANDLETYPE_CLASS;
5740     bool IsMethodHandle() const
5742         return gtHndType == CORINFO_HANDLETYPE_METHOD;
5744     bool IsFieldHandle() const
5746         return gtHndType == CORINFO_HANDLETYPE_FIELD;
5749     // Note these operations describe the handle that is input to the
5750     // lookup, not the handle produced by the lookup.
5751     CORINFO_CLASS_HANDLE GetClassHandle() const
5753         assert(IsClassHandle());
5754         return (CORINFO_CLASS_HANDLE)gtHnd;
5756     CORINFO_METHOD_HANDLE GetMethodHandle() const
5758         assert(IsMethodHandle());
5759         return (CORINFO_METHOD_HANDLE)gtHnd;
5761 CORINFO_FIELD_HANDLE GetFieldHandle() const
5763 assert(IsMethodHandle());
5764 return (CORINFO_FIELD_HANDLE)gtHnd;
5768 // Represents a GT_JCC or GT_SETCC node.
// Carries the relational condition (a relop oper) that the jump/set-on-condition
// node tests; the constructor asserts both the oper and the condition kind.
5770 struct GenTreeCC final : public GenTree
5772     genTreeOps gtCondition; // any relop
5774     GenTreeCC(genTreeOps oper, genTreeOps condition, var_types type = TYP_VOID)
5775         : GenTree(oper, type DEBUGARG(/*largeNode*/ FALSE)), gtCondition(condition)
5777         assert(OperIs(GT_JCC, GT_SETCC));
5778         assert(OperIsCompare(condition));
5781 #if DEBUGGABLE_GENTREE
// Debug-only default constructor.
5782     GenTreeCC() : GenTree()
5785 #endif // DEBUGGABLE_GENTREE
5788 //------------------------------------------------------------------------
5789 // Deferred inline functions of GenTree -- these need the subtypes above to
5790 // be defined already.
5791 //------------------------------------------------------------------------
// True for a block operation: a GT_ASG whose destination is a struct, or
// (RyuJIT backend only) a block-oper node that carries a data (source) operand.
// NOTE(review): the tail of this expression (closing "#endif" / ");") is not
// visible in this chunk.
5793 inline bool GenTree::OperIsBlkOp()
5795     return (((gtOper == GT_ASG) && varTypeIsStruct(gtOp.gtOp1))
5796 #ifndef LEGACY_BACKEND
5797             || (OperIsBlk() && (AsBlk()->Data() != nullptr))
// True for a dynamically-sized block op: an assignment whose source is a
// GT_DYN_BLK, or (RyuJIT backend only) a GT_STORE_DYN_BLK node.
// NOTE(review): the GT_STORE_DYN_BLK branch body and final return are not
// visible in this chunk.
5802 inline bool GenTree::OperIsDynBlkOp()
5804     if (gtOper == GT_ASG)
5806         return gtGetOp1()->OperGet() == GT_DYN_BLK;
5808 #ifndef LEGACY_BACKEND
5809     else if (gtOper == GT_STORE_DYN_BLK)
// True for a block initialization: the store's source (after skipping any
// reload/copy wrapper) is an init-val node or a constant.
// NOTE(review): several lines of this function (the OperIsBlkOp guard and the
// GT_ASG branch body) are not visible in this chunk.
5817 inline bool GenTree::OperIsInitBlkOp()
5823 #ifndef LEGACY_BACKEND
5825     if (gtOper == GT_ASG)
5831         src = AsBlk()->Data()->gtSkipReloadOrCopy();
5833 #else  // LEGACY_BACKEND
5834     GenTree* src = gtGetOp2();
5835 #endif // LEGACY_BACKEND
5836     return src->OperIsInitVal() || src->OperIsConst();
5839 inline bool GenTree::OperIsCopyBlkOp()
5841 return OperIsBlkOp() && !OperIsInitBlkOp();
5844 //------------------------------------------------------------------------
5845 // IsFPZero: Checks whether this is a floating point constant with value 0.0
5848 // Returns true iff the tree is an GT_CNS_DBL, with value of 0.0.
5850 inline bool GenTree::IsFPZero()
5852 if ((gtOper == GT_CNS_DBL) && (gtDblCon.gtDconVal == 0.0))
5859 //------------------------------------------------------------------------
5860 // IsIntegralConst: Checks whether this is a constant node with the given value
5863 // constVal - the value of interest
5866 // Returns true iff the tree is an integral constant opcode, with
5870 // Like gtIconVal, the argument is of ssize_t, so cannot check for
5871 // long constants in a target-independent way.
5873 inline bool GenTree::IsIntegralConst(ssize_t constVal)
5876 if ((gtOper == GT_CNS_INT) && (gtIntConCommon.IconValue() == constVal))
5881 if ((gtOper == GT_CNS_LNG) && (gtIntConCommon.LngValue() == constVal))
5889 //-------------------------------------------------------------------
5890 // IsIntegralConstVector: returns true if this this is a SIMD vector
5891 // with all its elements equal to an integral constant.
5894 //    constVal - const value of vector element
5897 //    True if this represents an integral const SIMD vector.
// NOTE(review): the "return true" / "return false" tails of this function are
// not visible in this chunk.
5899 inline bool GenTree::IsIntegralConstVector(ssize_t constVal)
5902     // SIMDIntrinsicInit intrinsic with a const value as initializer
5903     // represents a const vector.
5904     if ((gtOper == GT_SIMD) && (gtSIMD.gtSIMDIntrinsicID == SIMDIntrinsicInit) && gtGetOp1()->IsIntegralConst(constVal))
5906         assert(varTypeIsIntegral(gtSIMD.gtSIMDBaseType));
// A const-init SIMD node must not have a second operand.
5907         assert(gtGetOp2IfPresent() == nullptr);
5915 inline bool GenTree::IsBoxedValue()
5917 assert(gtOper != GT_BOX || gtBox.BoxOp() != nullptr);
5918 return (gtOper == GT_BOX) && (gtFlags & GTF_BOX_VALUE);
// Returns true when this is a GT_SIMD node performing a vector-wide
// equality or inequality intrinsic.
// NOTE(review): the enclosing FEATURE_SIMD guard and the final return are not
// visible in this chunk.
5921 inline bool GenTree::IsSIMDEqualityOrInequality() const
5924     if (gtOper == GT_SIMD)
5926         SIMDIntrinsicID id = AsSIMD()->gtSIMDIntrinsicID;
5927         return (id == SIMDIntrinsicOpEquality) || (id == SIMDIntrinsicOpInEquality);
// Advances to the next entry of a GT_LIST / GT_FIELD_LIST chain.
// Only legal on list nodes (asserted).
// NOTE(review): the return statement is not visible in this chunk.
5934 inline GenTree* GenTree::MoveNext()
5936     assert(OperIsAnyList());
5941 //------------------------------------------------------------------------
5942 // IsValidCallArgument: Given an GenTree node that represents an argument
5943 //                      enforce (or don't enforce) the following invariant.
5946 //    instance method for a GenTree node
5949 //    true:  the GenTree node is accepted as a valid argument
5950 //    false: the GenTree node is not accepted as a valid argument
5953 //    For targets that don't support arguments as a list of fields, we do not support GT_FIELD_LIST.
5955 //    Currently for AMD64 UNIX we allow a limited case where a GT_FIELD_LIST is
5956 //    allowed but every element must be a GT_LCL_FLD.
5958 //    For the future targets that allow for Multireg args (and this includes the current ARM64 target),
5959 //    or that allow for passing promoted structs, we allow a GT_FIELD_LIST of arbitrary nodes.
5960 //    These would typically start out as GT_LCL_VARs or GT_LCL_FLDS or GT_INDs,
5961 //    but could be changed into constants or GT_COMMA trees by the later
5962 //    optimization phases.
// NOTE(review): several lines of this function (intermediate returns and the
// final "return true") are not visible in this chunk; code below kept as-is.
5964 inline bool GenTree::IsValidCallArgument()
5968     // GT_FIELD_LIST is the only list allowed.
5971     if (OperIsFieldList())
5973 #if defined(LEGACY_BACKEND) || (!FEATURE_MULTIREG_ARGS && !FEATURE_PUT_STRUCT_ARG_STK)
5974         // Not allowed to have a GT_FIELD_LIST for an argument
5975         // unless we have a RyuJIT backend and FEATURE_MULTIREG_ARGS or FEATURE_PUT_STRUCT_ARG_STK
5979 #else // we have RyuJIT backend and FEATURE_MULTIREG_ARGS or FEATURE_PUT_STRUCT_ARG_STK
5981 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
5982         // For UNIX ABI we currently only allow a GT_FIELD_LIST of GT_LCL_FLDs nodes
5983         GenTree* gtListPtr = this;
5984         while (gtListPtr != nullptr)
5986             // ToDo: fix UNIX_AMD64 so that we do not generate this kind of a List
5987             //  Note the list as currently created is malformed, as the last entry is a nullptr
5988             if (gtListPtr->Current() == nullptr)
5993             // Only a list of GT_LCL_FLDs is allowed
5994             if (gtListPtr->Current()->OperGet() != GT_LCL_FLD)
5998             gtListPtr = gtListPtr->MoveNext();
6000 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
6002         // Note that for non-UNIX ABI the GT_FIELD_LIST may contain any node
6004         // We allow this GT_FIELD_LIST as an argument
6007 #endif // FEATURE_MULTIREG_ARGS
6009     // We don't have either kind of list, so it satisfies the invariant.
// Returns the payload of the current GT_LIST / GT_FIELD_LIST entry.
// Only legal on list nodes (asserted).
// NOTE(review): the return statement is not visible in this chunk.
6014 inline GenTree* GenTree::Current()
6016     assert(OperIsAnyList());
6020 inline GenTree** GenTree::pCurrent()
6022 assert(OperIsAnyList());
6023 return &(gtOp.gtOp1);
6026 inline GenTree* GenTree::gtGetOp1() const
6028 return AsOp()->gtOp1;
// Classifies which binary opers require a non-null second operand; used by the
// asserts in gtGetOp2 / gtGetOp2IfPresent below.
// NOTE(review): the entire switch body of this function is not visible in this
// chunk; only the signature and a LEGACY_BACKEND guard line survive.
6033 inline bool GenTree::RequiresNonNullOp2(genTreeOps oper)
6054 #ifdef LEGACY_BACKEND
6086 inline GenTree* GenTree::gtGetOp2() const
6088 assert(OperIsBinary());
6090 GenTree* op2 = AsOp()->gtOp2;
6092 // Only allow null op2 if the node type allows it, e.g. GT_LIST.
6093 assert((op2 != nullptr) || !RequiresNonNullOp2(gtOper));
6098 inline GenTree* GenTree::gtGetOp2IfPresent() const
6100 /* gtOp.gtOp2 is only valid for GTK_BINOP nodes. */
6102 GenTree* op2 = OperIsBinary() ? AsOp()->gtOp2 : nullptr;
6104 // This documents the genTreeOps for which gtOp.gtOp2 cannot be nullptr.
6105 // This helps prefix in its analysis of code which calls gtGetOp2()
6107 assert((op2 != nullptr) || !RequiresNonNullOp2(gtOper));
6112 inline GenTree* GenTree::gtEffectiveVal(bool commaOnly)
6114 GenTree* effectiveVal = this;
6117 if (effectiveVal->gtOper == GT_COMMA)
6119 effectiveVal = effectiveVal->gtOp.gtOp2;
6121 else if (!commaOnly && (effectiveVal->gtOper == GT_NOP) && (effectiveVal->gtOp.gtOp1 != nullptr))
6123 effectiveVal = effectiveVal->gtOp.gtOp1;
6127 return effectiveVal;
6132 //-------------------------------------------------------------------------
6133 // gtRetExprVal - walk back through GT_RET_EXPRs
6136 // tree representing return value from a successful inline,
6137 // or original call for failed or yet to be determined inline.
6140 // Multi-level inlines can form chains of GT_RET_EXPRs.
6141 // This method walks back to the root of the chain.
6143 inline GenTree* GenTree::gtRetExprVal()
6145 GenTree* retExprVal = this;
6148 if (retExprVal->gtOper == GT_RET_EXPR)
6150 retExprVal = retExprVal->gtRetExpr.gtInlineCandidate;
6159 inline GenTree* GenTree::gtSkipReloadOrCopy()
6161 // There can be only one reload or copy (we can't have a reload/copy of a reload/copy)
6162 if (gtOper == GT_RELOAD || gtOper == GT_COPY)
6164 assert(gtGetOp1()->OperGet() != GT_RELOAD && gtGetOp1()->OperGet() != GT_COPY);
6170 //-----------------------------------------------------------------------------------
6171 // IsMultiRegCall: whether a call node returning its value in more than one register
6177 // Returns true if this GenTree is a multi register returning call
6178 inline bool GenTree::IsMultiRegCall() const
6182 // We cannot use AsCall() as it is not declared const
6183 const GenTreeCall* call = reinterpret_cast<const GenTreeCall*>(this);
6184 return call->HasMultiRegRetVal();
6190 //-----------------------------------------------------------------------------------
6191 // IsMultiRegNode: whether a node returning its value in more than one register
6197 // Returns true if this GenTree is a multi-reg node.
6198 inline bool GenTree::IsMultiRegNode() const
6200 if (IsMultiRegCall())
6205 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
6206 if (OperIsMultiRegOp() || OperIsPutArgSplit() || (gtOper == GT_COPY))
6215 //-------------------------------------------------------------------------
6216 // IsCopyOrReload: whether this is a GT_COPY or GT_RELOAD node.
6222 // Returns true if this GenTree is a copy or reload node.
6223 inline bool GenTree::IsCopyOrReload() const
6225 return (gtOper == GT_COPY || gtOper == GT_RELOAD);
6228 //-----------------------------------------------------------------------------------
6229 // IsCopyOrReloadOfMultiRegCall: whether this is a GT_COPY or GT_RELOAD of a multi-reg
6236 // Returns true if this GenTree is a copy or reload of multi-reg call node.
6237 inline bool GenTree::IsCopyOrReloadOfMultiRegCall() const
6239 if (IsCopyOrReload())
6241 return gtGetOp1()->IsMultiRegCall();
6247 inline bool GenTree::IsCnsIntOrI() const
6249 return (gtOper == GT_CNS_INT);
6252 inline bool GenTree::IsIntegralConst() const
6254 #ifdef _TARGET_64BIT_
6255 return IsCnsIntOrI();
6256 #else // !_TARGET_64BIT_
6257 return ((gtOper == GT_CNS_INT) || (gtOper == GT_CNS_LNG));
6258 #endif // !_TARGET_64BIT_
6261 // Is this node an integer constant that fits in a 32-bit signed integer (INT32)
6262 inline bool GenTree::IsIntCnsFitsInI32()
6264 #ifdef _TARGET_64BIT_
6265 return IsCnsIntOrI() && AsIntCon()->FitsInI32();
6266 #else // !_TARGET_64BIT_
6267 return IsCnsIntOrI();
6268 #endif // !_TARGET_64BIT_
6271 inline bool GenTree::IsCnsFltOrDbl() const
6273 return OperGet() == GT_CNS_DBL;
6276 inline bool GenTree::IsCnsNonZeroFltOrDbl()
6278 if (OperGet() == GT_CNS_DBL)
6280 double constValue = gtDblCon.gtDconVal;
6281 return *(__int64*)&constValue != 0;
6287 inline bool GenTree::IsHelperCall()
6289 return OperGet() == GT_CALL && gtCall.gtCallType == CT_HELPER;
6292 inline var_types GenTree::CastFromType()
6294 return this->gtCast.CastOp()->TypeGet();
6296 inline var_types& GenTree::CastToType()
6298 return this->gtCast.gtCastType;
6301 //-----------------------------------------------------------------------------------
6302 // HasGCPtr: determine whether this block op involves GC pointers
6308 // Returns true iff the object being copied contains one or more GC pointers.
6311 // Of the block nodes, only GT_OBJ and ST_STORE_OBJ are allowed to have GC pointers.
6313 inline bool GenTreeBlk::HasGCPtr()
6315 if ((gtOper == GT_OBJ) || (gtOper == GT_STORE_OBJ))
6317 return (AsObj()->gtGcPtrCount != 0);
6322 inline bool GenTree::isUsedFromSpillTemp() const
6324 #if !defined(LEGACY_BACKEND)
6325 // If spilled and no reg at use, then it is used from the spill temp location rather than being reloaded.
6326 if (((gtFlags & GTF_SPILLED) != 0) && ((gtFlags & GTF_NOREG_AT_USE) != 0))
6330 #endif //! LEGACY_BACKEND
6335 /*****************************************************************************/
6337 #ifndef _HOST_64BIT_
6338 #include <poppack.h>
6341 /*****************************************************************************/
6343 #if SMALL_TREE_NODES
6345 // In debug, on some platforms (e.g., when LATE_DISASM is defined), GenTreeIntCon is bigger than GenTreeLclFld.
// Size of a "small" tree node: large enough for either of the two biggest small node types.
6346 const size_t TREE_NODE_SZ_SMALL = max(sizeof(GenTreeIntCon), sizeof(GenTreeLclFld));
6348 #endif // SMALL_TREE_NODES
// Size of a "large" tree node: GenTreeCall is the largest node type.
6350 const size_t TREE_NODE_SZ_LARGE = sizeof(GenTreeCall);
6352 /*****************************************************************************
6353 * Types returned by GenTree::lvaLclVarRefs()
6358 VR_INVARIANT = 0x00, // an invariant value
6360 VR_IND_REF = 0x01, // an object reference
6361 VR_IND_SCL = 0x02, // a non-object reference
6362 VR_GLB_VAR = 0x04, // a global (clsVar)
6364 // Add a temp define to avoid merge conflict.
6365 #define VR_IND_PTR VR_IND_REF
6367 /*****************************************************************************/
6368 #endif // !GENTREE_H
6369 /*****************************************************************************/