1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
10 XX This is the node in the semantic tree graph. It represents the operation XX
11 XX corresponding to the node, and other information during code-gen. XX
13 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
17 /*****************************************************************************/
20 /*****************************************************************************/
22 #include "vartype.h" // For "var_types"
23 #include "target.h" // For "regNumber"
24 #include "ssaconfig.h" // For "SsaConfig::RESERVED_SSA_NUM"
26 #include "valuenumtype.h"
27 #include "simplerhash.h"
31 // Debugging GenTree is much easier if we add a magic virtual function to make the debugger able to figure out what type it's got.
32 // This is enabled by default in DEBUG. To enable it in RET builds (temporarily!), you need to change the build to define DEBUGGABLE_GENTREE=1,
33 // as well as pass /OPT:NOICF to the linker (or else all the vtables get merged, making the debugging value supplied by them useless).
34 // See protojit.nativeproj for a commented example of setting the build flags correctly.
35 #ifndef DEBUGGABLE_GENTREE
37 #define DEBUGGABLE_GENTREE 1
39 #define DEBUGGABLE_GENTREE 0
41 #endif // !DEBUGGABLE_GENTREE
43 // The SpecialCodeKind enum is used to indicate the type of special (unique)
44 // target block that will be targeted by an instruction.
46 // GenTreeBoundsChk nodes (SCK_RNGCHK_FAIL, SCK_ARG_EXCPN, SCK_ARG_RNG_EXCPN)
47 // - these nodes have a field (gtThrowKind) to indicate which kind
48 // GenTreeOps nodes, for which codegen will generate the branch
49 // - it will use the appropriate kind based on the opcode, though it's not
50 // clear why SCK_OVERFLOW == SCK_ARITH_EXCPN
51 // SCK_PAUSE_EXEC is not currently used.
56 SCK_RNGCHK_FAIL, // target when range check fails
57 SCK_PAUSE_EXEC, // target to stop (e.g. to allow GC)
58 SCK_DIV_BY_ZERO, // target for divide by zero (Not used on X86/X64)
59 SCK_ARITH_EXCPN, // target on arithmetic exception
60 SCK_OVERFLOW = SCK_ARITH_EXCPN, // target on overflow
61 SCK_ARG_EXCPN, // target on ArgumentException (currently used only for SIMD intrinsics)
62 SCK_ARG_RNG_EXCPN, // target on ArgumentOutOfRangeException (currently used only for SIMD intrinsics)
66 /*****************************************************************************/
68 DECLARE_TYPED_ENUM(genTreeOps,BYTE)
70 #define GTNODE(en,sn,cm,ok) GT_ ## en,
75 // GT_CNS_NATIVELONG is the gtOper symbol for GT_CNS_LNG or GT_CNS_INT, depending on the target.
76 // For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes
77 // For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
78 // In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG
81 GT_CNS_NATIVELONG = GT_CNS_INT,
83 GT_CNS_NATIVELONG = GT_CNS_LNG,
86 END_DECLARE_TYPED_ENUM(genTreeOps,BYTE)
88 /*****************************************************************************
90 * The following enum defines a set of bit flags that can be used
91 * to classify expression tree nodes. Note that some operators will
92 * have more than one bit set, as follows:
94 * GTK_CONST implies GTK_LEAF
95 * GTK_RELOP implies GTK_BINOP
96 * GTK_LOGOP implies GTK_BINOP
101 GTK_SPECIAL = 0x0000, // unclassified operator (special handling reqd)
103 GTK_CONST = 0x0001, // constant operator
104 GTK_LEAF = 0x0002, // leaf operator
105 GTK_UNOP = 0x0004, // unary operator
106 GTK_BINOP = 0x0008, // binary operator
107 GTK_RELOP = 0x0010, // comparison operator
108 GTK_LOGOP = 0x0020, // logical operator
109 GTK_ASGOP = 0x0040, // assignment operator
111 GTK_KINDMASK= 0x007F, // operator kind mask
113 GTK_COMMUTE = 0x0080, // commutative operator
115 GTK_EXOP = 0x0100, // Indicates that an oper for a node type that extends GenTreeOp (or GenTreeUnOp)
116 // by adding non-node fields to unary or binary operator.
118 GTK_LOCAL = 0x0200, // is a local access (load, store, phi)
120 /* Define composite value(s) */
122 GTK_SMPOP = (GTK_UNOP|GTK_BINOP|GTK_RELOP|GTK_LOGOP)
125 /*****************************************************************************/
127 #define SMALL_TREE_NODES 1
129 /*****************************************************************************/
131 DECLARE_TYPED_ENUM(gtCallTypes,BYTE)
133 CT_USER_FUNC, // User function
134 CT_HELPER, // Jit-helper
135 CT_INDIRECT, // Indirect call
137 CT_COUNT // fake entry (must be last)
139 END_DECLARE_TYPED_ENUM(gtCallTypes,BYTE)
142 /*****************************************************************************/
146 struct InlineCandidateInfo;
148 /*****************************************************************************/
150 // GT_FIELD nodes will be lowered into more "code-gen-able" representations, like
151 // GT_IND's of addresses, or GT_LCL_FLD nodes. We'd like to preserve the more abstract
152 // information, and will therefore annotate such lowered nodes with FieldSeq's. A FieldSeq
153 // represents a (possibly) empty sequence of fields. The fields are in the order
154 // in which they are dereferenced. The first field may be an object field or a struct field;
155 // all subsequent fields must be struct fields.
158 CORINFO_FIELD_HANDLE m_fieldHnd;
159 FieldSeqNode* m_next;
161 FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next) : m_fieldHnd(fieldHnd), m_next(next) {}
163 // returns true when this is the pseudo #FirstElem field sequence
164 bool IsFirstElemFieldSeq();
166 // returns true when this is the pseudo #ConstantIndex field sequence
167 bool IsConstantIndexFieldSeq();
169 // returns true when this is the the pseudo #FirstElem field sequence or the pseudo #ConstantIndex field sequence
170 bool IsPseudoField();
172 // Make sure this provides methods that allow it to be used as a KeyFuncs type in SimplerHash.
173 static int GetHashCode(FieldSeqNode fsn)
175 return static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_fieldHnd)) ^ static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_next));
178 static bool Equals(FieldSeqNode fsn1, FieldSeqNode fsn2)
180 return fsn1.m_fieldHnd == fsn2.m_fieldHnd && fsn1.m_next == fsn2.m_next;
184 // This class canonicalizes field sequences.
187 typedef SimplerHashTable<FieldSeqNode, /*KeyFuncs*/FieldSeqNode, FieldSeqNode*, JitSimplerHashBehavior> FieldSeqNodeCanonMap;
190 FieldSeqNodeCanonMap* m_canonMap;
192 static FieldSeqNode s_notAField; // No value, just exists to provide an address.
194 // Dummy variables to provide the addresses for the "pseudo field handle" statics below.
195 static int FirstElemPseudoFieldStruct;
196 static int ConstantIndexPseudoFieldStruct;
199 FieldSeqStore(IAllocator* alloc);
201 // Returns the (canonical in the store) singleton field sequence for the given handle.
202 FieldSeqNode* CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd);
204 // This is a special distinguished FieldSeqNode indicating that a constant does *not*
205 // represent a valid field sequence. This is "infectious", in the sense that appending it
206 // (on either side) to any field sequence yields the "NotAField()" sequence.
207 static FieldSeqNode* NotAField() { return &s_notAField; }
209 // Returns the (canonical in the store) field sequence representing the concatenation of
210 // the sequences represented by "a" and "b". Assumes that "a" and "b" are canonical; that is,
211 // they are the results of CreateSingleton, NotAField, or Append calls. If either of the arguments
212 // are the "NotAField" value, so is the result.
213 FieldSeqNode* Append(FieldSeqNode* a, FieldSeqNode* b);
215 // We have a few "pseudo" field handles:
217 // This treats the constant offset of the first element of something as if it were a field.
218 // Works for method table offsets of boxed structs, or first elem offset of arrays/strings.
219 static CORINFO_FIELD_HANDLE FirstElemPseudoField;
221 // If there is a constant index, we make a psuedo field to correspond to the constant added to
222 // offset of the indexed field. This keeps the field sequence structure "normalized", especially in the
223 // case where the element type is a struct, so we might add a further struct field offset.
224 static CORINFO_FIELD_HANDLE ConstantIndexPseudoField;
226 static bool IsPseudoField(CORINFO_FIELD_HANDLE hnd)
228 return hnd == FirstElemPseudoField || hnd == ConstantIndexPseudoField;
234 /*****************************************************************************/
236 typedef struct GenTree * GenTreePtr;
237 struct GenTreeArgList;
239 // Forward declarations of the subtypes
240 #define GTSTRUCT_0(fn, en) struct GenTree##fn;
241 #define GTSTRUCT_1(fn, en) struct GenTree##fn;
242 #define GTSTRUCT_2(fn, en, en2) struct GenTree##fn;
243 #define GTSTRUCT_3(fn, en, en2, en3) struct GenTree##fn;
244 #define GTSTRUCT_4(fn, en, en2, en3, en4) struct GenTree##fn;
245 #define GTSTRUCT_N(fn, ...) struct GenTree##fn;
246 #include "gtstructs.h"
248 /*****************************************************************************/
251 #include <pshpack4.h>
256 // We use GT_STRUCT_0 only for the category of simple ops.
257 #define GTSTRUCT_0(fn, en) GenTree##fn* As##fn() \
259 assert(this->OperIsSimple()); \
260 return reinterpret_cast<GenTree##fn*>(this); \
262 GenTree##fn& As##fn##Ref() { return *As##fn(); } \
263 __declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
264 #define GTSTRUCT_1(fn, en) GenTree##fn* As##fn() \
266 assert(this->gtOper == en); \
267 return reinterpret_cast<GenTree##fn*>(this); \
269 GenTree##fn& As##fn##Ref() { return *As##fn(); } \
270 __declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
271 #define GTSTRUCT_2(fn, en, en2) GenTree##fn* As##fn() \
273 assert(this->gtOper == en || this->gtOper == en2); \
274 return reinterpret_cast<GenTree##fn*>(this); \
276 GenTree##fn& As##fn##Ref() { return *As##fn(); } \
277 __declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
278 #define GTSTRUCT_3(fn, en, en2, en3) GenTree##fn* As##fn() \
280 assert(this->gtOper == en || this->gtOper == en2 || this->gtOper == en3); \
281 return reinterpret_cast<GenTree##fn*>(this); \
283 GenTree##fn& As##fn##Ref() { return *As##fn(); } \
284 __declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
286 #define GTSTRUCT_4(fn, en, en2, en3, en4) GenTree##fn* As##fn() \
288 assert(this->gtOper == en || this->gtOper == en2 || this->gtOper == en3 || this->gtOper == en4); \
289 return reinterpret_cast<GenTree##fn*>(this); \
291 GenTree##fn& As##fn##Ref() { return *As##fn(); } \
292 __declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
295 // VC does not optimize out this loop in retail even though the value it computes is unused
296 // so we need a separate version for non-debug
297 #define GTSTRUCT_N(fn, ...) GenTree##fn* As##fn() \
299 genTreeOps validOps[] = {__VA_ARGS__}; \
300 bool found = false; \
301 for (unsigned i=0; i<ArrLen(validOps); i++) { \
302 if (this->gtOper == validOps[i]) \
309 return reinterpret_cast<GenTree##fn*>(this); \
311 GenTree##fn& As##fn##Ref() { return *As##fn(); } \
312 __declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
314 #define GTSTRUCT_N(fn, ...) GenTree##fn* As##fn() \
316 return reinterpret_cast<GenTree##fn*>(this); \
318 GenTree##fn& As##fn##Ref() { return *As##fn(); } \
319 __declspec(property(get=As##fn##Ref)) GenTree##fn& gt##fn;
322 #include "gtstructs.h"
324 genTreeOps gtOper; // enum subtype BYTE
325 var_types gtType; // enum subtype BYTE
327 genTreeOps OperGet() const { return gtOper; }
328 var_types TypeGet() const { return gtType; }
331 genTreeOps gtOperSave; // Only used to save gtOper when we destroy a node, to aid debugging.
338 #define IS_CSE_INDEX(x) (x != 0)
339 #define IS_CSE_USE(x) (x > 0)
340 #define IS_CSE_DEF(x) (x < 0)
341 #define GET_CSE_INDEX(x) ((x > 0) ? x : -x)
342 #define TO_CSE_DEF(x) (-x)
344 signed char gtCSEnum; // 0 or the CSE index (negated if def)
345 // valid only for CSE expressions
347 #endif // FEATURE_ANYCSE
350 unsigned short gtAssertionNum; // 0 or Assertion table index
351 // valid only for non-GT_STMT nodes
353 bool HasAssertion() const { return gtAssertionNum != 0; }
354 void ClearAssertion() { gtAssertionNum = 0; }
356 unsigned short GetAssertion() const { return gtAssertionNum; }
357 void SetAssertion(unsigned short value) { assert((unsigned short)value == value); gtAssertionNum = (unsigned short)value; }
361 #if FEATURE_STACK_FP_X87
362 unsigned char gtFPlvl; // x87 stack depth at this node
363 void gtCopyFPlvl(GenTree * other) { gtFPlvl = other->gtFPlvl; }
364 void gtSetFPlvl(unsigned level) { noway_assert(FitsIn<unsigned char>(level)); gtFPlvl = (unsigned char)level; }
365 #else // FEATURE_STACK_FP_X87
366 void gtCopyFPlvl(GenTree * other) { }
367 void gtSetFPlvl(unsigned level) { }
368 #endif // FEATURE_STACK_FP_X87
371 // Cost metrics on the node. Don't allow direct access to the variable for setting.
377 // You are not allowed to read the cost values before they have been set in gtSetEvalOrder().
378 // Keep track of whether the costs have been initialized, and assert if they are read before being initialized.
379 // Obviously, this information does need to be initialized when a node is created.
380 // This is public so the dumpers can see it.
382 bool gtCostsInitialized;
385 #define MAX_COST UCHAR_MAX
386 #define IND_COST_EX 3 // execution cost for an indirection
388 __declspec(property(get=GetCostEx))
389 unsigned char gtCostEx; // estimate of expression execution cost
391 __declspec(property(get=GetCostSz))
392 unsigned char gtCostSz; // estimate of expression code size cost
394 unsigned char GetCostEx() const { assert(gtCostsInitialized); return _gtCostEx; }
395 unsigned char GetCostSz() const { assert(gtCostsInitialized); return _gtCostSz; }
397 // Set the costs. They are always both set at the same time.
398 // Don't use the "put" property: force calling this function, to make it more obvious in the few places
399 // that set the values.
400 // Note that costs are only set in gtSetEvalOrder() and its callees.
401 void SetCosts(unsigned costEx, unsigned costSz)
403 assert(costEx != (unsigned)-1); // looks bogus
404 assert(costSz != (unsigned)-1); // looks bogus
405 INDEBUG(gtCostsInitialized = true;)
407 _gtCostEx = (costEx > MAX_COST) ? MAX_COST : (unsigned char)costEx;
408 _gtCostSz = (costSz > MAX_COST) ? MAX_COST : (unsigned char)costSz;
411 // Opimized copy function, to avoid the SetCosts() function comparisons, and make it more clear that a node copy is happening.
412 void CopyCosts(const GenTree* const tree)
414 INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;) // If the 'tree' costs aren't initialized, we'll hit an assert below.
415 _gtCostEx = tree->gtCostEx;
416 _gtCostSz = tree->gtCostSz;
419 // Same as CopyCosts, but avoids asserts if the costs we are copying have not been initialized.
420 // This is because the importer, for example, clones nodes, before these costs have been initialized.
421 // Note that we directly access the 'tree' costs, not going through the accessor functions (either
422 // directly or through the properties).
423 void CopyRawCosts(const GenTree* const tree)
425 INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;)
426 _gtCostEx = tree->_gtCostEx;
427 _gtCostSz = tree->_gtCostSz;
432 unsigned char _gtCostEx; // estimate of expression execution cost
433 unsigned char _gtCostSz; // estimate of expression code size cost
436 // Register or register pair number of the node.
443 GT_REGTAG_NONE, // Nothing has been assigned to _gtRegNum/_gtRegPair
444 GT_REGTAG_REG, // _gtRegNum has been assigned
445 #if CPU_LONG_USES_REGPAIR
446 GT_REGTAG_REGPAIR // _gtRegPair has been assigned
449 genRegTag GetRegTag() const
451 #if CPU_LONG_USES_REGPAIR
452 assert(gtRegTag == GT_REGTAG_NONE || gtRegTag == GT_REGTAG_REG || gtRegTag == GT_REGTAG_REGPAIR);
454 assert(gtRegTag == GT_REGTAG_NONE || gtRegTag == GT_REGTAG_REG);
459 genRegTag gtRegTag; // What is in _gtRegNum/_gtRegPair?
466 // NOTE: After LSRA, one of these values may be valid even if GTF_REG_VAL is not set in gtFlags.
467 // They store the register assigned to the node. If a register is not assigned, _gtRegNum is set to REG_NA
468 // or _gtRegPair is set to REG_PAIR_NONE, depending on the node type.
469 regNumberSmall _gtRegNum; // which register the value is in
470 regPairNoSmall _gtRegPair; // which register pair the value is in
475 // The register number is stored in a small format (8 bits), but the getters return and the setters take
476 // a full-size (unsigned) format, to localize the casts here.
478 __declspec(property(get=GetRegNum,put=SetRegNum))
481 // for codegen purposes, is this node a subnode of its parent
482 bool isContained() const;
484 bool isContainedIndir() const;
486 bool isIndirAddrMode();
488 bool isIndir() const;
490 bool isContainedIntOrIImmed() const { return isContained() && IsCnsIntOrI(); }
492 bool isContainedFltOrDblImmed() const { return isContained() && (OperGet() == GT_CNS_DBL); }
494 bool isLclField() const { return OperGet() == GT_LCL_FLD || OperGet() == GT_STORE_LCL_FLD; }
496 bool isContainedLclField() const { return isContained() && isLclField(); }
498 bool isContainedLclVar() const { return isContained() && (OperGet() == GT_LCL_VAR); }
500 bool isContainedSpillTemp() const;
502 // Indicates whether it is a memory op.
503 // Right now it includes Indir and LclField ops.
504 bool isMemoryOp() const { return isIndir() || isLclField(); }
506 bool isContainedMemoryOp() const
508 return (isContained() && isMemoryOp()) || isContainedLclVar() || isContainedSpillTemp();
511 regNumber GetRegNum() const
513 assert((gtRegTag == GT_REGTAG_REG) ||
514 (gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
515 regNumber reg = (regNumber) _gtRegNum;
516 assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
522 void SetRegNum(regNumber reg)
524 assert(reg >= REG_FIRST &&
526 // Make sure the upper bits of _gtRegPair are clear
527 _gtRegPair = (regPairNoSmall) 0;
528 _gtRegNum = (regNumberSmall) reg;
529 INDEBUG(gtRegTag = GT_REGTAG_REG;)
530 assert(_gtRegNum == reg);
533 #if CPU_LONG_USES_REGPAIR
534 __declspec(property(get=GetRegPair,put=SetRegPair))
537 regPairNo GetRegPair() const
539 assert((gtRegTag == GT_REGTAG_REGPAIR) ||
540 (gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
541 regPairNo regPair = (regPairNo) _gtRegPair;
542 assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads undefined values
543 (regPair >= REG_PAIR_FIRST &&
544 regPair <= REG_PAIR_LAST) ||
545 (regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
549 void SetRegPair(regPairNo regPair)
551 assert((regPair >= REG_PAIR_FIRST &&
552 regPair <= REG_PAIR_LAST) ||
553 (regPair == REG_PAIR_NONE)); // allow initializing to an undefined value
554 _gtRegPair = (regPairNoSmall) regPair;
555 INDEBUG(gtRegTag = GT_REGTAG_REGPAIR;)
556 assert(_gtRegPair == regPair);
560 // Copy the _gtRegNum/_gtRegPair/gtRegTag fields
561 void CopyReg(GenTreePtr from);
563 void gtClearReg(Compiler* compiler);
565 bool gtHasReg() const;
567 regMaskTP gtGetRegMask() const;
569 unsigned gtFlags; // see GTF_xxxx below
572 unsigned gtDebugFlags; // see GTF_DEBUG_xxx below
573 #endif // defined(DEBUG)
575 ValueNumPair gtVNPair;
577 regMaskSmall gtRsvdRegs; // set of fixed trashed registers
578 #ifdef LEGACY_BACKEND
579 regMaskSmall gtUsedRegs; // set of used (trashed) registers
580 #endif // LEGACY_BACKEND
582 #ifndef LEGACY_BACKEND
583 TreeNodeInfo gtLsraInfo;
584 #endif // !LEGACY_BACKEND
586 void SetVNsFromNode(GenTreePtr tree)
588 gtVNPair = tree->gtVNPair;
591 ValueNum GetVN(ValueNumKind vnk) const
593 if (vnk == VNK_Liberal)
595 return gtVNPair.GetLiberal();
599 assert(vnk == VNK_Conservative);
600 return gtVNPair.GetConservative();
603 void SetVN(ValueNumKind vnk, ValueNum vn)
605 if (vnk == VNK_Liberal)
607 return gtVNPair.SetLiberal(vn);
611 assert(vnk == VNK_Conservative);
612 return gtVNPair.SetConservative(vn);
615 void SetVNs(ValueNumPair vnp)
621 gtVNPair = ValueNumPair(); // Initializes both elements to "NoVN".
624 //---------------------------------------------------------------------
625 // The first set of flags can be used with a large set of nodes, and
626 // thus they must all have distinct values. That is, one can test any
627 // expression node for one of these flags.
628 //---------------------------------------------------------------------
630 #define GTF_ASG 0x00000001 // sub-expression contains an assignment
631 #define GTF_CALL 0x00000002 // sub-expression contains a func. call
632 #define GTF_EXCEPT 0x00000004 // sub-expression might throw an exception
633 #define GTF_GLOB_REF 0x00000008 // sub-expression uses global variable(s)
634 #define GTF_ORDER_SIDEEFF 0x00000010 // sub-expression has a re-ordering side effect
636 // If you set these flags, make sure that code:gtExtractSideEffList knows how to find the tree,
637 // otherwise the C# (run csc /o-)
638 // var v = side_eff_operation
639 // with no use of v will drop your tree on the floor.
640 #define GTF_PERSISTENT_SIDE_EFFECTS (GTF_ASG|GTF_CALL)
641 #define GTF_SIDE_EFFECT (GTF_PERSISTENT_SIDE_EFFECTS|GTF_EXCEPT)
642 #define GTF_GLOB_EFFECT (GTF_SIDE_EFFECT|GTF_GLOB_REF)
643 #define GTF_ALL_EFFECT (GTF_GLOB_EFFECT|GTF_ORDER_SIDEEFF)
645 // The extra flag GTF_IS_IN_CSE is used to tell the consumer of these flags
646 // that we are calling in the context of performing a CSE, thus we
647 // should allow the run-once side effects of running a class constructor.
649 // The only requirement of this flag is that it not overlap any of the
650 // side-effect flags. The actual bit used is otherwise arbitrary.
651 #define GTF_IS_IN_CSE GTF_MAKE_CSE
652 #define GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE (GTF_ASG|GTF_CALL|GTF_IS_IN_CSE)
654 // Can any side-effects be observed externally, say by a caller method?
655 // For assignments, only assignments to global memory can be observed
656 // externally, whereas simple assignments to local variables can not.
658 // Be careful when using this inside a "try" protected region as the
659 // order of assignments to local variables would need to be preserved
660 // wrt side effects if the variables are alive on entry to the
661 // "catch/finally" region. In such cases, even assignments to locals
662 // will have to be restricted.
663 #define GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(flags) \
664 (((flags) & (GTF_CALL|GTF_EXCEPT)) || \
665 (((flags) & (GTF_ASG|GTF_GLOB_REF)) == (GTF_ASG|GTF_GLOB_REF)))
667 #define GTF_REVERSE_OPS 0x00000020 // operand op2 should be evaluated before op1 (normally, op1 is evaluated first and op2 is evaluated second)
668 #define GTF_REG_VAL 0x00000040 // operand is sitting in a register (or part of a TYP_LONG operand is sitting in a register)
670 #define GTF_SPILLED 0x00000080 // the value has been spilled
672 #ifdef LEGACY_BACKEND
673 #define GTF_SPILLED_OPER 0x00000100 // op1 has been spilled
674 #define GTF_SPILLED_OP2 0x00000200 // op2 has been spilled
676 #define GTF_NOREG_AT_USE 0x00000100 // tree node is in memory at the point of use
677 #endif // LEGACY_BACKEND
679 #define GTF_REDINDEX_CHECK 0x00000100 // Used for redundant range checks. Disjoint from GTF_SPILLED_OPER
681 #define GTF_ZSF_SET 0x00000400 // the zero(ZF) and sign(SF) flags set to the operand
682 #if FEATURE_SET_FLAGS
683 #define GTF_SET_FLAGS 0x00000800 // Requires that codegen for this node set the flags
684 // Use gtSetFlags() to check this flags
686 #define GTF_IND_NONFAULTING 0x00000800 // An indir that cannot fault. GTF_SET_FLAGS is not used on indirs
688 #define GTF_MAKE_CSE 0x00002000 // Hoisted Expression: try hard to make this into CSE (see optPerformHoistExpr)
689 #define GTF_DONT_CSE 0x00004000 // don't bother CSE'ing this expr
690 #define GTF_COLON_COND 0x00008000 // this node is conditionally executed (part of ? :)
692 #define GTF_NODE_MASK (GTF_COLON_COND)
694 #define GTF_BOOLEAN 0x00040000 // value is known to be 0/1
696 #define GTF_SMALL_OK 0x00080000 // actual small int sufficient
698 #define GTF_UNSIGNED 0x00100000 // with GT_CAST: the source operand is an unsigned type
699 // with operators: the specified node is an unsigned operator
701 #define GTF_LATE_ARG 0x00200000 // the specified node is evaluated to a temp in the arg list, and this temp is added to gtCallLateArgs.
703 #define GTF_SPILL 0x00400000 // needs to be spilled here
704 #define GTF_SPILL_HIGH 0x00040000 // shared with GTF_BOOLEAN
706 #define GTF_COMMON_MASK 0x007FFFFF // mask of all the flags above
708 #define GTF_REUSE_REG_VAL 0x00800000 // This is set by the register allocator on nodes whose value already exists in the
709 // register assigned to this node, so the code generator does not have to generate
710 // code to produce the value.
711 // It is currently used only on constant nodes.
712 // It CANNOT be set on var (GT_LCL*) nodes, or on indir (GT_IND or GT_STOREIND) nodes, since
713 // it is not needed for lclVars and is highly unlikely to be useful for indir nodes
715 //---------------------------------------------------------------------
716 // The following flags can be used only with a small set of nodes, and
717 // thus their values need not be distinct (other than within the set
718 // that goes with a particular node/nodes, of course). That is, one can
719 // only test for one of these flags if the 'gtOper' value is tested as
720 // well to make sure it's the right operator for the particular flag.
721 //---------------------------------------------------------------------
723 // NB: GTF_VAR_* and GTF_REG_* share the same namespace of flags, because
724 // GT_LCL_VAR nodes may be changed to GT_REG_VAR nodes without resetting
725 // the flags. These are also used by GT_LCL_FLD.
726 #define GTF_VAR_DEF 0x80000000 // GT_LCL_VAR -- this is a definition
727 #define GTF_VAR_USEASG 0x40000000 // GT_LCL_VAR -- this is a use/def for a x<op>=y
728 #define GTF_VAR_USEDEF 0x20000000 // GT_LCL_VAR -- this is a use/def as in x=x+y (only the lhs x is tagged)
729 #define GTF_VAR_CAST 0x10000000 // GT_LCL_VAR -- has been explictly cast (variable node may not be type of local)
730 #define GTF_VAR_ITERATOR 0x08000000 // GT_LCL_VAR -- this is a iterator reference in the loop condition
731 #define GTF_VAR_CLONED 0x01000000 // GT_LCL_VAR -- this node has been cloned or is a clone
732 // Relevant for inlining optimizations (see fgInlinePrependStatements)
734 // TODO-Cleanup: Currently, GTF_REG_BIRTH is used only by stackfp
735 // We should consider using it more generally for VAR_BIRTH, instead of
736 // GTF_VAR_DEF && !GTF_VAR_USEASG
737 #define GTF_REG_BIRTH 0x04000000 // GT_REG_VAR -- enregistered variable born here
738 #define GTF_VAR_DEATH 0x02000000 // GT_LCL_VAR, GT_REG_VAR -- variable dies here (last use)
740 #define GTF_VAR_ARR_INDEX 0x00000020 // The variable is part of (the index portion of) an array index expression.
741 // Shares a value with GTF_REVERSE_OPS, which is meaningless for local var.
743 #define GTF_LIVENESS_MASK (GTF_VAR_DEF|GTF_VAR_USEASG|GTF_VAR_USEDEF|GTF_REG_BIRTH|GTF_VAR_DEATH)
745 #define GTF_CALL_UNMANAGED 0x80000000 // GT_CALL -- direct call to unmanaged code
746 #define GTF_CALL_INLINE_CANDIDATE 0x40000000 // GT_CALL -- this call has been marked as an inline candidate
748 #define GTF_CALL_VIRT_KIND_MASK 0x30000000
749 #define GTF_CALL_NONVIRT 0x00000000 // GT_CALL -- a non virtual call
750 #define GTF_CALL_VIRT_STUB 0x10000000 // GT_CALL -- a stub-dispatch virtual call
751 #define GTF_CALL_VIRT_VTABLE 0x20000000 // GT_CALL -- a vtable-based virtual call
753 #define GTF_CALL_NULLCHECK 0x08000000 // GT_CALL -- must check instance pointer for null
754 #define GTF_CALL_POP_ARGS 0x04000000 // GT_CALL -- caller pop arguments?
755 #define GTF_CALL_HOISTABLE 0x02000000 // GT_CALL -- call is hoistable
756 #define GTF_CALL_REG_SAVE 0x01000000 // GT_CALL -- This call preserves all integer regs
757 // For additional flags for GT_CALL node see GTF_CALL_M_
759 #define GTF_NOP_DEATH 0x40000000 // GT_NOP -- operand dies here
761 #define GTF_FLD_NULLCHECK 0x80000000 // GT_FIELD -- need to nullcheck the "this" pointer
762 #define GTF_FLD_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE
764 #define GTF_INX_RNGCHK 0x80000000 // GT_INDEX -- the array reference should be range-checked.
765 #define GTF_INX_REFARR_LAYOUT 0x20000000 // GT_INDEX -- same as GTF_IND_REFARR_LAYOUT
766 #define GTF_INX_STRING_LAYOUT 0x40000000 // GT_INDEX -- this uses the special string array layout
768 #define GTF_IND_VOLATILE 0x40000000 // GT_IND -- the load or store must use volatile sematics (this is a nop on X86)
769 #define GTF_IND_REFARR_LAYOUT 0x20000000 // GT_IND -- the array holds object refs (only effects layout of Arrays)
770 #define GTF_IND_TGTANYWHERE 0x10000000 // GT_IND -- the target could be anywhere
771 #define GTF_IND_TLS_REF 0x08000000 // GT_IND -- the target is accessed via TLS
772 #define GTF_IND_ASG_LHS 0x04000000 // GT_IND -- this GT_IND node is (the effective val) of the LHS of an assignment; don't evaluate it independently.
773 #define GTF_IND_UNALIGNED 0x02000000 // GT_IND -- the load or store is unaligned (we assume worst case alignment of 1 byte)
774 #define GTF_IND_INVARIANT 0x01000000 // GT_IND -- the target is invariant (a prejit indirection)
775 #define GTF_IND_ARR_LEN 0x80000000 // GT_IND -- the indirection represents an array length (of the REF contribution to its argument).
776 #define GTF_IND_ARR_INDEX 0x00800000 // GT_IND -- the indirection represents an (SZ) array index
778 #define GTF_IND_FLAGS (GTF_IND_VOLATILE|GTF_IND_REFARR_LAYOUT|GTF_IND_TGTANYWHERE|GTF_IND_NONFAULTING|\
779 GTF_IND_TLS_REF|GTF_IND_UNALIGNED|GTF_IND_INVARIANT|GTF_IND_ARR_INDEX)
781 #define GTF_CLS_VAR_ASG_LHS 0x04000000 // GT_CLS_VAR -- this GT_CLS_VAR node is (the effective val) of the LHS of an assignment; don't evaluate it independently.
783 #define GTF_ADDR_ONSTACK 0x80000000 // GT_ADDR -- this expression is guaranteed to be on the stack
786 #define GTF_ADDRMODE_NO_CSE 0x80000000 // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex addressing mode
788 #define GTF_MUL_64RSLT 0x40000000 // GT_MUL -- produce 64-bit result
790 #define GTF_MOD_INT_RESULT 0x80000000 // GT_MOD, -- the real tree represented by this
791 // GT_UMOD node evaluates to an int even though
792 // its type is long. The result is
793 // placed in the low member of the
796 #define GTF_RELOP_NAN_UN 0x80000000 // GT_<relop> -- Is branch taken if ops are NaN?
797 #define GTF_RELOP_JMP_USED 0x40000000 // GT_<relop> -- result of compare used for jump or ?:
798 #define GTF_RELOP_QMARK 0x20000000 // GT_<relop> -- the node is the condition for ?:
799 #define GTF_RELOP_SMALL 0x10000000 // GT_<relop> -- We should use a byte or short sized compare (op1->gtType is the small type)
800 #define GTF_RELOP_ZTT 0x08000000 // GT_<relop> -- Loop test cloned for converting while-loops into do-while with explicit "loop test" in the header block.
802 #define GTF_QMARK_CAST_INSTOF 0x80000000 // GT_QMARK -- Is this a top (not nested) level qmark created for castclass or instanceof?
804 #define GTF_BOX_VALUE 0x80000000 // GT_BOX -- "box" is on a value type
806 #define GTF_ICON_HDL_MASK 0xF0000000 // Bits used by handle types below
808 #define GTF_ICON_SCOPE_HDL 0x10000000 // GT_CNS_INT -- constant is a scope handle
809 #define GTF_ICON_CLASS_HDL 0x20000000 // GT_CNS_INT -- constant is a class handle
810 #define GTF_ICON_METHOD_HDL 0x30000000 // GT_CNS_INT -- constant is a method handle
811 #define GTF_ICON_FIELD_HDL 0x40000000 // GT_CNS_INT -- constant is a field handle
812 #define GTF_ICON_STATIC_HDL 0x50000000 // GT_CNS_INT -- constant is a handle to static data
813 #define GTF_ICON_STR_HDL 0x60000000 // GT_CNS_INT -- constant is a string handle
814 #define GTF_ICON_PSTR_HDL 0x70000000 // GT_CNS_INT -- constant is a ptr to a string handle
815 #define GTF_ICON_PTR_HDL 0x80000000 // GT_CNS_INT -- constant is a ldptr handle
816 #define GTF_ICON_VARG_HDL 0x90000000 // GT_CNS_INT -- constant is a var arg cookie handle
817 #define GTF_ICON_PINVKI_HDL 0xA0000000 // GT_CNS_INT -- constant is a pinvoke calli handle
818 #define GTF_ICON_TOKEN_HDL 0xB0000000 // GT_CNS_INT -- constant is a token handle
819 #define GTF_ICON_TLS_HDL 0xC0000000 // GT_CNS_INT -- constant is a TLS ref with offset
820 #define GTF_ICON_FTN_ADDR 0xD0000000 // GT_CNS_INT -- constant is a function address
821 #define GTF_ICON_CIDMID_HDL 0xE0000000 // GT_CNS_INT -- constant is a class or module ID handle
822 #define GTF_ICON_BBC_PTR 0xF0000000 // GT_CNS_INT -- constant is a basic block count pointer
824 #define GTF_ICON_FIELD_OFF 0x08000000 // GT_CNS_INT -- constant is a field offset
826 #define GTF_BLK_HASGCPTR 0x80000000 // GT_COPYBLK -- This struct copy will copy GC Pointers
827 #define GTF_BLK_VOLATILE 0x40000000 // GT_INITBLK/GT_COPYBLK -- is a volatile block operation
828 #define GTF_BLK_UNALIGNED 0x02000000 // GT_INITBLK/GT_COPYBLK -- is an unaligned block operation
830 #define GTF_OVERFLOW 0x10000000 // GT_ADD, GT_SUB, GT_MUL, - Need overflow check
831 // GT_ASG_ADD, GT_ASG_SUB,
833 // Use gtOverflow(Ex)() to check this flag
835 #define GTF_NO_OP_NO 0x80000000 // GT_NO_OP --Have the codegenerator generate a special nop
837 //----------------------------------------------------------------
839 #define GTF_STMT_CMPADD 0x80000000 // GT_STMT -- added by compiler
840 #define GTF_STMT_HAS_CSE 0x40000000 // GT_STMT -- CSE def or use was subsituted
841 #define GTF_STMT_TOP_LEVEL 0x20000000 // GT_STMT -- Top-level statement - true iff gtStmtList->gtPrev == nullptr
842 // True for all stmts when in FGOrderTree
843 #define GTF_STMT_SKIP_LOWER 0x10000000 // GT_STMT -- Skip lowering if we already lowered an embedded stmt.
845 //----------------------------------------------------------------
848 #define GTF_DEBUG_NONE 0x00000000 // No debug flags.
850 #define GTF_DEBUG_NODE_MORPHED 0x00000001 // the node has been morphed (in the global morphing phase)
851 #define GTF_DEBUG_NODE_SMALL 0x00000002
852 #define GTF_DEBUG_NODE_LARGE 0x00000004
854 #define GTF_DEBUG_NODE_MASK 0x00000007 // These flags are all node (rather than operation) properties.
856 #define GTF_DEBUG_VAR_CSE_REF 0x00800000 // GT_LCL_VAR -- This is a CSE LCL_VAR node
857 #endif // defined(DEBUG)
864 unsigned gtSeqNum; // liveness traversal order within the current statement
868 const unsigned short gtOperKindTable[];
871 unsigned OperKind(unsigned gtOper)
873 assert(gtOper < GT_COUNT);
875 return gtOperKindTable[gtOper];
878 unsigned OperKind() const
880 assert(gtOper < GT_COUNT);
882 return gtOperKindTable[gtOper];
885 static bool IsExOp(unsigned opKind)
887 return (opKind & GTK_EXOP) != 0;
889 // Returns the operKind with the GTK_EX_OP bit removed (the
890 // kind of operator, unary or binary, that is extended).
891 static unsigned StripExOp(unsigned opKind)
893 return opKind & ~GTK_EXOP;
897 bool OperIsConst(genTreeOps gtOper)
899 return (OperKind(gtOper) & GTK_CONST ) != 0;
902 bool OperIsConst() const
904 return (OperKind(gtOper) & GTK_CONST ) != 0;
908 bool OperIsLeaf(genTreeOps gtOper)
910 return (OperKind(gtOper) & GTK_LEAF ) != 0;
913 bool OperIsLeaf() const
915 return (OperKind(gtOper) & GTK_LEAF ) != 0;
919 bool OperIsCompare(genTreeOps gtOper)
921 return (OperKind(gtOper) & GTK_RELOP ) != 0;
925 bool OperIsLocal(genTreeOps gtOper)
927 bool result = (OperKind(gtOper) & GTK_LOCAL) != 0;
929 (gtOper == GT_LCL_VAR ||
930 gtOper == GT_PHI_ARG ||
931 gtOper == GT_REG_VAR ||
932 gtOper == GT_LCL_FLD ||
933 gtOper == GT_STORE_LCL_VAR ||
934 gtOper == GT_STORE_LCL_FLD));
939 bool OperIsBlkOp(genTreeOps gtOper)
941 return (gtOper == GT_INITBLK ||
942 gtOper == GT_COPYBLK ||
943 gtOper == GT_COPYOBJ);
947 bool OperIsCopyBlkOp(genTreeOps gtOper)
949 return (gtOper == GT_COPYOBJ || gtOper == GT_COPYBLK);
954 bool OperIsLocalAddr(genTreeOps gtOper)
956 return (gtOper == GT_LCL_VAR_ADDR ||
957 gtOper == GT_LCL_FLD_ADDR);
961 bool OperIsScalarLocal(genTreeOps gtOper)
963 return (gtOper == GT_LCL_VAR ||
964 gtOper == GT_REG_VAR ||
965 gtOper == GT_STORE_LCL_VAR);
969 bool OperIsNonPhiLocal(genTreeOps gtOper)
971 return OperIsLocal(gtOper) && (gtOper != GT_PHI_ARG);
975 bool OperIsLocalRead(genTreeOps gtOper)
977 return (OperIsLocal(gtOper) && !OperIsLocalStore(gtOper));
981 bool OperIsLocalStore(genTreeOps gtOper)
983 return (gtOper == GT_STORE_LCL_VAR ||
984 gtOper == GT_STORE_LCL_FLD);
989 bool OperIsAddrMode(genTreeOps gtOper)
991 return (gtOper == GT_LEA);
994 bool OperIsBlkOp() const;
995 bool OperIsCopyBlkOp() const;
996 bool OperIsInitBlkOp() const;
997 bool OperIsDynBlkOp();
999 bool OperIsPutArgStk() const
1001 return gtOper == GT_PUTARG_STK;
1004 bool OperIsAddrMode() const
1006 return OperIsAddrMode(OperGet());
1009 bool OperIsLocal() const
1011 return OperIsLocal(OperGet());
1014 bool OperIsLocalAddr() const
1016 return OperIsLocalAddr(OperGet());
1019 bool OperIsScalarLocal() const
1021 return OperIsScalarLocal(OperGet());
1024 bool OperIsNonPhiLocal() const
1026 return OperIsNonPhiLocal(OperGet());
1029 bool OperIsLocalStore() const
1031 return OperIsLocalStore(OperGet());
1034 bool OperIsLocalRead() const
1036 return OperIsLocalRead(OperGet());
1039 bool OperIsCompare()
1041 return (OperKind(gtOper) & GTK_RELOP ) != 0;
1045 bool OperIsLogical(genTreeOps gtOper)
1047 return (OperKind(gtOper) & GTK_LOGOP ) != 0;
1050 bool OperIsLogical() const
1052 return (OperKind(gtOper) & GTK_LOGOP ) != 0;
1056 bool OperIsShift(genTreeOps gtOper)
1058 return (gtOper == GT_LSH) ||
1059 (gtOper == GT_RSH) ||
1063 bool OperIsShift() const
1065 return OperIsShift(OperGet());
1069 bool OperIsRotate(genTreeOps gtOper)
1071 return (gtOper == GT_ROL) ||
1075 bool OperIsRotate() const
1077 return OperIsRotate(OperGet());
1081 bool OperIsShiftOrRotate(genTreeOps gtOper)
1083 return OperIsShift(gtOper) ||
1084 OperIsRotate(gtOper);
1087 bool OperIsShiftOrRotate() const
1089 return OperIsShiftOrRotate(OperGet());
1092 bool OperIsArithmetic() const
1094 genTreeOps op = OperGet();
1108 || OperIsShiftOrRotate(op);
1111 #if !defined(LEGACY_BACKEND) && !defined(_TARGET_64BIT_)
1113 bool OperIsHigh(genTreeOps gtOper)
1128 bool OperIsHigh() const
1130 return OperIsHigh(OperGet());
1132 #endif // !defined(LEGACY_BACKEND) && !defined(_TARGET_64BIT_)
1135 bool OperIsUnary(genTreeOps gtOper)
1137 return (OperKind(gtOper) & GTK_UNOP ) != 0;
1140 bool OperIsUnary() const
1142 return OperIsUnary(gtOper);
1146 bool OperIsBinary(genTreeOps gtOper)
1148 return (OperKind(gtOper) & GTK_BINOP ) != 0;
1151 bool OperIsBinary() const
1153 return OperIsBinary(gtOper);
1157 bool OperIsSimple(genTreeOps gtOper)
1159 return (OperKind(gtOper) & GTK_SMPOP ) != 0;
1163 bool OperIsSpecial(genTreeOps gtOper)
1165 return ((OperKind(gtOper) & GTK_KINDMASK) == GTK_SPECIAL);
1168 bool OperIsSimple() const
1170 return OperIsSimple(gtOper);
1174 bool isCommutativeSIMDIntrinsic();
1176 bool isCommutativeSIMDIntrinsic()
1180 #endif // FEATURE_SIMD
1183 bool OperIsCommutative(genTreeOps gtOper)
1185 return (OperKind(gtOper) & GTK_COMMUTE) != 0;
1188 bool OperIsCommutative()
1190 return OperIsCommutative(gtOper) || (OperIsSIMD(gtOper) && isCommutativeSIMDIntrinsic());
1194 bool OperIsAssignment(genTreeOps gtOper)
1196 return (OperKind(gtOper) & GTK_ASGOP) != 0;
1199 bool OperIsAssignment() const
1201 return OperIsAssignment(gtOper);
1205 bool OperIsIndir(genTreeOps gtOper)
1207 return gtOper == GT_IND || gtOper == GT_STOREIND || gtOper == GT_NULLCHECK;
1210 bool OperIsIndir() const
1212 return OperIsIndir(gtOper);
1216 bool OperIsImplicitIndir(genTreeOps gtOper)
1237 bool OperIsImplicitIndir() const
1239 return OperIsImplicitIndir(gtOper);
1242 bool OperIsStore() const
1244 return OperIsStore(gtOper);
1248 bool OperIsStore(genTreeOps gtOper)
1250 return (gtOper == GT_STOREIND
1251 || gtOper == GT_STORE_LCL_VAR
1252 || gtOper == GT_STORE_LCL_FLD
1253 || gtOper == GT_STORE_CLS_VAR);
1257 bool OperIsAtomicOp(genTreeOps gtOper)
1259 return (gtOper == GT_XADD
1260 || gtOper == GT_XCHG
1261 || gtOper == GT_LOCKADD
1262 || gtOper == GT_CMPXCHG);
1265 bool OperIsAtomicOp()
1267 return OperIsAtomicOp(gtOper);
1270 // This is basically here for cleaner FEATURE_SIMD #ifdefs.
1272 bool OperIsSIMD(genTreeOps gtOper)
1275 return gtOper == GT_SIMD;
1276 #else // !FEATURE_SIMD
1278 #endif // !FEATURE_SIMD
1283 return OperIsSIMD(gtOper);
1286 // Requires that "op" is an op= operator. Returns
1287 // the corresponding "op".
1289 genTreeOps OpAsgToOper(genTreeOps op);
1292 bool NullOp1Legal() const
1294 assert(OperIsSimple(gtOper));
1297 case GT_PHI: case GT_LEA: case GT_RETFILT: case GT_NOP:
1300 return gtType == TYP_VOID;
1306 bool NullOp2Legal() const
1308 assert(OperIsSimple(gtOper));
1309 if (!OperIsBinary(gtOper))
1324 #endif // !FEATURE_SIMD
1332 inline bool RequiresNonNullOp2(genTreeOps oper);
1333 bool IsListForMultiRegArg();
1336 inline bool IsFPZero();
1337 inline bool IsIntegralConst(ssize_t constVal);
1339 inline bool IsBoxedValue();
1343 return gtOper == GT_LIST;
1346 inline GenTreePtr MoveNext();
1348 inline GenTreePtr Current();
1350 inline GenTreePtr *pCurrent();
1352 inline GenTreePtr gtGetOp1();
1354 inline GenTreePtr gtGetOp2();
1356 // Given a tree node, if this is a child of that node, return the pointer to the child node so that it
1357 // can be modified; otherwise, return null.
1358 GenTreePtr* gtGetChildPointer(GenTreePtr parent);
1360 // Get the parent of this node, and optionally capture the pointer to the child so that it can be modified.
1361 GenTreePtr gtGetParent(GenTreePtr** parentChildPtrPtr);
1363 inline GenTreePtr gtEffectiveVal(bool commaOnly = false);
1365 // Return the child of this node if it is a GT_RELOAD or GT_COPY; otherwise simply return the node itself
1366 inline GenTree* gtSkipReloadOrCopy();
1368 // Returns true if it is a call node returning its value in more than one register
1369 inline bool IsMultiRegCall() const;
1371 // Returns true if it is a GT_COPY or GT_RELOAD node
1372 inline bool IsCopyOrReload() const;
1374 // Returns true if it is a GT_COPY or GT_RELOAD of a multi-reg call node
1375 inline bool IsCopyOrReloadOfMultiRegCall() const;
1377 bool OperMayThrow();
1379 unsigned GetScaleIndexMul();
1380 unsigned GetScaleIndexShf();
1381 unsigned GetScaledIndex();
1383 // Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer
1384 // (<= 32 bit) constant. If it returns true, it sets "*offset" to (one of the) constant value(s), and
1385 // "*addr" to the other argument.
1386 bool IsAddWithI32Const(GenTreePtr* addr, int* offset);
1388 // Insert 'node' after this node in execution order.
1389 void InsertAfterSelf(GenTree* node, GenTreeStmt* stmt = nullptr);
1393 #if SMALL_TREE_NODES
1395 unsigned char s_gtNodeSizes[];
1399 void InitNodeSize();
1401 size_t GetNodeSize() const;
1403 bool IsNodeProperlySized() const;
1405 void CopyFrom(const GenTree* src, Compiler* comp);
1408 genTreeOps ReverseRelop(genTreeOps relop);
1411 genTreeOps SwapRelop(genTreeOps relop);
1413 //---------------------------------------------------------------------
1416 bool Compare(GenTreePtr op1, GenTreePtr op2, bool swapOK = false);
1418 //---------------------------------------------------------------------
1420 //---------------------------------------------------------------------
1423 const char * NodeName(genTreeOps op);
1426 const char * OpName(genTreeOps op);
1428 //---------------------------------------------------------------------
1430 //---------------------------------------------------------------------
1432 bool IsNothingNode () const;
1433 void gtBashToNOP ();
1435 // Value number update action enumeration
1436 enum ValueNumberUpdate
1438 CLEAR_VN, // Clear value number
1439 PRESERVE_VN // Preserve value number
1442 void SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN); // set gtOper
1443 void SetOperResetFlags (genTreeOps oper); // set gtOper and reset flags
1445 void ChangeOperConst (genTreeOps oper); // ChangeOper(constOper)
1446 // set gtOper and only keep GTF_COMMON_MASK flags
1447 void ChangeOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN);
1448 void ChangeOperUnchecked (genTreeOps oper);
1450 bool IsLocal() const
1452 return OperIsLocal(OperGet());
1455 // Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
1456 // is not the same size as the type of the GT_LCL_VAR.
1457 bool IsPartialLclFld(Compiler* comp);
1459 // Returns "true" iff "this" defines a local variable. Requires "comp" to be the
1460 // current compilation. If returns "true", sets "*pLclVarTree" to the
1461 // tree for the local that is defined, and, if "pIsEntire" is non-null, sets "*pIsEntire" to
1462 // true or false, depending on whether the assignment writes to the entirety of the local
1463 // variable, or just a portion of it.
1464 bool DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire = nullptr);
1466 // Returns true if "this" represents the address of a local, or a field of a local. If returns true, sets
1467 // "*pLclVarTree" to the node indicating the local variable. If the address is that of a field of this node,
1468 // sets "*pFldSeq" to the field sequence representing that field, else null.
1469 bool IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq);
1471 // Simpler variant of the above which just returns the local node if this is an expression that
1472 // yields an address into a local
1473 GenTreeLclVarCommon* IsLocalAddrExpr();
1475 // Determine if this is a LclVarCommon node and return some additional info about it in the
1476 // two out parameters.
1477 bool IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq);
1479 // Determine whether this is an assignment tree of the form X = X (op) Y,
1480 // where Y is an arbitrary tree, and X is a lclVar.
1481 unsigned IsLclVarUpdateTree(GenTree** otherTree, genTreeOps *updateOper);
1483 // If returns "true", "this" may represent the address of a static or instance field
1484 // (or a field of such a field, in the case of an object field of type struct).
1485 // If returns "true", then either "*pObj" is set to the object reference,
1486 // or "*pStatic" is set to the baseAddr or offset to be added to the "*pFldSeq"
1487 // Only one of "*pObj" or "*pStatic" will be set, the other one will be null.
1488 // The boolean return value only indicates that "this" *may* be a field address
1489 // -- the field sequence must also be checked.
1490 // If it is a field address, the field sequence will be a sequence of length >= 1,
1491 // starting with an instance or static field, and optionally continuing with struct fields.
1492 bool IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic, FieldSeqNode** pFldSeq);
1494 // Requires "this" to be the address of an array (the child of a GT_IND labeled with GTF_IND_ARR_INDEX).
1495 // Sets "pArr" to the node representing the array (either an array object pointer, or perhaps a byref to the some element).
1496 // Sets "*pArrayType" to the class handle for the array type.
1497 // Sets "*inxVN" to the value number inferred for the array index.
1498 // Sets "*pFldSeq" to the sequence, if any, of struct fields used to index into the array element.
1499 void ParseArrayAddress(Compiler* comp,
1500 struct ArrayInfo* arrayInfo,
1503 FieldSeqNode** pFldSeq);
1505 // Helper method for the above.
1506 void ParseArrayAddressWork(Compiler* comp, ssize_t inputMul, GenTreePtr* pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq);
1508 // Requires "this" to be a GT_IND. Requires the outermost caller to set "*pFldSeq" to nullptr.
1509 // Returns true if it is an array index expression, or access to a (sequence of) struct field(s)
1510 // within a struct array element. If it returns true, sets *arrayInfo to the array information, and sets *pFldSeq to the sequence
1511 // of struct field accesses.
1512 bool ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq);
1514 // Requires "this" to be the address of a (possible) array element (or struct field within that).
1515 // If it is, sets "*arrayInfo" to the array access info, "*pFldSeq" to the sequence of struct fields
1516 // accessed within the array element, and returns true. If not, returns "false".
1517 bool ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq);
1519 // Requires "this" to be an int expression. If it is a sequence of one or more integer constants added together,
1520 // returns true and sets "*pFldSeq" to the sequence of fields with which those constants are annotated.
1521 bool ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq);
1523 // Labels "*this" as an array index expression: label all constants and variables that could contribute, as part of an affine expression, to the value of the
1525 void LabelIndex(Compiler* comp, bool isConst = true);
1527 // Assumes that "this" occurs in a context where it is being dereferenced as the LHS of an assignment-like
1528 // statement (assignment, initblk, or copyblk). The "width" should be the number of bytes copied by the
1529 // operation. Returns "true" if "this" is an address of (or within)
1530 // a local variable; sets "*pLclVarTree" to that local variable instance; and, if "pIsEntire" is non-null,
1531 // sets "*pIsEntire" to true if this assignment writes the full width of the local.
1532 bool DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire);
1534 bool IsRegVar () const
1536 return OperGet() == GT_REG_VAR?true:false;
1540 return (gtFlags & GTF_REG_VAL)?true:false;
1544 gtFlags |= GTF_REG_VAL;
1547 regNumber GetReg() const
1549 return InReg() ? gtRegNum : REG_NA;
1551 bool IsRegVarDeath () const
1553 assert(OperGet() == GT_REG_VAR);
1554 return (gtFlags & GTF_VAR_DEATH)?true:false;
1556 bool IsRegVarBirth () const
1558 assert(OperGet() == GT_REG_VAR);
1559 return (gtFlags & GTF_REG_BIRTH)?true:false;
1561 bool IsReverseOp() const
1563 return (gtFlags & GTF_REVERSE_OPS)?true:false;
1566 inline bool IsCnsIntOrI () const;
1568 inline bool IsIntegralConst () const;
1570 inline bool IsIntCnsFitsInI32 ();
1572 inline bool IsCnsFltOrDbl() const;
1574 inline bool IsCnsNonZeroFltOrDbl();
1576 bool IsIconHandle () const
1578 assert(gtOper == GT_CNS_INT);
1579 return (gtFlags & GTF_ICON_HDL_MASK) ? true : false;
1582 bool IsIconHandle (unsigned handleType) const
1584 assert(gtOper == GT_CNS_INT);
1585 assert((handleType & GTF_ICON_HDL_MASK) != 0); // check that handleType is one of the valid GTF_ICON_* values
1586 assert((handleType & ~GTF_ICON_HDL_MASK) == 0);
1587 return (gtFlags & GTF_ICON_HDL_MASK) == handleType;
1590 // Return just the part of the flags corresponding to the GTF_ICON_*_HDL flag. For example,
1591 // GTF_ICON_SCOPE_HDL. The tree node must be a const int, but it might not be a handle, in which
1592 // case we'll return zero.
1593 unsigned GetIconHandleFlag () const
1595 assert(gtOper == GT_CNS_INT);
1596 return (gtFlags & GTF_ICON_HDL_MASK);
1599 // Mark this node as no longer being a handle; clear its GTF_ICON_*_HDL bits.
1600 void ClearIconHandleMask()
1602 assert(gtOper == GT_CNS_INT);
1603 gtFlags &= ~GTF_ICON_HDL_MASK;
1606 // Return true if the two GT_CNS_INT trees have the same handle flag (GTF_ICON_*_HDL).
1607 static bool SameIconHandleFlag(GenTree* t1, GenTree* t2)
1609 return t1->GetIconHandleFlag() == t2->GetIconHandleFlag();
1612 bool IsArgPlaceHolderNode() const { return OperGet() == GT_ARGPLACE; }
1613 bool IsCall () const { return OperGet() == GT_CALL; }
1614 bool IsStatement () const { return OperGet() == GT_STMT; }
1615 inline bool IsHelperCall ();
1617 bool IsVarAddr () const;
1618 bool gtOverflow () const;
1619 bool gtOverflowEx () const;
1620 bool gtSetFlags () const;
1621 bool gtRequestSetFlags ();
1623 bool gtIsValid64RsltMul ();
1624 static int gtDispFlags (unsigned flags, unsigned debugFlags);
1628 inline var_types CastFromType();
1629 inline var_types& CastToType();
1631 // Returns true if this gentree node is marked by lowering to indicate
1632 // that codegen can still generate code even if it wasn't allocated a
1634 bool IsRegOptional() const;
1636 // Returns "true" iff "*this" is an assignment (GT_ASG) tree that defines an SSA name (lcl = phi(...));
1639 // Returns "true" iff "*this" is a statement containing an assignment that defines an SSA name (lcl = phi(...));
1640 bool IsPhiDefnStmt();
1642 // Can't use an assignment operator, because we need the extra "comp" argument
1643 // (to provide the allocator necessary for the VarSet assignment).
1644 // TODO-Cleanup: Not really needed now, w/o liveset on tree nodes
1645 void CopyTo(class Compiler* comp, const GenTree& gt);
1647 // Like the above, excepts assumes copying from small node to small node.
1648 // (Following the code it replaces, it does *not* copy the GenTree fields,
1649 // which CopyTo does.)
1650 void CopyToSmall(const GenTree& gt);
1652 // Because of the fact that we hid the assignment operator of "BitSet" (in DEBUG),
1653 // we can't synthesize an assignment operator.
1654 // TODO-Cleanup: Could change this w/o liveset on tree nodes
1655 // (This is also necessary for the VTable trick.)
1658 // Returns the number of children of the current node.
1659 unsigned NumChildren();
1661 // Requires "childNum < NumChildren()". Returns the "n"th child of "this."
1662 GenTreePtr GetChild(unsigned childNum);
1664 // The maximum possible # of children of any node.
1665 static const int MAX_CHILDREN = 6;
1667 bool IsReuseRegVal() const
1669 // This can be extended to non-constant nodes, but not to local or indir nodes.
1670 if(OperIsConst() && ((gtFlags & GTF_REUSE_REG_VAL) != 0))
1676 void SetReuseRegVal()
1678 assert(OperIsConst());
1679 gtFlags |= GTF_REUSE_REG_VAL;
1681 void ResetReuseRegVal()
1683 assert(OperIsConst());
1684 gtFlags &= ~GTF_REUSE_REG_VAL;
1689 GenTree& operator=(const GenTree& gt) {
1690 assert(!"Don't copy");
1695 #if DEBUGGABLE_GENTREE
1696 // In DEBUG builds, add a dummy virtual method, to give the debugger run-time type information.
1697 virtual void DummyVirt() {}
1699 typedef void* VtablePtr;
1701 VtablePtr GetVtableForOper(genTreeOps oper);
1702 void SetVtableForOper(genTreeOps oper);
1704 static VtablePtr s_vtablesForOpers[GT_COUNT];
1705 static VtablePtr s_vtableForOp;
1706 #endif // DEBUGGABLE_GENTREE
1709 inline void* operator new(size_t sz, class Compiler*, genTreeOps oper);
1711 inline GenTree(genTreeOps oper, var_types type
1712 DEBUGARG(bool largeNode = false));
1716 /*****************************************************************************/
1717 // In the current design, we never instantiate GenTreeUnOp: it exists only to be
1718 // used as a base class. For unary operators, we instantiate GenTreeOp, with a NULL second
1719 // argument. We check that this is true dynamically. We could tighten this and get static
1720 // checking, but that would entail accessing the first child of a unary operator via something
1721 // like gtUnOp.gtOp1 instead of gtOp.gtOp1.
1722 struct GenTreeUnOp: public GenTree
1727 GenTreeUnOp(genTreeOps oper, var_types type
1728 DEBUGARG(bool largeNode = false)) :
1730 DEBUGARG(largeNode)),
1734 GenTreeUnOp(genTreeOps oper, var_types type, GenTreePtr op1
1735 DEBUGARG(bool largeNode = false)) :
1737 DEBUGARG(largeNode)),
1740 assert(op1 != nullptr || NullOp1Legal());
1741 if (op1 != nullptr) // Propagate effects flags from child.
1742 gtFlags |= op1->gtFlags & GTF_ALL_EFFECT;
1745 #if DEBUGGABLE_GENTREE
1746 GenTreeUnOp() : GenTree(), gtOp1(nullptr) {}
1750 struct GenTreeOp: public GenTreeUnOp
1754 GenTreeOp(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2
1755 DEBUGARG(bool largeNode = false)) :
1756 GenTreeUnOp(oper, type, op1
1757 DEBUGARG(largeNode)),
1760 // comparisons are always integral types
1761 assert(!GenTree::OperIsCompare(oper) || varTypeIsIntegral(type));
1762 // Binary operators, with a few exceptions, require a non-nullptr
1764 assert(op2 != nullptr || NullOp2Legal());
1765 // Unary operators, on the other hand, require a null second argument.
1766 assert(!OperIsUnary(oper) || op2 == nullptr);
1767 // Propagate effects flags from child. (UnOp handled this for first child.)
1770 gtFlags |= op2->gtFlags & GTF_ALL_EFFECT;
1774 // A small set of types are unary operators with optional arguments. We use
1775 // this constructor to build those.
1776 GenTreeOp(genTreeOps oper, var_types type
1777 DEBUGARG(bool largeNode = false)) :
1778 GenTreeUnOp(oper, type
1779 DEBUGARG(largeNode)),
1782 // Unary operators with optional arguments:
1783 assert(oper == GT_NOP ||
1784 oper == GT_RETURN ||
1785 oper == GT_RETFILT ||
1789 #if DEBUGGABLE_GENTREE
1790 GenTreeOp() : GenTreeUnOp(), gtOp2(nullptr) {}
1795 struct GenTreeVal: public GenTree
1799 GenTreeVal(genTreeOps oper, var_types type, ssize_t val) :
1800 GenTree(oper, type),
1803 #if DEBUGGABLE_GENTREE
1804 GenTreeVal() : GenTree() {}
1808 struct GenTreeIntConCommon: public GenTree
1810 inline INT64 LngValue();
1811 inline void SetLngValue(INT64 val);
1812 inline ssize_t IconValue();
1813 inline void SetIconValue(ssize_t val);
1815 GenTreeIntConCommon(genTreeOps oper, var_types type
1816 DEBUGARG(bool largeNode = false)) :
1818 DEBUGARG(largeNode))
1823 return FitsInI32(IconValue());
1826 static bool FitsInI32(ssize_t val)
1828 #ifdef _TARGET_64BIT_
1829 return (int)val == val;
1835 bool ImmedValNeedsReloc(Compiler* comp);
1836 bool GenTreeIntConCommon::ImmedValCanBeFolded(Compiler* comp, genTreeOps op);
1838 #ifdef _TARGET_XARCH_
1839 bool FitsInAddrBase(Compiler* comp);
1840 bool AddrNeedsReloc(Compiler* comp);
1843 #if DEBUGGABLE_GENTREE
1844 GenTreeIntConCommon() : GenTree() {}
1848 // node representing a read from a physical register
1849 struct GenTreePhysReg: public GenTree
1851 // physregs need a field beyond gtRegNum because
1852 // gtRegNum indicates the destination (and can be changed)
1853 // whereas reg indicates the source
1855 GenTreePhysReg(regNumber r, var_types type=TYP_I_IMPL) :
1856 GenTree(GT_PHYSREG, type), gtSrcReg(r)
1859 #if DEBUGGABLE_GENTREE
1860 GenTreePhysReg() : GenTree() {}
1864 #ifndef LEGACY_BACKEND
1865 // gtJumpTable - Switch Jump Table
1867 // This node stores a DWORD constant that represents the
1868 // absolute address of a jump table for switches. The code
1869 // generator uses this table to code the destination for every case
1870 // in an array of addresses which starting position is stored in
1872 struct GenTreeJumpTable : public GenTreeIntConCommon
1874 ssize_t gtJumpTableAddr;
1876 GenTreeJumpTable(var_types type
1877 DEBUGARG(bool largeNode = false)) :
1878 GenTreeIntConCommon(GT_JMPTABLE, type
1879 DEBUGARG(largeNode))
1881 #if DEBUGGABLE_GENTREE
1882 GenTreeJumpTable() : GenTreeIntConCommon() {}
1885 #endif // !LEGACY_BACKEND
1887 /* gtIntCon -- integer constant (GT_CNS_INT) */
1888 struct GenTreeIntCon: public GenTreeIntConCommon
1891 * This is the GT_CNS_INT struct definition.
1892 * It's used to hold for both int constants and pointer handle constants.
1893 * For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes
1894 * For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
1895 * In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG
1897 ssize_t gtIconVal; // Must overlap and have the same offset with the gtIconVal field in GenTreeLngCon below.
1899 /* The InitializeArray intrinsic needs to go back to the newarray statement
1900 to find the class handle of the array so that we can get its size. However,
1901 in ngen mode, the handle in that statement does not correspond to the compile
1902 time handle (rather it lets you get a handle at run-time). In that case, we also
1903 need to store a compile time handle, which goes in this gtCompileTimeHandle field.
1905 ssize_t gtCompileTimeHandle;
1907 // TODO-Cleanup: It's not clear what characterizes the cases where the field
1908 // above is used. It may be that its uses and those of the "gtFieldSeq" field below
1909 // are mutually exclusive, and they could be put in a union. Or else we should separate
1910 // this type into three subtypes.
1912 // If this constant represents the offset of one or more fields, "gtFieldSeq" represents that
1913 // sequence of fields.
1914 FieldSeqNode* gtFieldSeq;
1916 #if defined (LATE_DISASM)
1918 /* If the constant was morphed from some other node,
1919 these fields enable us to get back to what the node
1920 originally represented. See use of gtNewIconHandleNode()
1925 /* Template struct - The significant field of the other
1926 * structs should overlap exactly with this struct
1931 unsigned gtIconHdl1;
1941 CORINFO_CLASS_HANDLE gtIconCls;
1947 GenTreeIntCon(var_types type, ssize_t value
1948 DEBUGARG(bool largeNode = false)) :
1949 GenTreeIntConCommon(GT_CNS_INT, type
1950 DEBUGARG(largeNode)),
1952 gtCompileTimeHandle(0),
1953 gtFieldSeq(FieldSeqStore::NotAField())
1956 GenTreeIntCon(var_types type, ssize_t value, FieldSeqNode* fields
1957 DEBUGARG(bool largeNode = false)) :
1958 GenTreeIntConCommon(GT_CNS_INT, type
1959 DEBUGARG(largeNode)),
1961 gtCompileTimeHandle(0),
1964 assert(fields != NULL);
1967 void FixupInitBlkValue(var_types asgType);
1969 #ifdef _TARGET_64BIT_
1970 void TruncateOrSignExtend32()
1972 if (gtFlags & GTF_UNSIGNED)
1974 gtIconVal = UINT32(gtIconVal);
1978 gtIconVal = INT32(gtIconVal);
1981 #endif // _TARGET_64BIT_
1983 #if DEBUGGABLE_GENTREE
1984 GenTreeIntCon() : GenTreeIntConCommon() {}
1989 /* gtLngCon -- long constant (GT_CNS_LNG) */
1991 struct GenTreeLngCon: public GenTreeIntConCommon
1993 INT64 gtLconVal; // Must overlap and have the same offset with the gtIconVal field in GenTreeIntCon above.
1996 return (INT32)(gtLconVal & 0xffffffff);
2001 return (INT32)(gtLconVal >> 32);;
2004 GenTreeLngCon(INT64 val) :
2005 GenTreeIntConCommon(GT_CNS_NATIVELONG, TYP_LONG)
2006 { SetLngValue(val); }
2007 #if DEBUGGABLE_GENTREE
2008 GenTreeLngCon() : GenTreeIntConCommon() {}
2013 inline INT64 GenTreeIntConCommon::LngValue()
2015 #ifndef _TARGET_64BIT_
2016 assert(gtOper == GT_CNS_LNG);
2017 return AsLngCon()->gtLconVal;
2023 inline void GenTreeIntConCommon::SetLngValue(INT64 val)
2025 #ifndef _TARGET_64BIT_
2026 assert(gtOper == GT_CNS_LNG);
2027 AsLngCon()->gtLconVal = val;
2029 // Compile time asserts that these two fields overlap and have the same offsets: gtIconVal and gtLconVal
2030 C_ASSERT(offsetof(GenTreeLngCon, gtLconVal) == offsetof(GenTreeIntCon, gtIconVal));
2031 C_ASSERT(sizeof(AsLngCon()->gtLconVal) == sizeof(AsIntCon()->gtIconVal));
2033 SetIconValue(ssize_t(val));
2037 inline ssize_t GenTreeIntConCommon::IconValue()
2039 assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target!
2040 return AsIntCon()->gtIconVal;
2043 inline void GenTreeIntConCommon::SetIconValue(ssize_t val)
2045 assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target!
2046 AsIntCon()->gtIconVal = val;
2049 /* gtDblCon -- double constant (GT_CNS_DBL) */
2051 struct GenTreeDblCon: public GenTree
2055 bool isBitwiseEqual(GenTreeDblCon* other)
2057 unsigned __int64 bits = *(unsigned __int64 *)(>DconVal);
2058 unsigned __int64 otherBits = *(unsigned __int64 *)(&(other->gtDconVal));
2059 return (bits == otherBits);
2062 GenTreeDblCon(double val) :
2063 GenTree(GT_CNS_DBL, TYP_DOUBLE),
2066 #if DEBUGGABLE_GENTREE
2067 GenTreeDblCon() : GenTree() {}
2072 /* gtStrCon -- string constant (GT_CNS_STR) */
2074 struct GenTreeStrCon: public GenTree
2077 CORINFO_MODULE_HANDLE gtScpHnd;
2079 // Because this node can come from an inlined method we need to
2080 // have the scope handle, since it will become a helper call.
2081 GenTreeStrCon(unsigned sconCPX, CORINFO_MODULE_HANDLE mod
2082 DEBUGARG(bool largeNode = false)) :
2083 GenTree(GT_CNS_STR, TYP_REF
2084 DEBUGARG(largeNode)),
2085 gtSconCPX(sconCPX), gtScpHnd(mod)
2087 #if DEBUGGABLE_GENTREE
2088 GenTreeStrCon() : GenTree() {}
2092 // Common supertype of LCL_VAR, LCL_FLD, REG_VAR, PHI_ARG
2093 // This inherits from UnOp because lclvar stores are Unops
2094 struct GenTreeLclVarCommon: public GenTreeUnOp
2097 unsigned _gtLclNum; // The local number. An index into the Compiler::lvaTable array.
2098 unsigned _gtSsaNum; // The SSA number.
2101 GenTreeLclVarCommon(genTreeOps oper, var_types type, unsigned lclNum
2102 DEBUGARG(bool largeNode = false)) :
2103 GenTreeUnOp(oper, type
2104 DEBUGARG(largeNode))
2109 unsigned GetLclNum() const
2113 __declspec(property(get=GetLclNum)) unsigned gtLclNum;
2115 void SetLclNum(unsigned lclNum)
2118 _gtSsaNum = SsaConfig::RESERVED_SSA_NUM;
2121 unsigned GetSsaNum() const
2125 __declspec(property(get=GetSsaNum)) unsigned gtSsaNum;
2127 void SetSsaNum(unsigned ssaNum)
2134 return (gtSsaNum != SsaConfig::RESERVED_SSA_NUM);
2137 #if DEBUGGABLE_GENTREE
2138 GenTreeLclVarCommon() : GenTreeUnOp() {}
2142 // gtLclVar -- load/store/addr of local variable
2144 struct GenTreeLclVar: public GenTreeLclVarCommon
2146 IL_OFFSET gtLclILoffs; // instr offset of ref (only for debug info)
2148 GenTreeLclVar(var_types type, unsigned lclNum, IL_OFFSET ilOffs
2149 DEBUGARG(bool largeNode = false)) :
2150 GenTreeLclVarCommon(GT_LCL_VAR, type, lclNum
2151 DEBUGARG(largeNode)),
2155 GenTreeLclVar(genTreeOps oper, var_types type, unsigned lclNum, IL_OFFSET ilOffs
2156 DEBUGARG(bool largeNode = false)) :
2157 GenTreeLclVarCommon(oper, type, lclNum
2158 DEBUGARG(largeNode)),
2161 assert(OperIsLocal(oper) || OperIsLocalAddr(oper));
2164 #if DEBUGGABLE_GENTREE
2165 GenTreeLclVar() : GenTreeLclVarCommon() {}
2169 // gtLclFld -- load/store/addr of local variable field
2171 struct GenTreeLclFld: public GenTreeLclVarCommon
2173 unsigned gtLclOffs; // offset into the variable to access
2175 FieldSeqNode* gtFieldSeq; // This LclFld node represents some sequences of accesses.
2177 // old/FE style constructor where load/store/addr share same opcode
2178 GenTreeLclFld(var_types type, unsigned lclNum, unsigned lclOffs) :
2179 GenTreeLclVarCommon(GT_LCL_FLD, type, lclNum),
2180 gtLclOffs(lclOffs), gtFieldSeq(NULL)
2182 assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]);
2186 GenTreeLclFld(genTreeOps oper, var_types type, unsigned lclNum, unsigned lclOffs) :
2187 GenTreeLclVarCommon(oper, type, lclNum),
2188 gtLclOffs(lclOffs), gtFieldSeq(NULL)
2190 assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]);
2192 #if DEBUGGABLE_GENTREE
2193 GenTreeLclFld() : GenTreeLclVarCommon() {}
2197 struct GenTreeRegVar: public GenTreeLclVarCommon
2199 // TODO-Cleanup: Note that the base class GenTree already has a gtRegNum field.
2200 // It's not clear exactly why a GT_REG_VAR has a separate field. When
2201 // GT_REG_VAR is created, the two are identical. It appears that they may
2202 // or may not remain so. In particular, there is a comment in stackfp.cpp
2205 // There used to be an assertion: assert(src->gtRegNum == src->gtRegVar.gtRegNum, ...)
2206 // here, but there's actually no reason to assume that. AFAICT, for FP vars under stack FP,
2207 // src->gtRegVar.gtRegNum is the allocated stack pseudo-register, but src->gtRegNum is the
2208 // FP stack position into which that is loaded to represent a particular use of the variable.
2210 // It might be the case that only for stackfp do they ever differ.
2212 // The following might be possible: the GT_REG_VAR node has a last use prior to a complex
2213 // subtree being evaluated. It could then be spilled from the register. Later,
2214 // it could be unspilled into a different register, which would be recorded at
2215 // the unspill time in the GenTree::gtRegNum, whereas GenTreeRegVar::gtRegNum
2216 // is left alone. It's not clear why that is useful.
2218 // Assuming there is a particular use, like stack fp, that requires it, maybe we
2219 // can get rid of GT_REG_VAR and just leave it as GT_LCL_VAR, using the base class gtRegNum field.
2220 // If we need it for stackfp, we could add a GenTreeStackFPRegVar type, which carries both the
2221 // pieces of information, in a clearer and more specific way (in particular, with
2222 // a different member name).
2227 regNumberSmall _gtRegNum;
2231 GenTreeRegVar(var_types type, unsigned lclNum, regNumber regNum) :
2232 GenTreeLclVarCommon(GT_REG_VAR, type, lclNum
2238 // The register number is stored in a small format (8 bits), but the getters return and the setters take
2239 // a full-size (unsigned) format, to localize the casts here.
2241 __declspec(property(get=GetRegNum,put=SetRegNum))
2244 regNumber GetRegNum() const
2246 return (regNumber) _gtRegNum;
2249 void SetRegNum(regNumber reg)
2251 _gtRegNum = (regNumberSmall) reg;
2252 assert(_gtRegNum == reg);
2255 #if DEBUGGABLE_GENTREE
2256 GenTreeRegVar() : GenTreeLclVarCommon() {}
2260 /* gtCast -- conversion to a different type (GT_CAST) */
2262 struct GenTreeCast: public GenTreeOp
2264 GenTreePtr& CastOp() { return gtOp1; }
2265 var_types gtCastType;
2267 GenTreeCast(var_types type, GenTreePtr op, var_types castType
2268 DEBUGARG(bool largeNode = false)) :
2269 GenTreeOp(GT_CAST, type, op, nullptr
2270 DEBUGARG(largeNode)),
2271 gtCastType(castType)
2273 #if DEBUGGABLE_GENTREE
2274 GenTreeCast() : GenTreeOp() {}
2279 // GT_BOX nodes are place markers for boxed values. The "real" tree
2280 // for most purposes is in gtBoxOp.
2281 struct GenTreeBox: public GenTreeUnOp
2283 // An expanded helper call to implement the "box" if we don't get
2284 // rid of it any other way. Must be in same position as op1.
2286 GenTreePtr& BoxOp() { return gtOp1; }
2287 // This is the statement that contains the assignment tree when the node is an inlined GT_BOX on a value
2289 GenTreePtr gtAsgStmtWhenInlinedBoxValue;
2291 GenTreeBox(var_types type, GenTreePtr boxOp, GenTreePtr asgStmtWhenInlinedBoxValue) :
2292 GenTreeUnOp(GT_BOX, type, boxOp),
2293 gtAsgStmtWhenInlinedBoxValue(asgStmtWhenInlinedBoxValue)
2295 #if DEBUGGABLE_GENTREE
2296 GenTreeBox() : GenTreeUnOp() {}
2302 /* gtField -- data member ref (GT_FIELD) */
2304 struct GenTreeField: public GenTree
2306 GenTreePtr gtFldObj;
2307 CORINFO_FIELD_HANDLE gtFldHnd;
2309 bool gtFldMayOverlap;
2310 #ifdef FEATURE_READYTORUN_COMPILER
2311 CORINFO_CONST_LOOKUP gtFieldLookup;
2314 GenTreeField(var_types type) :
2315 GenTree(GT_FIELD, type
2318 gtFldMayOverlap = false;
2320 #if DEBUGGABLE_GENTREE
2321 GenTreeField() : GenTree() {}
2325 // Represents the Argument list of a call node, as a Lisp-style linked list.
2326 // (Originally I had hoped that this could have *only* the m_arg/m_rest fields, but it turns out
2327 // that enough of the GenTree mechanism is used that it makes sense just to make it a subtype. But
2328 // note that in many ways, this is *not* a "real" node of the tree, but rather a mechanism for
2329 // giving call nodes a flexible number of children. GenTreeArgListNodes never evaluate to registers,
2332 // Note that while this extends GenTreeOp, it is *not* an EXOP. We don't add any new fields, and one
2333 // is free to allocate a GenTreeOp of type GT_LIST. If you use this type, you get the convenient Current/Rest
2334 // method names for the arguments.
2335 struct GenTreeArgList: public GenTreeOp
2337 GenTreePtr& Current() { return gtOp1; }
2338 GenTreeArgList*& Rest() { assert(gtOp2 == NULL || gtOp2->OperGet() == GT_LIST); return *reinterpret_cast<GenTreeArgList**>(>Op2); }
2340 #if DEBUGGABLE_GENTREE
2341 GenTreeArgList() : GenTreeOp() {}
2344 GenTreeArgList(GenTreePtr arg) :
2345 GenTreeArgList(arg, nullptr) {}
2347 GenTreeArgList(GenTreePtr arg, GenTreeArgList* rest) :
2348 GenTreeOp(GT_LIST, TYP_VOID, arg, rest)
2350 // With structs passed in multiple args we could have an arg
2351 // GT_LIST containing a list of LCL_FLDs, see IsListForMultiRegArg()
2353 assert((arg != nullptr) && ((!arg->IsList()) || (arg->IsListForMultiRegArg())));
2354 gtFlags |= arg->gtFlags & GTF_ALL_EFFECT;
2357 gtFlags |= rest->gtFlags & GTF_ALL_EFFECT;
2362 // There was quite a bit of confusion in the code base about which of gtOp1 and gtOp2 was the
2363 // 'then' and 'else' clause of a colon node. Adding these accessors, while not enforcing anything,
2364 // at least *allows* the programmer to be obviously correct.
2365 // However, these conventions seem backward.
2366 // TODO-Cleanup: If we could get these accessors used everywhere, then we could switch them.
2367 struct GenTreeColon: public GenTreeOp
2369 GenTreePtr& ThenNode() { return gtOp2; }
2370 GenTreePtr& ElseNode() { return gtOp1; }
2372 #if DEBUGGABLE_GENTREE
2373 GenTreeColon() : GenTreeOp() {}
2376 GenTreeColon(var_types typ, GenTreePtr thenNode, GenTreePtr elseNode) :
2377 GenTreeOp(GT_COLON, typ, elseNode, thenNode)
2381 // gtCall -- method call (GT_CALL)
2382 typedef class fgArgInfo * fgArgInfoPtr;
2383 enum class InlineObservation;
2385 // Return type descriptor of a GT_CALL node.
2386 // x64 Unix, Arm64, Arm32 and x86 allow a value to be returned in multiple
2387 // registers. For such calls this struct provides the following info
2388 // on their return type
2389 // - type of value returned in each return register
2390 // - ABI return register numbers in which the value is returned
2391 // - count of return registers in which the value is returned
2393 // TODO-ARM: Update this to meet the needs of Arm64 and Arm32
2395 // TODO-AllArch: Right now it is used for describing multi-reg returned types.
2396 // Eventually we would want to use it for describing even single-reg
2397 // returned types (e.g. structs returned in single register x64/arm).
2398 // This would allow us not to lie or normalize single struct return
2399 // values in importer/morph.
2400 struct ReturnTypeDesc
2403 var_types m_regType[MAX_RET_REG_COUNT];
2415 // Initialize the return type descriptor given its type handle
2416 void InitializeReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd);
2418 // Reset type descriptor to defaults
2421 for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
2423 m_regType[i] = TYP_UNKNOWN;
2430 //--------------------------------------------------------------------------------------------
2431 // GetReturnRegCount: Get the count of return registers in which the return value is returned.
2437 // Count of return registers.
2438 // Returns 0 if the return type is not returned in registers.
2439 unsigned GetReturnRegCount() const
2444 for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
2446 if (m_regType[i] == TYP_UNKNOWN)
2455 // Any remaining elements in m_regTypes[] should also be TYP_UNKNOWN
2456 for (unsigned i = regCount+1; i < MAX_RET_REG_COUNT; ++i)
2458 assert(m_regType[i] == TYP_UNKNOWN);
2465 //-----------------------------------------------------------------------
2466 // IsMultiRegRetType: check whether the type is returned in multiple
2467 // return registers.
2473 // Returns true if the type is returned in multiple return registers.
2475 // Note that we only have to examine the first two values to determine this
2477 bool IsMultiRegRetType() const
2479 if (MAX_RET_REG_COUNT < 2)
2485 return ((m_regType[0] != TYP_UNKNOWN) &&
2486 (m_regType[1] != TYP_UNKNOWN));
2490 //--------------------------------------------------------------------------
2491 // GetReturnRegType: Get var_type of the return register specified by index.
2494 // index - Index of the return register.
2495 // First return register will have an index 0 and so on.
2498 // var_type of the return register specified by its index.
2499 // asserts if the index does not have a valid register return type.
2501 var_types GetReturnRegType(unsigned index)
2503 var_types result = m_regType[index];
2504 assert(result != TYP_UNKNOWN);
2509 // Get ith ABI return register
2510 regNumber GetABIReturnReg(unsigned idx);
2512 // Get reg mask of ABI return registers
2513 regMaskTP GetABIReturnRegs();
2516 struct GenTreeCall final : public GenTree
2518 GenTreePtr gtCallObjp; // The instance argument ('this' pointer)
2519 GenTreeArgList* gtCallArgs; // The list of arguments in original evaluation order
2520 GenTreeArgList* gtCallLateArgs; // On x86: The register arguments in an optimal order
2521 // On ARM/x64: - also includes any outgoing arg space arguments
2522 // - that were evaluated into a temp LclVar
2523 fgArgInfoPtr fgArgInfo;
2525 #if !FEATURE_FIXED_OUT_ARGS
2526 int regArgListCount;
2530 // TODO-Throughput: Revisit this (this used to be only defined if
2531 // FEATURE_FIXED_OUT_ARGS was enabled, so this makes GenTreeCall 4 bytes bigger on x86).
2532 CORINFO_SIG_INFO* callSig; // Used by tail calls and to register callsites with the EE
2534 #ifdef LEGACY_BACKEND
2535 regMaskTP gtCallRegUsedMask; // mask of registers used to pass parameters
2536 #endif // LEGACY_BACKEND
2538 // State required to support multi-reg returning call nodes.
2539 // For now it is enabled only for x64 unix.
2541 // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns.
2542 #if FEATURE_MULTIREG_RET
2543 ReturnTypeDesc gtReturnTypeDesc;
2545 // gtRegNum would always be the first return reg.
2546 // The following array holds the other reg numbers of multi-reg return.
2547 regNumber gtOtherRegs[MAX_RET_REG_COUNT - 1];
2549 // GTF_SPILL or GTF_SPILLED flag on a multi-reg call node indicates that one or
2550 // more of its result regs are in that state. The spill flag of each of the
2551 // return register is stored in the below array.
2552 unsigned gtSpillFlags[MAX_RET_REG_COUNT];
2555 //-----------------------------------------------------------------------
2556 // GetReturnTypeDesc: get the type descriptor of return value of the call
2562 // Type descriptor of the value returned by call
2565 // Right now implemented only for x64 unix and yet to be
2566 // implemented for other multi-reg target arch (Arm64/Arm32/x86).
2568 // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns.
2569 ReturnTypeDesc* GetReturnTypeDesc()
2571 #if FEATURE_MULTIREG_RET
2572 return >ReturnTypeDesc;
2578 //---------------------------------------------------------------------------
2579 // GetRegNumByIdx: get ith return register allocated to this call node.
2582 // idx - index of the return register
2585 // Return regNumber of ith return register of call node.
2586 // Returns REG_NA if there is no valid return register for the given index.
2588 regNumber GetRegNumByIdx(unsigned idx) const
2590 assert(idx < MAX_RET_REG_COUNT);
2597 #if FEATURE_MULTIREG_RET
2598 return gtOtherRegs[idx-1];
2604 //----------------------------------------------------------------------
2605 // SetRegNumByIdx: set ith return register of this call node
2609 // idx - index of the return register
2614 void SetRegNumByIdx(regNumber reg, unsigned idx)
2616 assert(idx < MAX_RET_REG_COUNT);
2622 #if FEATURE_MULTIREG_RET
2625 gtOtherRegs[idx - 1] = reg;
2626 assert(gtOtherRegs[idx - 1] == reg);
2633 //----------------------------------------------------------------------------
2634 // ClearOtherRegs: clear multi-reg state to indicate no regs are allocated
2642 void ClearOtherRegs()
2644 #if FEATURE_MULTIREG_RET
2645 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
2647 gtOtherRegs[i] = REG_NA;
2652 //----------------------------------------------------------------------------
2653 // CopyOtherRegs: copy multi-reg state from the given call node to this node
2656 // fromCall - GenTreeCall node from which to copy multi-reg state
2661 void CopyOtherRegs(GenTreeCall* fromCall)
2663 #if FEATURE_MULTIREG_RET
2664 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
2666 this->gtOtherRegs[i] = fromCall->gtOtherRegs[i];
2671 // Get reg mask of all the valid registers of gtOtherRegs array
2672 regMaskTP GetOtherRegMask() const;
2674 //----------------------------------------------------------------------
2675 // GetRegSpillFlagByIdx: get spill flag associated with the return register
2676 // specified by its index.
2679 // idx - Position or index of the return register
2682 // Returns GTF_* flags associated with.
2683 unsigned GetRegSpillFlagByIdx(unsigned idx) const
2685 assert(idx < MAX_RET_REG_COUNT);
2687 #if FEATURE_MULTIREG_RET
2688 return gtSpillFlags[idx];
2690 assert(!"unreached");
2695 //----------------------------------------------------------------------
2696 // SetRegSpillFlagByIdx: set spill flags for the return register
2697 // specified by its index.
2700 // flags - GTF_* flags
2701 // idx - Position or index of the return register
2705 void SetRegSpillFlagByIdx(unsigned flags, unsigned idx)
2707 assert(idx < MAX_RET_REG_COUNT);
2709 #if FEATURE_MULTIREG_RET
2710 gtSpillFlags[idx] = flags;
2716 //-------------------------------------------------------------------
2717 // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
2724 void ClearOtherRegFlags()
2726 #if FEATURE_MULTIREG_RET
2727 for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
2729 gtSpillFlags[i] = 0;
2734 //-------------------------------------------------------------------------
2735 // CopyOtherRegFlags: copy GTF_* flags associated with gtOtherRegs from
2736 // the given call node.
2739 // fromCall - GenTreeCall node from which to copy
2744 void CopyOtherRegFlags(GenTreeCall* fromCall)
2746 #if FEATURE_MULTIREG_RET
2747 for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i)
2749 this->gtSpillFlags[i] = fromCall->gtSpillFlags[i];
2754 #define GTF_CALL_M_EXPLICIT_TAILCALL 0x0001 // GT_CALL -- the call is "tail" prefixed and importer has performed tail call checks
2755 #define GTF_CALL_M_TAILCALL 0x0002 // GT_CALL -- the call is a tailcall
2756 #define GTF_CALL_M_VARARGS 0x0004 // GT_CALL -- the call uses varargs ABI
2757 #define GTF_CALL_M_RETBUFFARG 0x0008 // GT_CALL -- first parameter is the return buffer argument
2758 #define GTF_CALL_M_DELEGATE_INV 0x0010 // GT_CALL -- call to Delegate.Invoke
2759 #define GTF_CALL_M_NOGCCHECK 0x0020 // GT_CALL -- not a call for computing full interruptability
2760 #define GTF_CALL_M_SPECIAL_INTRINSIC 0x0040 // GT_CALL -- function that could be optimized as an intrinsic
2761 // in special cases. Used to optimize fast way out in morphing
2762 #define GTF_CALL_M_UNMGD_THISCALL 0x0080 // "this" pointer (first argument) should be enregistered (only for GTF_CALL_UNMANAGED)
2763 #define GTF_CALL_M_VIRTSTUB_REL_INDIRECT 0x0080 // the virtstub is indirected through a relative address (only for GTF_CALL_VIRT_STUB)
2764 #define GTF_CALL_M_NONVIRT_SAME_THIS 0x0080 // callee "this" pointer is equal to caller this pointer (only for GTF_CALL_NONVIRT)
2765 #define GTF_CALL_M_FRAME_VAR_DEATH 0x0100 // GT_CALL -- the compLvFrameListRoot variable dies here (last use)
2767 #ifndef LEGACY_BACKEND
2768 #define GTF_CALL_M_TAILCALL_VIA_HELPER 0x0200 // GT_CALL -- call is a tail call dispatched via tail call JIT helper.
2769 #endif // !LEGACY_BACKEND
2771 #if FEATURE_TAILCALL_OPT
2772 #define GTF_CALL_M_IMPLICIT_TAILCALL 0x0400 // GT_CALL -- call is an opportunistic tail call and importer has performed tail call checks
2773 #define GTF_CALL_M_TAILCALL_TO_LOOP 0x0800 // GT_CALL -- call is a fast recursive tail call that can be converted into a loop
2776 #define GTF_CALL_M_PINVOKE 0x1000 // GT_CALL -- call is a pinvoke. This mirrors VM flag CORINFO_FLG_PINVOKE.
2777 // A call marked as Pinvoke is not necessarily a GT_CALL_UNMANAGED. For e.g.
2778 // an IL Stub dynamically generated for a PInvoke declaration is flagged as
2779 // a Pinvoke but not as an unmanaged call. See impCheckForPInvokeCall() to
2780 // know when these flags are set.
2782 #define GTF_CALL_M_R2R_REL_INDIRECT 0x2000 // GT_CALL -- ready to run call is indirected through a relative address
2784 bool IsUnmanaged() const { return (gtFlags & GTF_CALL_UNMANAGED) != 0; }
2785 bool NeedsNullCheck() const { return (gtFlags & GTF_CALL_NULLCHECK) != 0; }
2786 bool CallerPop() const { return (gtFlags & GTF_CALL_POP_ARGS) != 0; }
2787 bool IsVirtual() const { return (gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_NONVIRT; }
2788 bool IsVirtualStub() const { return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB; }
2789 bool IsVirtualVtable() const { return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_VTABLE; }
2790 bool IsInlineCandidate() const { return (gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0; }
2792 #ifndef LEGACY_BACKEND
2793 bool HasNonStandardAddedArgs(Compiler* compiler) const;
2794 int GetNonStandardAddedArgCount(Compiler* compiler) const;
2795 #endif // !LEGACY_BACKEND
2797 // Returns true if this call uses a retBuf argument and its calling convention
2798 bool HasRetBufArg() const
2800 return (gtCallMoreFlags & GTF_CALL_M_RETBUFFARG) != 0;
2803 //-------------------------------------------------------------------------
2804 // TreatAsHasRetBufArg:
2807 // compiler, the compiler instance so that we can call eeGetHelperNum
2810 // Returns true if we treat the call as if it has a retBuf argument
2811 // This method may actually have a retBuf argument
2812 // or it could be a JIT helper that we are still transforming during
2813 // the importer phase.
2816 // On ARM64 marking the method with the GTF_CALL_M_RETBUFFARG flag
2817 // will make HasRetBufArg() return true, but will also force the
2818 // use of register x8 to pass the RetBuf argument.
2820 bool TreatAsHasRetBufArg(Compiler* compiler) const;
2822 //-----------------------------------------------------------------------------------------
2823 // HasMultiRegRetVal: whether the call node returns its value in multiple return registers.
2829 // True if the call is returning a multi-reg return value. False otherwise.
2832 // This is implemented only for x64 Unix and yet to be implemented for
2833 // other multi-reg return target arch (arm64/arm32/x86).
2835 // TODO-ARM: Implement this routine for Arm64 and Arm32
2836 bool HasMultiRegRetVal() const
2838 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
2839 return varTypeIsStruct(gtType) && !HasRetBufArg();
2840 #elif defined(_TARGET_X86_) && !defined(LEGACY_BACKEND)
2841 // LEGACY_BACKEND does not use multi reg returns for calls with long return types
2842 return varTypeIsLong(gtType);
2848 // Returns true if VM has flagged this method as CORINFO_FLG_PINVOKE.
2849 bool IsPInvoke() const { return (gtCallMoreFlags & GTF_CALL_M_PINVOKE) != 0; }
2851 // Note that the distinction of whether tail prefixed or an implicit tail call
2852 // is maintained on a call node till fgMorphCall() after which it will be
2853 // either a tail call (i.e. IsTailCall() is true) or a non-tail call.
2854 bool IsTailPrefixedCall() const { return (gtCallMoreFlags & GTF_CALL_M_EXPLICIT_TAILCALL) != 0; }
2856 // This method returning "true" implies that tail call flowgraph morhphing has
2857 // performed final checks and committed to making a tail call.
2858 bool IsTailCall() const { return (gtCallMoreFlags & GTF_CALL_M_TAILCALL) != 0; }
2860 // This method returning "true" implies that importer has performed tail call checks
2861 // and providing a hint that this can be converted to a tail call.
2862 bool CanTailCall() const { return IsTailPrefixedCall() || IsImplicitTailCall(); }
2864 #ifndef LEGACY_BACKEND
2865 bool IsTailCallViaHelper() const { return IsTailCall() && (gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER); }
2866 #else // LEGACY_BACKEND
2867 bool IsTailCallViaHelper() const { return true; }
2868 #endif // LEGACY_BACKEND
2870 #if FEATURE_FASTTAILCALL
2871 bool IsFastTailCall() const { return IsTailCall() && !(gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER); }
2872 #else // !FEATURE_FASTTAILCALL
2873 bool IsFastTailCall() const { return false; }
2874 #endif // !FEATURE_FASTTAILCALL
2876 #if FEATURE_TAILCALL_OPT
2877 // Returns true if this is marked for opportunistic tail calling.
2878 // That is, can be tail called though not explicitly prefixed with "tail" prefix.
2879 bool IsImplicitTailCall() const { return (gtCallMoreFlags & GTF_CALL_M_IMPLICIT_TAILCALL) != 0; }
2880 bool IsTailCallConvertibleToLoop() const { return (gtCallMoreFlags & GTF_CALL_M_TAILCALL_TO_LOOP) != 0; }
2881 #else // !FEATURE_TAILCALL_OPT
2882 bool IsImplicitTailCall() const { return false; }
2883 bool IsTailCallConvertibleToLoop() const { return false; }
2884 #endif // !FEATURE_TAILCALL_OPT
2886 bool IsSameThis() const { return (gtCallMoreFlags & GTF_CALL_M_NONVIRT_SAME_THIS) != 0; }
2887 bool IsDelegateInvoke() const { return (gtCallMoreFlags & GTF_CALL_M_DELEGATE_INV) != 0; }
2888 bool IsVirtualStubRelativeIndir() const { return (gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT) != 0; }
2890 #ifdef FEATURE_READYTORUN_COMPILER
2891 bool IsR2RRelativeIndir() const { return (gtCallMoreFlags & GTF_CALL_M_R2R_REL_INDIRECT) != 0; }
2892 void setEntryPoint(CORINFO_CONST_LOOKUP entryPoint)
2894 gtEntryPoint = entryPoint;
2895 if (gtEntryPoint.accessType == IAT_PVALUE)
2897 gtCallMoreFlags |= GTF_CALL_M_R2R_REL_INDIRECT;
2900 #endif // FEATURE_READYTORUN_COMPILER
2902 bool IsVarargs() const { return (gtCallMoreFlags & GTF_CALL_M_VARARGS) != 0; }
2904 unsigned short gtCallMoreFlags; // in addition to gtFlags
2906 unsigned char gtCallType :3; // value from the gtCallTypes enumeration
2907 unsigned char gtReturnType :5; // exact return type
2909 CORINFO_CLASS_HANDLE gtRetClsHnd; // The return type handle of the call if it is a struct; always available
2913 // only used for CALLI unmanaged calls (CT_INDIRECT)
2914 GenTreePtr gtCallCookie;
2915 // gtInlineCandidateInfo is only used when inlining methods
2916 InlineCandidateInfo* gtInlineCandidateInfo;
2917 void* gtStubCallStubAddr; // GTF_CALL_VIRT_STUB - these are never inlined
2918 CORINFO_GENERIC_HANDLE compileTimeHelperArgumentHandle; // Used to track type handle argument of dynamic helpers
2919 void* gtDirectCallAddress; // Used to pass direct call address between lower and codegen
2922 // expression evaluated after args are placed which determines the control target
2923 GenTree * gtControlExpr;
2927 CORINFO_METHOD_HANDLE gtCallMethHnd; // CT_USER_FUNC
2928 GenTreePtr gtCallAddr; // CT_INDIRECT
2931 #ifdef FEATURE_READYTORUN_COMPILER
2932 // Call target lookup info for method call from a Ready To Run module
2933 CORINFO_CONST_LOOKUP gtEntryPoint;
2936 #if defined(DEBUG) || defined(INLINE_DATA)
2937 // For non-inline candidates, track the first observation
2938 // that blocks candidacy.
2939 InlineObservation gtInlineObservation;
2941 // IL offset of the call wrt its parent method.
2942 IL_OFFSET gtRawILOffset;
2943 #endif // defined(DEBUG) || defined(INLINE_DATA)
2945 bool IsHelperCall() const
2947 return gtCallType == CT_HELPER;
2950 bool IsHelperCall(CORINFO_METHOD_HANDLE callMethHnd) const
2952 return IsHelperCall() && (callMethHnd == gtCallMethHnd);
2955 bool IsHelperCall(Compiler* compiler, unsigned helper) const;
2957 GenTreeCall(var_types type) :
2958 GenTree(GT_CALL, type)
2961 #if DEBUGGABLE_GENTREE
2962 GenTreeCall() : GenTree()
2968 struct GenTreeCmpXchg: public GenTree
2970 GenTreePtr gtOpLocation;
2971 GenTreePtr gtOpValue;
2972 GenTreePtr gtOpComparand;
2974 GenTreeCmpXchg(var_types type, GenTreePtr loc, GenTreePtr val, GenTreePtr comparand) :
2975 GenTree(GT_CMPXCHG, type),
2976 gtOpLocation(loc), gtOpValue(val), gtOpComparand(comparand)
2978 // There's no reason to do a compare-exchange on a local location, so we'll assume that all of these
2979 // have global effects.
2980 gtFlags |= GTF_GLOB_EFFECT;
2982 #if DEBUGGABLE_GENTREE
2983 GenTreeCmpXchg() : GenTree() {}
2989 struct GenTreeFptrVal: public GenTree
2991 CORINFO_METHOD_HANDLE gtFptrMethod;
2993 #ifdef FEATURE_READYTORUN_COMPILER
2994 CORINFO_CONST_LOOKUP gtEntryPoint;
2995 CORINFO_RESOLVED_TOKEN* gtLdftnResolvedToken;
2998 GenTreeFptrVal(var_types type, CORINFO_METHOD_HANDLE meth) :
2999 GenTree(GT_FTN_ADDR, type),
3002 #if DEBUGGABLE_GENTREE
3003 GenTreeFptrVal() : GenTree() {}
3008 struct GenTreeQmark : public GenTreeOp
3010 // Livesets on entry to then and else subtrees
3011 VARSET_TP gtThenLiveSet;
3012 VARSET_TP gtElseLiveSet;
3014 // The "Compiler*" argument is not a DEBUGARG here because we use it to keep track of the set of
3015 // (possible) QMark nodes.
3016 GenTreeQmark(var_types type, GenTreePtr cond, GenTreePtr colonOp, class Compiler* comp);
3018 #if DEBUGGABLE_GENTREE
3019 GenTreeQmark() : GenTreeOp(GT_QMARK, TYP_INT, NULL, NULL) {}
3023 /* gtIntrinsic -- intrinsic (possibly-binary op [NULL op2 is allowed] with an additional field) */
3025 struct GenTreeIntrinsic: public GenTreeOp
3027 CorInfoIntrinsics gtIntrinsicId;
3028 CORINFO_METHOD_HANDLE gtMethodHandle; // Method handle of the method which is treated as an intrinsic.
3030 #ifdef FEATURE_READYTORUN_COMPILER
3031 // Call target lookup info for method call from a Ready To Run module
3032 CORINFO_CONST_LOOKUP gtEntryPoint;
3035 GenTreeIntrinsic(var_types type, GenTreePtr op1, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle) :
3036 GenTreeOp(GT_INTRINSIC, type, op1, NULL),
3037 gtIntrinsicId(intrinsicId),
3038 gtMethodHandle(methodHandle)
3041 GenTreeIntrinsic(var_types type, GenTreePtr op1, GenTreePtr op2, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle) :
3042 GenTreeOp(GT_INTRINSIC, type, op1, op2),
3043 gtIntrinsicId(intrinsicId),
3044 gtMethodHandle(methodHandle)
3047 #if DEBUGGABLE_GENTREE
3048 GenTreeIntrinsic() : GenTreeOp() {}
3054 /* gtSIMD -- SIMD intrinsic (possibly-binary op [NULL op2 is allowed] with additional fields) */
3055 struct GenTreeSIMD: public GenTreeOp
3057 SIMDIntrinsicID gtSIMDIntrinsicID; // operation Id
3058 var_types gtSIMDBaseType; // SIMD vector base type
3059 unsigned gtSIMDSize; // SIMD vector size in bytes
3061 GenTreeSIMD(var_types type, GenTreePtr op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size) :
3062 GenTreeOp(GT_SIMD, type, op1, nullptr),
3063 gtSIMDIntrinsicID(simdIntrinsicID),
3064 gtSIMDBaseType(baseType),
3068 GenTreeSIMD(var_types type, GenTreePtr op1, GenTreePtr op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size) :
3069 GenTreeOp(GT_SIMD, type, op1, op2),
3070 gtSIMDIntrinsicID(simdIntrinsicID),
3071 gtSIMDBaseType(baseType),
3075 #if DEBUGGABLE_GENTREE
3076 GenTreeSIMD() : GenTreeOp() {}
3079 #endif // FEATURE_SIMD
3081 /* gtIndex -- array access */
3083 struct GenTreeIndex: public GenTreeOp
3085 GenTreePtr& Arr() { return gtOp1; }
3086 GenTreePtr& Index() { return gtOp2; }
3088 unsigned gtIndElemSize; // size of elements in the array
3089 CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type.
3091 GenTreeIndex(var_types type, GenTreePtr arr, GenTreePtr ind, unsigned indElemSize) :
3092 GenTreeOp(GT_INDEX, type, arr, ind),
3093 gtIndElemSize(indElemSize),
3094 gtStructElemClass(nullptr) // We always initialize this after construction.
3097 if (JitConfig.JitSkipArrayBoundCheck() == 1)
3099 // Skip bounds check
3105 gtFlags |= GTF_INX_RNGCHK;
3108 if (type == TYP_REF)
3110 gtFlags |= GTF_INX_REFARR_LAYOUT;
3113 gtFlags |= GTF_EXCEPT|GTF_GLOB_REF;
3115 #if DEBUGGABLE_GENTREE
3116 GenTreeIndex() : GenTreeOp() {}
3120 /* gtArrLen -- array length (GT_ARR_LENGTH)
3121 GT_ARR_LENGTH is used for "arr.length" */
3123 struct GenTreeArrLen: public GenTreeUnOp
3125 GenTreePtr& ArrRef() { return gtOp1; } // the array address node
3127 int gtArrLenOffset; // constant to add to "gtArrRef" to get the address of the array length.
3130 inline int ArrLenOffset() {
3131 return gtArrLenOffset;
3134 GenTreeArrLen(var_types type, GenTreePtr arrRef, int lenOffset) :
3135 GenTreeUnOp(GT_ARR_LENGTH, type, arrRef),
3136 gtArrLenOffset(lenOffset)
3139 #if DEBUGGABLE_GENTREE
3140 GenTreeArrLen() : GenTreeUnOp() {}
3145 // - a comparison value (generally an array length),
3146 // - an index value, and
3147 // - the label to jump to if the index is out of range.
3148 // - the "kind" of the throw block to branch to on failure
3149 // It generates no result.
3151 struct GenTreeBoundsChk: public GenTree
3153 GenTreePtr gtArrLen; // An expression for the length of the array being indexed.
3154 GenTreePtr gtIndex; // The index expression.
3156 GenTreePtr gtIndRngFailBB; // Label to jump to for array-index-out-of-range
3157 SpecialCodeKind gtThrowKind; // Kind of throw block to branch to on failure
3159 /* Only out-of-ranges at same stack depth can jump to the same label (finding return address is easier)
3160 For delayed calling of fgSetRngChkTarget() so that the
3161 optimizer has a chance of eliminating some of the rng checks */
3162 unsigned gtStkDepth;
3164 GenTreeBoundsChk(genTreeOps oper, var_types type, GenTreePtr arrLen, GenTreePtr index, SpecialCodeKind kind) :
3165 GenTree(oper, type),
3166 gtArrLen(arrLen), gtIndex(index),
3167 gtIndRngFailBB(NULL),
3171 // Effects flags propagate upwards.
3172 gtFlags |= (arrLen->gtFlags & GTF_ALL_EFFECT);
3173 gtFlags |= GTF_EXCEPT;
3175 #if DEBUGGABLE_GENTREE
3176 GenTreeBoundsChk() : GenTree() {}
3179 // If the gtArrLen is really an array length, returns array reference, else "NULL".
3180 GenTreePtr GetArray()
3182 if (gtArrLen->OperGet() == GT_ARR_LENGTH)
3184 return gtArrLen->gtArrLen.ArrRef();
3193 // gtArrElem -- general array element (GT_ARR_ELEM), for non "SZ_ARRAYS"
3194 // -- multidimensional arrays, or 1-d arrays with non-zero lower bounds.
3196 struct GenTreeArrElem: public GenTree
3198 GenTreePtr gtArrObj;
3200 #define GT_ARR_MAX_RANK 3
3201 GenTreePtr gtArrInds[GT_ARR_MAX_RANK]; // Indices
3202 unsigned char gtArrRank; // Rank of the array
3204 unsigned char gtArrElemSize; // !!! Caution, this is an "unsigned char", it is used only
3205 // on the optimization path of array intrisics.
3206 // It stores the size of array elements WHEN it can fit
3207 // into an "unsigned char".
3208 // This has caused VSW 571394.
3209 var_types gtArrElemType; // The array element type
3211 // Requires that "inds" is a pointer to an array of "rank" GenTreePtrs for the indices.
3212 GenTreeArrElem(var_types type, GenTreePtr arr, unsigned char rank, unsigned char elemSize, var_types elemType, GenTreePtr* inds) :
3213 GenTree(GT_ARR_ELEM, type),
3214 gtArrObj(arr), gtArrRank(rank), gtArrElemSize(elemSize), gtArrElemType(elemType)
3216 for (unsigned char i = 0; i < rank; i++) gtArrInds[i] = inds[i];
3217 gtFlags |= GTF_EXCEPT;
3219 #if DEBUGGABLE_GENTREE
3220 GenTreeArrElem() : GenTree() {}
3224 //--------------------------------------------
3226 // GenTreeArrIndex (gtArrIndex): Expression to bounds-check the index for one dimension of a
3227 // multi-dimensional or non-zero-based array., and compute the effective index
3228 // (i.e. subtracting the lower bound).
3231 // This node is similar in some ways to GenTreeBoundsChk, which ONLY performs the check.
3232 // The reason that this node incorporates the check into the effective index computation is
3233 // to avoid duplicating the codegen, as the effective index is required to compute the
3235 // TODO-CQ: Enable optimization of the lower bound and length by replacing this:
3238 // +--* ArrIndex[i, ]
3239 // with something like:
3241 // /--* ArrLowerBound[i, ]
3243 // +--* ArrLen[i, ] (either generalize GT_ARR_LENGTH or add a new node)
3245 // +--* ArrIndex[i, ]
3246 // Which could, for example, be optimized to the following when known to be within bounds:
3247 // /--* TempForLowerBoundDim0
3251 struct GenTreeArrIndex: public GenTreeOp
3253 // The array object - may be any expression producing an Array reference, but is likely to be a lclVar.
3254 GenTreePtr& ArrObj() { return gtOp1; }
3255 // The index expression - may be any integral expression.
3256 GenTreePtr& IndexExpr() { return gtOp2; }
3257 unsigned char gtCurrDim; // The current dimension
3258 unsigned char gtArrRank; // Rank of the array
3259 var_types gtArrElemType; // The array element type
3261 GenTreeArrIndex(var_types type, GenTreePtr arrObj, GenTreePtr indexExpr,
3262 unsigned char currDim, unsigned char arrRank, var_types elemType) :
3263 GenTreeOp(GT_ARR_INDEX, type, arrObj, indexExpr),
3264 gtCurrDim(currDim), gtArrRank(arrRank), gtArrElemType(elemType)
3266 gtFlags |= GTF_EXCEPT;
3268 #if DEBUGGABLE_GENTREE
3271 // Used only for GenTree::GetVtableForOper()
3272 GenTreeArrIndex() : GenTreeOp() {}
3276 // Represents either an InitBlk, InitObj, CpBlk or CpObj
3278 struct GenTreeBlkOp : public GenTreeOp
3281 // The destination for the CpBlk/CpObj/InitBlk/InitObj to copy bits to
3283 assert(gtOp1->gtOper == GT_LIST);
3284 return gtOp1->gtOp.gtOp1;
3287 // Return true iff the object being copied contains one or more GC pointers.
3290 // True if this BlkOpNode is a volatile memory operation.
3291 bool IsVolatile() const { return (gtFlags & GTF_BLK_VOLATILE) != 0; }
3293 // True if this BlkOpNode is a volatile memory operation.
3294 bool IsUnaligned() const { return (gtFlags & GTF_BLK_UNALIGNED) != 0; }
3296 // Instruction selection: during codegen time, what code sequence we will be using
3297 // to encode this operation.
3306 bool gtBlkOpGcUnsafe;
3308 GenTreeBlkOp(genTreeOps oper) :
3309 GenTreeOp(oper, TYP_VOID DEBUGARG(true)),
3310 gtBlkOpKind(BlkOpKindInvalid),
3311 gtBlkOpGcUnsafe(false)
3313 assert(OperIsBlkOp(oper));
3316 #if DEBUGGABLE_GENTREE
3319 GenTreeBlkOp() : GenTreeOp(){}
3320 #endif // DEBUGGABLE_GENTREE
3323 // gtObj -- 'object' (GT_OBJ). */
3325 struct GenTreeObj: public GenTreeUnOp
3327 // The address of the block.
3328 GenTreePtr& Addr() { return gtOp1; }
3330 CORINFO_CLASS_HANDLE gtClass; // the class of the object
3332 GenTreeObj(var_types type, GenTreePtr addr, CORINFO_CLASS_HANDLE cls) :
3333 GenTreeUnOp(GT_OBJ, type, addr),
3336 gtFlags |= GTF_GLOB_REF; // An Obj is always a global reference.
3339 #if DEBUGGABLE_GENTREE
3340 GenTreeObj() : GenTreeUnOp() {}
3344 // Represents a CpObj MSIL Node.
3345 struct GenTreeCpObj : public GenTreeBlkOp
3348 // The source for the CpBlk/CpObj to copy bits from
3349 GenTreePtr Source() {
3350 assert(gtOper == GT_COPYOBJ && gtOp1->gtOper == GT_LIST);
3351 return gtOp1->gtOp.gtOp2;
3354 // In the case of CopyObj, this is the class token that represents the type that is being copied.
3355 GenTreePtr ClsTok() { return gtOp2; }
3357 // If non-null, this array represents the gc-layout of the class that is being copied
3361 // If non-zero, this is the number of slots in the class layout that
3362 // contain gc-pointers.
3363 unsigned gtGcPtrCount;
3365 // If non-zero, the number of pointer-sized slots that constitutes the class token in CpObj.
3368 GenTreeCpObj(unsigned gcPtrCount, unsigned gtSlots, BYTE* gtGcPtrs) :
3369 GenTreeBlkOp(GT_COPYOBJ),
3371 gtGcPtrCount(gcPtrCount),
3374 #if DEBUGGABLE_GENTREE
3377 GenTreeCpObj() : GenTreeBlkOp(),
3381 #endif // DEBUGGABLE_GENTREE
3384 // Represents either an InitBlk or InitObj MSIL OpCode.
3385 struct GenTreeInitBlk : public GenTreeBlkOp
3389 // The value used to fill the destination buffer.
3390 GenTreePtr InitVal() { assert(gtOp1->gtOper == GT_LIST);
3391 return gtOp1->gtOp.gtOp2; }
3393 // The size of the buffer to be copied.
3394 GenTreePtr Size() { return gtOp2; }
3396 GenTreeInitBlk() : GenTreeBlkOp(GT_INITBLK){}
3398 #if DEBUGGABLE_GENTREE
3401 #endif // DEBUGGABLE_GENTREE
3404 // Represents a CpBlk or CpObj with no GC-pointers MSIL OpCode.
3405 struct GenTreeCpBlk : public GenTreeBlkOp
3409 // The value used to fill the destination buffer.
3410 // The source for the CpBlk/CpObj to copy bits from
3411 GenTreePtr Source() { assert(gtOp1->gtOper == GT_LIST);
3412 return gtOp1->gtOp.gtOp2; }
3414 // The size of the buffer to be copied.
3415 GenTreePtr Size() { return gtOp2; }
3417 GenTreeCpBlk() : GenTreeBlkOp(GT_COPYBLK){}
3419 #if DEBUGGABLE_GENTREE
3422 #endif // DEBUGGABLE_GENTREE
3425 //--------------------------------------------
3427 // GenTreeArrOffset (gtArrOffset): Expression to compute the accumulated offset for the address
3428 // of an element of a multi-dimensional or non-zero-based array.
3431 // The result of this expression is (gtOffset * dimSize) + gtIndex
3432 // where dimSize is the length/stride/size of the dimension, and is obtained from gtArrObj.
3433 // This node is generated in conjunction with the GenTreeArrIndex node, which computes the
3434 // effective index for a single dimension. The sub-trees can be separately optimized, e.g.
3435 // within a loop body where the expression for the 0th dimension may be invariant.
3437 // Here is an example of how the tree might look for a two-dimension array reference:
3441 // +--* ArrIndex[i, ]
3443 // /--| arrOffs[i, ]
3446 // +--* ArrIndex[*,j]
3448 // /--| arrOffs[*,j]
3449 // TODO-CQ: see comment on GenTreeArrIndex for how its representation may change. When that
3450 // is done, we will also want to replace the <arrObj> argument to arrOffs with the
3451 // ArrLen as for GenTreeArrIndex.
3453 struct GenTreeArrOffs: public GenTree
3455 GenTreePtr gtOffset; // The accumulated offset for lower dimensions - must be TYP_I_IMPL, and
3456 // will either be a CSE temp, the constant 0, or another GenTreeArrOffs node.
3457 GenTreePtr gtIndex; // The effective index for the current dimension - must be non-negative
3458 // and can be any expression (though it is likely to be either a GenTreeArrIndex,
3459 // node, a lclVar, or a constant).
3460 GenTreePtr gtArrObj; // The array object - may be any expression producing an Array reference,
3461 // but is likely to be a lclVar.
3462 unsigned char gtCurrDim; // The current dimension
3463 unsigned char gtArrRank; // Rank of the array
3464 var_types gtArrElemType; // The array element type
3466 GenTreeArrOffs(var_types type, GenTreePtr offset, GenTreePtr index, GenTreePtr arrObj,
3467 unsigned char currDim, unsigned char rank, var_types elemType) :
3468 GenTree(GT_ARR_OFFSET, type), gtOffset(offset), gtIndex(index), gtArrObj(arrObj),
3469 gtCurrDim(currDim), gtArrRank(rank), gtArrElemType(elemType)
3471 assert(index->gtFlags & GTF_EXCEPT);
3472 gtFlags |= GTF_EXCEPT;
3474 #if DEBUGGABLE_GENTREE
3475 GenTreeArrOffs() : GenTree() {}
3479 /* gtAddrMode -- Target-specific canonicalized addressing expression (GT_LEA) */
3481 struct GenTreeAddrMode: public GenTreeOp
3483 // Address is Base + Index*Scale + Offset.
3484 // These are the legal patterns:
3486 // Base // Base != nullptr && Index == nullptr && Scale == 0 && Offset == 0
3487 // Base + Index*Scale // Base != nullptr && Index != nullptr && Scale != 0 && Offset == 0
3488 // Base + Offset // Base != nullptr && Index == nullptr && Scale == 0 && Offset != 0
3489 // Base + Index*Scale + Offset // Base != nullptr && Index != nullptr && Scale != 0 && Offset != 0
3490 // Index*Scale // Base == nullptr && Index != nullptr && Scale > 1 && Offset == 0
3491 // Index*Scale + Offset // Base == nullptr && Index != nullptr && Scale > 1 && Offset != 0
3492 // Offset // Base == nullptr && Index == nullptr && Scale == 0 && Offset != 0
3495 // 1. Base + Index is legal with Scale==1
3496 // 2. If Index is null, Scale should be zero (or unintialized / unused)
3497 // 3. If Scale==1, then we should have "Base" instead of "Index*Scale", and "Base + Offset" instead of "Index*Scale + Offset".
3499 // First operand is base address/pointer
3500 bool HasBase() const { return gtOp1 != nullptr; }
3501 GenTreePtr& Base() { return gtOp1; }
3503 // Second operand is scaled index value
3504 bool HasIndex() const { return gtOp2 != nullptr; }
3505 GenTreePtr& Index() { return gtOp2; }
3507 unsigned gtScale; // The scale factor
3508 unsigned gtOffset; // The offset to add
3510 GenTreeAddrMode(var_types type, GenTreePtr base, GenTreePtr index,
3511 unsigned scale, unsigned offset) :
3512 GenTreeOp(GT_LEA, type, base, index )
3517 #if DEBUGGABLE_GENTREE
3520 // Used only for GenTree::GetVtableForOper()
3521 GenTreeAddrMode() : GenTreeOp() {}
3525 // Indir is just an op, no additional data, but some additional abstractions
3526 struct GenTreeIndir: public GenTreeOp
3528 // like an assign, op1 is the destination
3529 GenTreePtr& Addr() { return gtOp1; }
3531 // these methods provide an interface to the indirection node which
3539 GenTreeIndir(genTreeOps oper, var_types type, GenTree *addr, GenTree *data) :
3540 GenTreeOp(oper, type, addr, data)
3544 #if DEBUGGABLE_GENTREE
3547 // Used only for GenTree::GetVtableForOper()
3548 GenTreeIndir() : GenTreeOp() {}
3552 // Read-modify-write status of a RMW memory op rooted at a storeInd
3554 STOREIND_RMW_STATUS_UNKNOWN, // RMW status of storeInd unknown
3555 // Default status unless modified by IsRMWMemOpRootedAtStoreInd()
3557 // One of these denote storeind is a RMW memory operation.
3558 STOREIND_RMW_DST_IS_OP1, // StoreInd is known to be a RMW memory op and dst candidate is op1
3559 STOREIND_RMW_DST_IS_OP2, // StoreInd is known to be a RMW memory op and dst candidate is op2
3561 // One of these denote the reason for storeind is marked as non-RMW operation
3562 STOREIND_RMW_UNSUPPORTED_ADDR, // Addr mode is not yet supported for RMW memory
3563 STOREIND_RMW_UNSUPPORTED_OPER, // Operation is not supported for RMW memory
3564 STOREIND_RMW_UNSUPPORTED_TYPE, // Type is not supported for RMW memory
3565 STOREIND_RMW_INDIR_UNEQUAL // Indir to read value is not equivalent to indir that writes the value
3568 // StoreInd is just a BinOp, with additional RMW status
3569 struct GenTreeStoreInd: public GenTreeIndir
3571 #if !CPU_LOAD_STORE_ARCH
3572 // The below flag is set and used during lowering
3573 RMWStatus gtRMWStatus;
3575 bool IsRMWStatusUnknown() { return gtRMWStatus == STOREIND_RMW_STATUS_UNKNOWN; }
3576 bool IsNonRMWMemoryOp() {
3577 return gtRMWStatus == STOREIND_RMW_UNSUPPORTED_ADDR ||
3578 gtRMWStatus == STOREIND_RMW_UNSUPPORTED_OPER ||
3579 gtRMWStatus == STOREIND_RMW_UNSUPPORTED_TYPE ||
3580 gtRMWStatus == STOREIND_RMW_INDIR_UNEQUAL;
3582 bool IsRMWMemoryOp() { return gtRMWStatus == STOREIND_RMW_DST_IS_OP1 || gtRMWStatus == STOREIND_RMW_DST_IS_OP2; }
3583 bool IsRMWDstOp1() { return gtRMWStatus == STOREIND_RMW_DST_IS_OP1; }
3584 bool IsRMWDstOp2() { return gtRMWStatus == STOREIND_RMW_DST_IS_OP2; }
3585 #endif //!CPU_LOAD_STORE_ARCH
3587 RMWStatus GetRMWStatus() {
3588 #if !CPU_LOAD_STORE_ARCH
3591 return STOREIND_RMW_STATUS_UNKNOWN;
3595 void SetRMWStatusDefault()
3597 #if !CPU_LOAD_STORE_ARCH
3598 gtRMWStatus = STOREIND_RMW_STATUS_UNKNOWN;
3602 void SetRMWStatus(RMWStatus status)
3604 #if !CPU_LOAD_STORE_ARCH
3605 gtRMWStatus = status;
3609 GenTreePtr& Data() { return gtOp2; }
3611 GenTreeStoreInd(var_types type, GenTree *destPtr, GenTree *data) :
3612 GenTreeIndir(GT_STOREIND, type, destPtr, data)
3614 SetRMWStatusDefault();
3617 #if DEBUGGABLE_GENTREE
3620 // Used only for GenTree::GetVtableForOper()
3621 GenTreeStoreInd() : GenTreeIndir() { SetRMWStatusDefault(); }
3626 /* gtRetExp -- Place holder for the return expression from an inline candidate (GT_RET_EXPR) */
3628 struct GenTreeRetExpr: public GenTree
3630 GenTreePtr gtInlineCandidate;
3632 CORINFO_CLASS_HANDLE gtRetClsHnd;
3634 GenTreeRetExpr(var_types type) :
3635 GenTree(GT_RET_EXPR, type)
3637 #if DEBUGGABLE_GENTREE
3638 GenTreeRetExpr() : GenTree() {}
3643 /* gtStmt -- 'statement expr' (GT_STMT) */
3645 class InlineContext;
3647 struct GenTreeStmt: public GenTree
3649 GenTreePtr gtStmtExpr; // root of the expression tree
3650 GenTreePtr gtStmtList; // first node (for forward walks)
3651 InlineContext* gtInlineContext; // The inline context for this statement.
3653 #if defined(DEBUGGING_SUPPORT) || defined(DEBUG)
3654 IL_OFFSETX gtStmtILoffsx; // instr offset (if available)
3658 IL_OFFSET gtStmtLastILoffs;// instr offset at end of stmt
3661 bool gtStmtIsTopLevel()
3663 return (gtFlags & GTF_STMT_TOP_LEVEL) != 0;
3666 bool gtStmtIsEmbedded()
3668 return !gtStmtIsTopLevel();
3671 // Return the next statement, if it is embedded, otherwise nullptr
3672 GenTreeStmt* gtStmtNextIfEmbedded()
3674 GenTree* nextStmt = gtNext;
3675 if (nextStmt != nullptr && nextStmt->gtStmt.gtStmtIsEmbedded())
3677 return nextStmt->AsStmt();
3685 GenTree* gtStmtNextTopLevelStmt()
3687 GenTree* nextStmt = gtNext;
3688 while (nextStmt != nullptr && nextStmt->gtStmt.gtStmtIsEmbedded())
3690 nextStmt = nextStmt->gtNext;
3695 __declspec(property(get=getNextStmt))
3696 GenTreeStmt* gtNextStmt;
3698 __declspec(property(get=getPrevStmt))
3699 GenTreeStmt* gtPrevStmt;
3701 GenTreeStmt* getNextStmt()
3703 if (gtNext == nullptr)
3706 return gtNext->AsStmt();
3709 GenTreeStmt* getPrevStmt()
3711 if (gtPrev == nullptr)
3714 return gtPrev->AsStmt();
3717 GenTreeStmt(GenTreePtr expr, IL_OFFSETX offset)
3718 : GenTree(GT_STMT, TYP_VOID)
3720 , gtStmtList(nullptr)
3721 , gtInlineContext(nullptr)
3722 #if defined(DEBUGGING_SUPPORT) || defined(DEBUG)
3723 , gtStmtILoffsx(offset)
3726 , gtStmtLastILoffs(BAD_IL_OFFSET)
3729 // Statements can't have statements as part of their expression tree.
3730 assert(expr->gtOper != GT_STMT);
3732 gtFlags |= GTF_STMT_TOP_LEVEL;
3734 // Set the statement to have the same costs as the top node of the tree.
3735 // This is used long before costs have been assigned, so we need to copy
3740 #if DEBUGGABLE_GENTREE
3741 GenTreeStmt() : GenTree(GT_STMT, TYP_VOID) {}
3748 /* NOTE: Any tree nodes that are larger than 8 bytes (two ints or
3749 pointers) must be flagged as 'large' in GenTree::InitNodeSize().
3753 /* gtClsVar -- 'static data member' (GT_CLS_VAR) */
3755 struct GenTreeClsVar: public GenTree
3757 CORINFO_FIELD_HANDLE gtClsVarHnd;
3758 FieldSeqNode* gtFieldSeq;
3760 GenTreeClsVar(var_types type, CORINFO_FIELD_HANDLE clsVarHnd, FieldSeqNode* fldSeq) :
3761 GenTree(GT_CLS_VAR, type),
3762 gtClsVarHnd(clsVarHnd),
3765 gtFlags |= GTF_GLOB_REF;
3767 #if DEBUGGABLE_GENTREE
3768 GenTreeClsVar() : GenTree() {}
3772 /* gtArgPlace -- 'register argument placeholder' (GT_ARGPLACE) */
3774 struct GenTreeArgPlace: public GenTree
3776 CORINFO_CLASS_HANDLE gtArgPlaceClsHnd; // Needed when we have a TYP_STRUCT argument
3778 GenTreeArgPlace(var_types type, CORINFO_CLASS_HANDLE clsHnd) :
3779 GenTree(GT_ARGPLACE, type),
3780 gtArgPlaceClsHnd(clsHnd)
3782 #if DEBUGGABLE_GENTREE
3783 GenTreeArgPlace() : GenTree() {}
3787 /* gtLabel -- code label target (GT_LABEL) */
3789 struct GenTreeLabel: public GenTree
3791 BasicBlock* gtLabBB;
3793 GenTreeLabel(BasicBlock* bb) :
3794 GenTree(GT_LABEL, TYP_VOID),
3797 #if DEBUGGABLE_GENTREE
3798 GenTreeLabel() : GenTree() {}
3802 /* gtPhiArg -- phi node rhs argument, var = phi(phiarg, phiarg, phiarg...); GT_PHI_ARG */
3803 struct GenTreePhiArg: public GenTreeLclVarCommon
3805 BasicBlock * gtPredBB;
3807 GenTreePhiArg(var_types type, unsigned lclNum, unsigned snum, BasicBlock* block)
3808 : GenTreeLclVarCommon(GT_PHI_ARG, type, lclNum)
3814 #if DEBUGGABLE_GENTREE
3815 GenTreePhiArg() : GenTreeLclVarCommon() {}
3819 /* gtPutArgStk -- Argument passed on stack */
3821 struct GenTreePutArgStk: public GenTreeUnOp
3823 unsigned gtSlotNum; // Slot number of the argument to be passed on stack
3825 #if FEATURE_FASTTAILCALL
3826 bool putInIncomingArgArea; // Whether this arg needs to be placed in incoming arg area.
3827 // By default this is false and will be placed in out-going arg area.
3828 // Fast tail calls set this to true.
3829 // In future if we need to add more such bool fields consider bit fields.
3835 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(unsigned numSlots)
3836 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(bool isStruct),
3837 bool _putInIncomingArgArea = false
3838 DEBUGARG(GenTreePtr callNode = NULL)
3839 DEBUGARG(bool largeNode = false))
3841 GenTreeUnOp(oper, type DEBUGARG(largeNode)),
3843 putInIncomingArgArea(_putInIncomingArgArea)
3844 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
3845 , gtPutArgStkKind(PutArgStkKindInvalid),
3846 gtNumSlots(numSlots),
3847 gtIsStruct(isStruct),
3848 gtNumberReferenceSlots(0),
3850 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
3863 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(unsigned numSlots)
3864 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(bool isStruct),
3865 bool _putInIncomingArgArea = false
3866 DEBUGARG(GenTreePtr callNode = NULL)
3867 DEBUGARG(bool largeNode = false))
3869 GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode)),
3871 putInIncomingArgArea(_putInIncomingArgArea)
3872 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
3873 , gtPutArgStkKind(PutArgStkKindInvalid),
3874 gtNumSlots(numSlots),
3875 gtIsStruct(isStruct),
3876 gtNumberReferenceSlots(0),
3878 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
3885 #else // !FEATURE_FASTTAILCALL
3891 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(unsigned numSlots)
3892 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(bool isStruct)
3893 DEBUGARG(GenTreePtr callNode = NULL)
3894 DEBUGARG(bool largeNode = false))
3896 GenTreeUnOp(oper, type DEBUGARG(largeNode)),
3898 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
3899 , gtPutArgStkKind(PutArgStkKindInvalid),
3900 gtNumSlots(numSlots),
3901 gtIsStruct(isStruct),
3902 gtNumberReferenceSlots(0),
3904 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
3917 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(unsigned numSlots)
3918 FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(bool isStruct)
3919 DEBUGARG(GenTreePtr callNode = NULL)
3920 DEBUGARG(bool largeNode = false))
3922 GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode)),
3924 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
3925 , gtPutArgStkKind(PutArgStkKindInvalid),
3926 gtNumSlots(numSlots),
3927 gtIsStruct(isStruct),
3928 gtNumberReferenceSlots(0),
3930 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
3936 #endif // FEATURE_FASTTAILCALL
3938 unsigned getArgOffset() { return gtSlotNum * TARGET_POINTER_SIZE; }
3940 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
3941 unsigned getArgSize() { return gtNumSlots * TARGET_POINTER_SIZE; }
3942 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
3944 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
3945 //------------------------------------------------------------------------
3946 // setGcPointers: Sets the number of references and the layout of the struct object returned by the VM.
3949 // numPointers - Number of pointer references.
3950 // pointers - layout of the struct (with pointers marked.)
3956 // This data is used in the codegen for GT_PUTARG_STK to decide how to copy the struct to the stack by value.
3957 // If no pointer references are used, block copying instructions are used.
3958 // Otherwise the pointer reference slots are copied atomically in a way that gcinfo is emitted.
3959 // Any non pointer references between the pointer reference slots are copied in block fashion.
3961 void setGcPointers(unsigned numPointers, BYTE* pointers)
3963 gtNumberReferenceSlots = numPointers;
3964 gtGcPtrs = pointers;
3966 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
3969 GenTreePtr gtCall; // the call node to which this argument belongs
3972 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
3973 // Instruction selection: during codegen time, what code sequence we will be using
3974 // to encode this operation.
3976 enum PutArgStkKind : __int8
3978 PutArgStkKindInvalid,
3979 PutArgStkKindRepInstr,
3980 PutArgStkKindUnroll,
3983 PutArgStkKind gtPutArgStkKind;
3985 unsigned gtNumSlots; // Number of slots for the argument to be passed on stack
3986 bool gtIsStruct; // This stack arg is a struct.
3987 unsigned gtNumberReferenceSlots; // Number of reference slots.
3988 BYTE* gtGcPtrs; // gcPointers
3989 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
3991 #if DEBUGGABLE_GENTREE
3992 GenTreePutArgStk() : GenTreeUnOp() {}
3996 // Represents GT_COPY or GT_RELOAD node
3997 struct GenTreeCopyOrReload : public GenTreeUnOp
3999 // State required to support copy/reload of a multi-reg call node.
4000 // First register is is always given by gtRegNum.
4002 #if FEATURE_MULTIREG_RET
4003 regNumber gtOtherRegs[MAX_RET_REG_COUNT - 1];
4006 //----------------------------------------------------------
4007 // ClearOtherRegs: set gtOtherRegs to REG_NA.
4015 void ClearOtherRegs()
4017 #if FEATURE_MULTIREG_RET
4018 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
4020 gtOtherRegs[i] = REG_NA;
4025 //-----------------------------------------------------------
4026 // GetRegNumByIdx: Get regNumber of ith position.
4029 // idx - register position.
4032 // Returns regNumber assigned to ith position.
4034 regNumber GetRegNumByIdx(unsigned idx) const
4036 assert(idx < MAX_RET_REG_COUNT);
4043 #if FEATURE_MULTIREG_RET
4044 return gtOtherRegs[idx - 1];
4050 //-----------------------------------------------------------
4051 // SetRegNumByIdx: Set the regNumber for ith position.
4055 // idx - register position.
4060 void SetRegNumByIdx(regNumber reg, unsigned idx)
4062 assert(idx < MAX_RET_REG_COUNT);
4068 #if FEATURE_MULTIREG_RET
4071 gtOtherRegs[idx - 1] = reg;
4072 assert(gtOtherRegs[idx - 1] == reg);
4082 //----------------------------------------------------------------------------
4083 // CopyOtherRegs: copy multi-reg state from the given copy/reload node to this
4087 // from - GenTree node from which to copy multi-reg state
4092 // TODO-ARM: Implement this routine for Arm64 and Arm32
4093 // TODO-X86: Implement this routine for x86
4094 void CopyOtherRegs(GenTreeCopyOrReload* from)
4096 assert(OperGet() == from->OperGet());
4098 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
4099 for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
4101 gtOtherRegs[i] = from->gtOtherRegs[i];
4106 GenTreeCopyOrReload(genTreeOps oper,
4108 GenTree* op1) : GenTreeUnOp(oper, type, op1)
4114 #if DEBUGGABLE_GENTREE
4115 GenTreeCopyOrReload() : GenTreeUnOp() {}
4119 //------------------------------------------------------------------------
4120 // Deferred inline functions of GenTree -- these need the subtypes above to
4121 // be defined already.
4122 //------------------------------------------------------------------------
4124 inline bool GenTree::OperIsBlkOp() const
4126 return (gtOper == GT_INITBLK ||
4127 gtOper == GT_COPYBLK ||
4128 gtOper == GT_COPYOBJ);
4131 inline bool GenTree::OperIsDynBlkOp()
4133 return (OperIsBlkOp() && !gtGetOp2()->IsCnsIntOrI());
4136 inline bool GenTree::OperIsCopyBlkOp() const
4138 return (gtOper == GT_COPYOBJ || gtOper == GT_COPYBLK);
4141 inline bool GenTree::OperIsInitBlkOp() const
4143 return (gtOper == GT_INITBLK);
4146 //------------------------------------------------------------------------
4147 // IsFPZero: Checks whether this is a floating point constant with value 0.0
4150 // Returns true iff the tree is an GT_CNS_DBL, with value of 0.0.
4152 inline bool GenTree::IsFPZero()
4154 if ((gtOper == GT_CNS_DBL) && (gtDblCon.gtDconVal == 0.0))
4159 //------------------------------------------------------------------------
4160 // IsIntegralConst: Checks whether this is a constant node with the given value
4163 // constVal - the value of interest
4166 // Returns true iff the tree is an integral constant opcode, with
4170 // Like gtIconVal, the argument is of ssize_t, so cannot check for
4171 // long constants in a target-independent way.
4173 inline bool GenTree::IsIntegralConst(ssize_t constVal)
4176 if ((gtOper == GT_CNS_INT) && (gtIntConCommon.IconValue() == constVal))
4179 if ((gtOper == GT_CNS_LNG) && (gtIntConCommon.LngValue() == constVal))
4185 inline bool GenTree::IsBoxedValue()
4187 assert(gtOper != GT_BOX || gtBox.BoxOp() != NULL);
4188 return (gtOper == GT_BOX) && (gtFlags & GTF_BOX_VALUE);
4191 inline GenTreePtr GenTree::MoveNext()
4198 //------------------------------------------------------------------------
4199 // IsListForMultiRegArg: Given an GenTree node that represents an argument
4200 // enforce (or don't enforce) the following invariant.
4202 // For LEGACY_BACKEND or architectures that don't support MultiReg args
4203 // we don't allow a GT_LIST at all.
4205 // Currently for AMD64 UNIX we allow a limited case where a GT_LIST is
4206 // allowed but every element must be a GT_LCL_FLD.
4208 // For the future targets that allow for Multireg args (and this includes
4209 // the current ARM64 target) we allow a GT_LIST of arbitrary nodes, these
4210 // would typically start out as GT_LCL_VARs or GT_LCL_FLDS or GT_INDs,
4211 // but could be changed into constants or GT_COMMA trees by the later
4212 // optimization phases.
4215 // instance method for a GenTree node
4218 // true: the GenTree node is accepted as a valid argument
4219 // false: the GenTree node is not accepted as a valid argumeny
4221 inline bool GenTree::IsListForMultiRegArg()
4225 // We don't have a GT_LIST, so just return true.
4228 else // We do have a GT_LIST
4230 #if defined(LEGACY_BACKEND) || !FEATURE_MULTIREG_ARGS
4232 // Not allowed to have a GT_LIST for an argument
4233 // unless we have a RyuJIT backend and FEATURE_MULTIREG_ARGS
4237 #else // we have RyuJIT backend and FEATURE_MULTIREG_ARGS
4239 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
4240 // For UNIX ABI we currently only allow a GT_LIST of GT_LCL_FLDs nodes
4241 GenTree* gtListPtr = this;
4242 while (gtListPtr != nullptr)
4244 // ToDo: fix UNIX_AMD64 so that we do not generate this kind of a List
4245 // Note the list as currently created is malformed, as the last entry is a nullptr
4246 if (gtListPtr->Current() == nullptr)
4249 // Only a list of GT_LCL_FLDs is allowed
4250 if (gtListPtr->Current()->OperGet() != GT_LCL_FLD)
4254 gtListPtr = gtListPtr->MoveNext();
4256 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
4258 // Note that for non-UNIX ABI the GT_LIST may contain any node
4260 // We allow this GT_LIST as an argument
4263 #endif // RyuJIT backend and FEATURE_MULTIREG_ARGS
4268 inline GenTreePtr GenTree::Current()
4274 inline GenTreePtr *GenTree::pCurrent()
4277 return &(gtOp.gtOp1);
4280 inline GenTreePtr GenTree::gtGetOp1()
4287 inline bool GenTree::RequiresNonNullOp2(genTreeOps oper)
4340 inline GenTreePtr GenTree::gtGetOp2()
4342 /* gtOp.gtOp2 is only valid for GTK_BINOP nodes. */
4344 GenTreePtr op2 = OperIsBinary() ? gtOp.gtOp2 : nullptr;
4346 // This documents the genTreeOps for which gtOp.gtOp2 cannot be nullptr.
4347 // This helps prefix in its analyis of code which calls gtGetOp2()
4349 assert((op2 != nullptr) || !RequiresNonNullOp2(gtOper));
4354 inline GenTreePtr GenTree::gtEffectiveVal(bool commaOnly)
4359 return gtOp.gtOp2->gtEffectiveVal(commaOnly);
4362 if (!commaOnly && gtOp.gtOp1 != NULL)
4363 return gtOp.gtOp1->gtEffectiveVal();
4373 inline GenTree* GenTree::gtSkipReloadOrCopy()
4375 // There can be only one reload or copy (we can't have a reload/copy of a reload/copy)
4376 if (gtOper == GT_RELOAD || gtOper == GT_COPY)
4378 assert(gtGetOp1()->OperGet() != GT_RELOAD && gtGetOp1()->OperGet() != GT_COPY);
4384 //-----------------------------------------------------------------------------------
4385 // IsMultiRegCall: whether a call node returning its value in more than one register
4391 // Returns true if this GenTree is a multi register returning call
4392 inline bool GenTree::IsMultiRegCall() const
4396 // We cannot use AsCall() as it is not declared const
4397 const GenTreeCall* call = reinterpret_cast<const GenTreeCall *>(this);
4398 return call->HasMultiRegRetVal();
4404 //-------------------------------------------------------------------------
4405 // IsCopyOrReload: whether this is a GT_COPY or GT_RELOAD node.
4411 // Returns true if this GenTree is a copy or reload node.
4412 inline bool GenTree::IsCopyOrReload() const
4414 return (gtOper == GT_COPY || gtOper == GT_RELOAD);
4417 //-----------------------------------------------------------------------------------
4418 // IsCopyOrReloadOfMultiRegCall: whether this is a GT_COPY or GT_RELOAD of a multi-reg
4425 // Returns true if this GenTree is a copy or reload of multi-reg call node.
4426 inline bool GenTree::IsCopyOrReloadOfMultiRegCall() const
4428 if (IsCopyOrReload())
4430 GenTree* t = const_cast<GenTree*>(this);
4431 return t->gtGetOp1()->IsMultiRegCall();
4437 inline bool GenTree::IsCnsIntOrI() const
4439 return (gtOper == GT_CNS_INT);
4442 inline bool GenTree::IsIntegralConst() const
4444 #ifdef _TARGET_64BIT_
4445 return IsCnsIntOrI();
4446 #else // !_TARGET_64BIT_
4447 return ((gtOper == GT_CNS_INT) || (gtOper == GT_CNS_LNG));
4448 #endif // !_TARGET_64BIT_
4451 inline bool GenTree::IsIntCnsFitsInI32()
4453 #ifdef _TARGET_64BIT_
4454 return IsCnsIntOrI() && ((int)gtIntConCommon.IconValue() == gtIntConCommon.IconValue());
4455 #else // !_TARGET_64BIT_
4456 return IsCnsIntOrI();
4457 #endif // !_TARGET_64BIT_
4460 inline bool GenTree::IsCnsFltOrDbl() const
4462 return OperGet() == GT_CNS_DBL;
4465 inline bool GenTree::IsCnsNonZeroFltOrDbl()
4467 if (OperGet() == GT_CNS_DBL)
4469 double constValue = gtDblCon.gtDconVal;
4470 return *(__int64*)&constValue != 0;
4476 inline bool GenTree::IsHelperCall() { return OperGet() == GT_CALL && gtCall.gtCallType == CT_HELPER; }
4478 inline var_types GenTree::CastFromType() { return this->gtCast.CastOp()->TypeGet(); }
4479 inline var_types& GenTree::CastToType() { return this->gtCast.gtCastType; }
4481 //-----------------------------------------------------------------------------------
4482 // HasGCPtr: determine whether this block op involves GC pointers
4488 // Returns true iff the object being copied contains one or more GC pointers.
4491 // Of the block ops only GT_COPYOBJ is allowed to have GC pointers.
4494 GenTreeBlkOp::HasGCPtr()
4496 if (gtFlags & GTF_BLK_HASGCPTR)
4498 assert((gtOper == GT_COPYOBJ) && (AsCpObj()->gtGcPtrCount != 0));
4504 inline bool GenTree::isContainedSpillTemp() const
4506 #if !defined(LEGACY_BACKEND)
4507 // If spilled and no reg at use, then it is treated as contained.
4508 if (((gtFlags & GTF_SPILLED) != 0) &&
4509 ((gtFlags & GTF_NOREG_AT_USE) != 0))
4513 #endif //!LEGACY_BACKEND
4518 /*****************************************************************************/
4520 #ifndef _HOST_64BIT_
4521 #include <poppack.h>
4524 /*****************************************************************************/
4526 #if SMALL_TREE_NODES
4528 // In debug, on some platforms (e.g., when LATE_DISASM is defined), GenTreeIntCon is bigger than GenTreeLclFld.
4530 size_t TREE_NODE_SZ_SMALL = max(sizeof(GenTreeIntCon), sizeof(GenTreeLclFld));
4532 #endif // SMALL_TREE_NODES
4535 size_t TREE_NODE_SZ_LARGE = sizeof(GenTreeCall);
4537 /*****************************************************************************
4538 * Types returned by GenTree::lvaLclVarRefs()
4543 VR_INVARIANT = 0x00, // an invariant value
4545 VR_IND_REF = 0x01, // an object reference
4546 VR_IND_SCL = 0x02, // a non-object reference
4547 VR_GLB_VAR = 0x04, // a global (clsVar)
4549 // Add a temp define to avoid merge conflict.
4550 #define VR_IND_PTR VR_IND_REF
4552 /*****************************************************************************/
4553 #endif // !GENTREE_H
4554 /*****************************************************************************/